Mirror of https://github.com/Xahau/xahaud.git (synced 2025-11-04 18:55:49 +00:00)

Compare commits: ce7b1c4f1d...f8d1a6f2b4 (15 commits)
f8d1a6f2b4
c46ede7c8f
0e2bc365ea
446bc76b69
a0c38a4fb3
631650f7eb
0b31d8e534
ecf03f4afe
b801c2837d
1474e808cb
457e633a81
7ea99caa19
3e5c15c172
52b4fb503c
98123fa934
@@ -68,6 +68,7 @@ runs:
           FAIL_ON_MISS: ${{ inputs.fail-on-cache-miss }}
           LOOKUP_ONLY: ${{ inputs.lookup-only }}
           USE_DELTAS: ${{ inputs.use-deltas }}
+          COMMIT_MSG: ${{ github.event.head_commit.message }}
         run: |
           set -euo pipefail
@@ -80,12 +81,57 @@ runs:
           echo "Use deltas: ${USE_DELTAS}"
           echo ""
 
+          # Normalize target path (expand tilde and resolve to absolute path)
+          # This ensures consistent path comparison in the mount registry
+          if [[ "${TARGET_PATH}" == ~* ]]; then
+            # Expand tilde manually (works even if directory doesn't exist yet)
+            TARGET_PATH="${HOME}${TARGET_PATH:1}"
+          fi
+          echo "Normalized target path: ${TARGET_PATH}"
+
           # Generate unique cache workspace
           CACHE_HASH=$(echo "${CACHE_KEY}" | md5sum | cut -d' ' -f1)
           CACHE_WORKSPACE="/tmp/xahau-cache-${CACHE_HASH}"
 
           echo "Cache workspace: ${CACHE_WORKSPACE}"
 
+          # Check for [ci-clear-cache] tag in commit message
+          if echo "${COMMIT_MSG}" | grep -q '\[ci-clear-cache\]'; then
+            echo ""
+            echo "🗑️ [ci-clear-cache] detected in commit message"
+            echo "Clearing cache for key: ${CACHE_KEY}"
+            echo ""
+
+            # Delete base layer
+            S3_BASE_KEY="s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst"
+            if aws s3 ls "${S3_BASE_KEY}" --region "${S3_REGION}" >/dev/null 2>&1; then
+              echo "Deleting base layer: ${S3_BASE_KEY}"
+              aws s3 rm "${S3_BASE_KEY}" --region "${S3_REGION}" 2>/dev/null || true
+              echo "✓ Base layer deleted"
+            else
+              echo "ℹ️ No base layer found to delete"
+            fi
+
+            # Delete all delta layers for this key
+            echo "Deleting all delta layers matching: ${CACHE_KEY}-delta-*"
+            DELTA_COUNT=$(aws s3 ls "s3://${S3_BUCKET}/" --region "${S3_REGION}" | grep "${CACHE_KEY}-delta-" | wc -l || echo "0")
+            DELTA_COUNT=$(echo "${DELTA_COUNT}" | tr -d ' \n')  # Trim whitespace
+            if [ "${DELTA_COUNT}" -gt 0 ]; then
+              aws s3 rm "s3://${S3_BUCKET}/" --recursive \
+                --exclude "*" \
+                --include "${CACHE_KEY}-delta-*" \
+                --region "${S3_REGION}" 2>/dev/null || true
+              echo "✓ Deleted ${DELTA_COUNT} delta layer(s)"
+            else
+              echo "ℹ️ No delta layers found to delete"
+            fi
+
+            echo ""
+            echo "✅ Cache cleared successfully"
+            echo "Build will proceed from scratch (bootstrap mode)"
+            echo ""
+          fi
+
           # Create OverlayFS directory structure
           mkdir -p "${CACHE_WORKSPACE}"/{base,upper,work,merged}
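For context, the base/upper/work/merged layout created above is the standard OverlayFS arrangement. A minimal sketch of how such a workspace is typically mounted (the actual mount logic lives elsewhere in the action; the paths and sudo usage here are assumptions):

    CACHE_WORKSPACE=/tmp/xahau-cache-example
    mkdir -p "${CACHE_WORKSPACE}"/{base,upper,work,merged}

    # base   = read-only layer restored from S3
    # upper  = writable layer capturing this build's changes (the future delta)
    # work   = OverlayFS-internal scratch space (same filesystem as upper)
    # merged = the combined view the build actually reads and writes
    sudo mount -t overlay overlay \
      -o "lowerdir=${CACHE_WORKSPACE}/base,upperdir=${CACHE_WORKSPACE}/upper,workdir=${CACHE_WORKSPACE}/work" \
      "${CACHE_WORKSPACE}/merged"

    # After the build, archiving upper/ alone yields exactly the delta layer.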
@@ -52,6 +52,15 @@ runs:
           echo "S3 bucket: s3://${S3_BUCKET}"
           echo ""
 
+          # Normalize target path (expand tilde and resolve to absolute path)
+          # This ensures consistent path comparison with the mount registry
+          if [[ "${TARGET_PATH}" == ~* ]]; then
+            # Expand tilde manually (works even if directory doesn't exist yet)
+            TARGET_PATH="${HOME}${TARGET_PATH:1}"
+          fi
+          echo "Normalized target path: ${TARGET_PATH}"
+          echo ""
+
           # Find the cache workspace from mount registry
           MOUNT_REGISTRY="/tmp/xahau-cache-mounts.txt"
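Why the manual expansion is needed: bash does not tilde-expand inside quotes, so a literal "~/.ccache" arriving from an action input would never string-match the absolute paths recorded in the registry. A quick illustration of the pattern above:

    TARGET_PATH="~/.ccache"                   # tilde arrives as a literal character
    if [[ "${TARGET_PATH}" == ~* ]]; then
      TARGET_PATH="${HOME}${TARGET_PATH:1}"   # drop the '~', prepend $HOME
    fi
    echo "${TARGET_PATH}"                     # e.g. /home/runner/.ccache on a GitHub runner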
@@ -134,10 +143,13 @@ runs:
           echo "Uploading base layer to S3..."
           echo " Key: ${PRIMARY_KEY}-base.tar.zst"
 
-          aws s3 cp "${BASE_TARBALL}" "${S3_BASE_KEY}" \
-            --region "${S3_REGION}" \
-            --tagging "type=base" \
-            --quiet
+          aws s3api put-object \
+            --bucket "${S3_BUCKET}" \
+            --key "${PRIMARY_KEY}-base.tar.zst" \
+            --body "${BASE_TARBALL}" \
+            --tagging 'type=base' \
+            --region "${S3_REGION}" \
+            >/dev/null
 
           echo "✓ Uploaded: ${S3_BASE_KEY}"
         fi
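The switch from aws s3 cp to aws s3api put-object is presumably because the high-level cp command does not accept --tagging, while the low-level call applies the tag atomically with the upload (note put-object is a single request, not multipart, which may matter for very large tarballs). A hedged way to confirm the tag landed, with an example bucket and key:

    aws s3api get-object-tagging \
      --bucket xahaud-github-actions-cache-niq \
      --key my-cache-key-base.tar.zst \
      --region us-east-1
    # Expected output includes: "TagSet": [{"Key": "type", "Value": "base"}]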
@@ -178,10 +190,13 @@ runs:
           echo "Uploading new base layer to S3..."
           echo " Key: ${PRIMARY_KEY}-base.tar.zst"
 
-          aws s3 cp "${BASE_TARBALL}" "${S3_BASE_KEY}" \
-            --region "${S3_REGION}" \
-            --tagging "type=base" \
-            --quiet
+          aws s3api put-object \
+            --bucket "${S3_BUCKET}" \
+            --key "${PRIMARY_KEY}-base.tar.zst" \
+            --body "${BASE_TARBALL}" \
+            --tagging 'type=base' \
+            --region "${S3_REGION}" \
+            >/dev/null
 
           echo "✓ Uploaded: ${S3_BASE_KEY}"
         fi
@@ -295,14 +310,55 @@ runs:
           echo "Uploading timestamped delta to S3..."
           echo " Key: ${PRIMARY_KEY}-delta-${TIMESTAMP}-${COMMIT_SHA}.tar.zst"
 
-          # Upload with tag for auto-deletion after 7 days
-          aws s3 cp "${DELTA_TARBALL}" "${S3_DELTA_TIMESTAMPED}" \
-            --region "${S3_REGION}" \
-            --tagging "type=delta-archive" \
-            --quiet
+          # Upload with tag (deltas cleaned up inline - keep last 1)
+          aws s3api put-object \
+            --bucket "${S3_BUCKET}" \
+            --key "${PRIMARY_KEY}-delta-${TIMESTAMP}-${COMMIT_SHA}.tar.zst" \
+            --body "${DELTA_TARBALL}" \
+            --tagging 'type=delta-archive' \
+            --region "${S3_REGION}" \
+            >/dev/null
 
           echo "✓ Uploaded: ${S3_DELTA_TIMESTAMPED}"
-          echo " (tagged for auto-deletion after 7 days)"
+
+          # Inline cleanup: Keep only latest delta (the one we just uploaded)
+          echo ""
+          echo "Cleaning up old deltas (keeping only latest)..."
+
+          # List all deltas for this key, sorted by LastModified (oldest first)
+          ALL_DELTAS=$(aws s3api list-objects-v2 \
+            --bucket "${S3_BUCKET}" \
+            --prefix "${PRIMARY_KEY}-delta-" \
+            --region "${S3_REGION}" \
+            --query 'sort_by(Contents, &LastModified)[*].Key' \
+            --output json 2>/dev/null || echo "[]")
+
+          DELTA_COUNT=$(echo "${ALL_DELTAS}" | jq 'length' 2>/dev/null || echo "0")
+
+          if [ "${DELTA_COUNT}" -gt 1 ]; then
+            # Keep last 1 (newest), delete all older ones (all except last 1 = [0:-1])
+            OLD_DELTAS=$(echo "${ALL_DELTAS}" | jq -r '.[0:-1][]' 2>/dev/null)
+
+            if [ -n "${OLD_DELTAS}" ]; then
+              DELETE_COUNT=$((DELTA_COUNT - 1))
+              echo " Found ${DELETE_COUNT} old delta(s) to delete"
+
+              # Create delete batch request JSON
+              DELETE_OBJECTS=$(echo "${OLD_DELTAS}" | jq -R -s -c 'split("\n") | map(select(length > 0)) | map({Key: .}) | {Objects: ., Quiet: true}' 2>/dev/null)
+
+              if [ -n "${DELETE_OBJECTS}" ]; then
+                aws s3api delete-objects \
+                  --bucket "${S3_BUCKET}" \
+                  --delete "${DELETE_OBJECTS}" \
+                  --region "${S3_REGION}" \
+                  >/dev/null 2>&1
+
+                echo "✓ Deleted ${DELETE_COUNT} old delta(s)"
+              fi
+            fi
+          else
+            echo "ℹ️ Only ${DELTA_COUNT} delta(s) exist, no cleanup needed"
+          fi
 
           # Cleanup delta tarball
           rm -f "${DELTA_TARBALL}"
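To make the jq plumbing concrete, here is the batch-delete payload it produces for a hypothetical set of three deltas (keys invented; only the newest survives):

    ALL_DELTAS='["k-delta-001.tar.zst","k-delta-002.tar.zst","k-delta-003.tar.zst"]'

    OLD_DELTAS=$(echo "${ALL_DELTAS}" | jq -r '.[0:-1][]')
    echo "${OLD_DELTAS}"
    # k-delta-001.tar.zst
    # k-delta-002.tar.zst

    echo "${OLD_DELTAS}" | jq -R -s -c 'split("\n") | map(select(length > 0)) | map({Key: .}) | {Objects: ., Quiet: true}'
    # {"Objects":[{"Key":"k-delta-001.tar.zst"},{"Key":"k-delta-002.tar.zst"}],"Quiet":true}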
@@ -2,6 +2,10 @@ name: 'Configure ccache'
 description: 'Sets up ccache with consistent configuration'
 
 inputs:
+  cache_dir:
+    description: 'Path to ccache directory'
+    required: false
+    default: '~/.ccache'
   max_size:
     description: 'Maximum cache size'
     required: false
@@ -14,18 +18,6 @@ inputs:
     description: 'How to check compiler for changes'
     required: false
     default: 'content'
-  is_main_branch:
-    description: 'Whether the current branch is the main branch'
-    required: false
-    default: 'false'
-  main_cache_dir:
-    description: 'Path to the main branch cache directory'
-    required: false
-    default: '~/.ccache-main'
-  current_cache_dir:
-    description: 'Path to the current branch cache directory'
-    required: false
-    default: '~/.ccache-current'
 
 runs:
   using: 'composite'
@@ -33,31 +25,20 @@ runs:
     - name: Configure ccache
       shell: bash
       run: |
-        # Create cache directories
-        mkdir -p ${{ inputs.main_cache_dir }} ${{ inputs.current_cache_dir }}
-
-        # Set compiler check globally
-        ccache -o compiler_check=${{ inputs.compiler_check }}
-
-        # Use a single config file location
-        mkdir -p ~/.ccache
-        export CONF_PATH="$HOME/.ccache/ccache.conf"
-
-        # Apply common settings
-        echo "max_size = ${{ inputs.max_size }}" > "$CONF_PATH"
-        echo "hash_dir = ${{ inputs.hash_dir }}" >> "$CONF_PATH"
-        echo "compiler_check = ${{ inputs.compiler_check }}" >> "$CONF_PATH"
-
-        if [ "${{ inputs.is_main_branch }}" == "true" ]; then
-          # Main branch: use main branch cache
-          ccache --set-config=cache_dir="${{ inputs.main_cache_dir }}"
-          echo "CCACHE_DIR=${{ inputs.main_cache_dir }}" >> $GITHUB_ENV
-        else
-          # Feature branch: use current branch cache with main as secondary
-          ccache --set-config=cache_dir="${{ inputs.current_cache_dir }}"
-          ccache --set-config=secondary_storage="file:${{ inputs.main_cache_dir }}"
-          echo "CCACHE_DIR=${{ inputs.current_cache_dir }}" >> $GITHUB_ENV
-        fi
-
-        ccache -p # Print config for verification
-        ccache -z # Zero statistics before the build
+        # Create cache directory
+        mkdir -p ${{ inputs.cache_dir }}
+
+        # Configure ccache settings
+        ccache --set-config=cache_dir="${{ inputs.cache_dir }}"
+        ccache --set-config=max_size=${{ inputs.max_size }}
+        ccache --set-config=hash_dir=${{ inputs.hash_dir }}
+        ccache --set-config=compiler_check=${{ inputs.compiler_check }}
+
+        # Export for use by build tools
+        echo "CCACHE_DIR=${{ inputs.cache_dir }}" >> $GITHUB_ENV
+
+        # Print config for verification
+        ccache -p
+
+        # Zero statistics before the build
+        ccache -z
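After this step runs, the effective settings can be spot-checked on the runner; a small hedged example (--get-config requires ccache 3.7 or newer):

    ccache --get-config cache_dir    # should print the expanded cache_dir input
    ccache --get-config max_size     # e.g. 2.0 GB
    ccache -s                        # stats; counters read zero right after 'ccache -z'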
.github/actions/xahau-ga-build/action.yml (44 lines changed, vendored)
@@ -47,6 +47,12 @@ inputs:
     description: 'GCC version to use for Clang toolchain (e.g. 11, 13)'
     required: false
     default: ''
+  aws-access-key-id:
+    description: 'AWS Access Key ID for S3 cache storage'
+    required: true
+  aws-secret-access-key:
+    description: 'AWS Secret Access Key for S3 cache storage'
+    required: true
 
 runs:
   using: 'composite'
@@ -59,28 +65,19 @@ runs:
         SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
         echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
 
-    - name: Restore ccache directory for default branch
+    - name: Restore ccache directory
       if: inputs.ccache_enabled == 'true'
       id: ccache-restore
-      uses: actions/cache/restore@v4
-      with:
-        path: ~/.ccache-main
-        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
-        restore-keys: |
-          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
-          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
-
-    - name: Restore ccache directory for current branch
-      if: inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
-      id: ccache-restore-current-branch
-      uses: actions/cache/restore@v4
+      uses: ./.github/actions/xahau-actions-cache-restore
       with:
-        path: ~/.ccache-current
+        path: ~/.ccache
         key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
         restore-keys: |
           ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
           ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
           ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
+        aws-access-key-id: ${{ inputs.aws-access-key-id }}
+        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
 
     - name: Configure project
       shell: bash
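For reference, the safe-branch step maps every character outside a-zA-Z0-9_.- to '-', which keeps arbitrary branch names usable inside cache keys. Note that the trailing newline is translated too (and command substitution cannot strip it, since it is no longer a newline):

    echo "feature/my branch!" | tr -c 'a-zA-Z0-9_.-' '-'
    # -> feature-my-branch--   (the second trailing '-' is the translated newline)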
@@ -159,16 +156,11 @@ runs:
       shell: bash
       run: ccache -s
 
-    - name: Save ccache directory for default branch
-      if: always() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
-      uses: actions/cache/save@v4
-      with:
-        path: ~/.ccache-main
-        key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
-
-    - name: Save ccache directory for current branch
-      if: always() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
-      uses: actions/cache/save@v4
+    - name: Save ccache directory
+      if: always() && inputs.ccache_enabled == 'true'
+      uses: ./.github/actions/xahau-actions-cache-save
       with:
-        path: ~/.ccache-current
-        key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}
+        path: ~/.ccache
+        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
+        aws-access-key-id: ${{ inputs.aws-access-key-id }}
+        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
.github/actions/xahau-ga-dependencies/action.yml (55 lines changed, vendored)
@@ -32,6 +32,15 @@ inputs:
     options:
       - libstdcxx
       - libcxx
+  aws-access-key-id:
+    description: 'AWS Access Key ID for S3 cache storage'
+    required: true
+  aws-secret-access-key:
+    description: 'AWS Secret Access Key for S3 cache storage'
+    required: true
 
 outputs:
   cache-hit:
@@ -41,47 +47,21 @@
 runs:
   using: 'composite'
   steps:
-    - name: Generate safe branch name
-      if: inputs.cache_enabled == 'true'
-      id: safe-branch
-      shell: bash
-      run: |
-        SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
-        echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
-
-    - name: Check conanfile changes
-      if: inputs.cache_enabled == 'true'
-      id: check-conanfile-changes
-      shell: bash
-      run: |
-        # Check if we're on the main branch
-        if [ "${{ github.ref_name }}" == "${{ inputs.main_branch }}" ]; then
-          echo "should-save-conan-cache=true" >> $GITHUB_OUTPUT
-        else
-          # Fetch main branch for comparison
-          git fetch origin ${{ inputs.main_branch }}
-
-          # Check if conanfile.txt or conanfile.py has changed compared to main branch
-          if git diff --quiet origin/${{ inputs.main_branch }}..HEAD -- '**/conanfile.txt' '**/conanfile.py'; then
-            echo "should-save-conan-cache=false" >> $GITHUB_OUTPUT
-          else
-            echo "should-save-conan-cache=true" >> $GITHUB_OUTPUT
-          fi
-        fi
-
     - name: Restore Conan cache
       if: inputs.cache_enabled == 'true'
       id: cache-restore-conan
-      uses: actions/cache/restore@v4
+      uses: ./.github/actions/xahau-actions-cache-restore
       with:
-        path: |
-          ~/.conan
-          ~/.conan2
+        path: ~/.conan2
+        # Note: compiler-id format is compiler-version-stdlib[-gccversion]
         key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
         restore-keys: |
           ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-
           ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
+        use-deltas: 'false'
+        aws-access-key-id: ${{ inputs.aws-access-key-id }}
+        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
 
     - name: Export custom recipes
       shell: bash
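The Conan cache key embeds hashFiles over every conanfile, so any dependency change yields a new primary key while restore-keys fall back to older compatible caches. A rough local approximation of that fingerprint (not GitHub's exact hashFiles algorithm, just an illustration):

    find . -name 'conanfile.txt' -o -name 'conanfile.py' \
      | sort \
      | xargs sha256sum \
      | sha256sum \
      | cut -d' ' -f1
    # Editing any conanfile changes this digest and therefore the primary cache key.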
@@ -107,10 +87,11 @@ runs:
           ..
 
     - name: Save Conan cache
-      if: always() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true' && steps.check-conanfile-changes.outputs.should-save-conan-cache == 'true'
-      uses: actions/cache/save@v4
+      if: always() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
+      uses: ./.github/actions/xahau-actions-cache-save
       with:
-        path: |
-          ~/.conan
-          ~/.conan2
-        key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}
+        path: ~/.conan2
+        key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
+        use-deltas: 'false'
+        aws-access-key-id: ${{ inputs.aws-access-key-id }}
+        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
.github/workflows/test-cache-actions.yml.disabled (new file, 290 lines, vendored)
@@ -0,0 +1,290 @@
name: Test Cache Actions (State Machine)

on:
  push:
    branches: ["nd-experiment-overlayfs-*"]
  workflow_dispatch:
    inputs:
      state_assertion:
        description: 'Expected state (optional, e.g. "2" to assert state 2)'
        required: false
        type: string
        default: '1'
      start_state:
        description: 'Force specific starting state (optional, e.g. "3" to start at state 3)'
        required: false
        type: string
      clear_cache:
        description: 'Clear cache before running'
        required: false
        type: boolean
        default: false

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test-cache-state-machine:
    runs-on: ubuntu-latest
    env:
      CACHE_KEY: test-state-machine-${{ github.ref_name }}
      CACHE_DIR: /tmp/test-cache
      S3_BUCKET: xahaud-github-actions-cache-niq
      S3_REGION: us-east-1
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Parse Inputs (workflow_dispatch or commit message)
        id: parse-inputs
        run: |
          # Priority 1: workflow_dispatch inputs (manual trigger)
          STATE_ASSERTION="${{ inputs.state_assertion }}"
          START_STATE="${{ inputs.start_state }}"
          SHOULD_CLEAR="${{ inputs.clear_cache }}"

          # Priority 2: commit message tags (push event)
          if [ "${{ github.event_name }}" = "push" ]; then
            COMMIT_MSG="${{ github.event.head_commit.message }}"

            # Parse [state:N] assertion tag (optional, if not provided as input)
            if [ -z "${STATE_ASSERTION}" ] && echo "${COMMIT_MSG}" | grep -qE '\[state:[0-9]+\]'; then
              STATE_ASSERTION=$(echo "${COMMIT_MSG}" | grep -oE '\[state:[0-9]+\]' | grep -oE '[0-9]+')
              echo "State assertion found in commit: ${STATE_ASSERTION}"
            fi

            # Parse [start-state:N] force tag (optional, if not provided as input)
            if [ -z "${START_STATE}" ] && echo "${COMMIT_MSG}" | grep -qE '\[start-state:[0-9]+\]'; then
              START_STATE=$(echo "${COMMIT_MSG}" | grep -oE '\[start-state:[0-9]+\]' | grep -oE '[0-9]+')
              echo "Start state found in commit: ${START_STATE}"
            fi

            # Parse [ci-clear-cache] tag (if not provided as input)
            if [ "${SHOULD_CLEAR}" != "true" ] && echo "${COMMIT_MSG}" | grep -q '\[ci-clear-cache\]'; then
              SHOULD_CLEAR=true
              echo "Cache clear requested in commit"
            fi
          fi

          # Output final values
          echo "state_assertion=${STATE_ASSERTION}" >> "$GITHUB_OUTPUT"
          echo "start_state=${START_STATE}" >> "$GITHUB_OUTPUT"
          echo "should_clear=${SHOULD_CLEAR}" >> "$GITHUB_OUTPUT"

          # Log what we're using
          echo ""
          echo "Configuration:"
          [ -n "${STATE_ASSERTION}" ] && echo " State assertion: ${STATE_ASSERTION}"
          [ -n "${START_STATE}" ] && echo " Start state: ${START_STATE}"
          echo " Clear cache: ${SHOULD_CLEAR}"

      - name: Check S3 State (Before Restore)
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
        run: |
          echo "=========================================="
          echo "S3 State Check (Before Restore)"
          echo "=========================================="
          echo "Cache key: ${CACHE_KEY}"
          echo ""

          # Check if base exists
          BASE_EXISTS=false
          if aws s3 ls "s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst" --region "${S3_REGION}" >/dev/null 2>&1; then
            BASE_EXISTS=true
          fi
          echo "Base exists: ${BASE_EXISTS}"

          # Count deltas
          DELTA_COUNT=$(aws s3 ls "s3://${S3_BUCKET}/" --region "${S3_REGION}" | grep "${CACHE_KEY}-delta-" | wc -l || echo "0")
          echo "Delta count: ${DELTA_COUNT}"

      - name: Restore Cache
        uses: ./.github/actions/xahau-actions-cache-restore
        with:
          path: ${{ env.CACHE_DIR }}
          key: ${{ env.CACHE_KEY }}
          s3-bucket: ${{ env.S3_BUCKET }}
          s3-region: ${{ env.S3_REGION }}
          use-deltas: 'true'
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Auto-Detect State and Validate
        id: state
        env:
          STATE_ASSERTION: ${{ steps.parse-inputs.outputs.state_assertion }}
          START_STATE: ${{ steps.parse-inputs.outputs.start_state }}
        run: |
          echo "=========================================="
          echo "State Detection and Validation"
          echo "=========================================="

          # Create cache directory if it doesn't exist
          mkdir -p "${CACHE_DIR}"

          # Handle [start-state:N] - force specific state
          if [ -n "${START_STATE}" ]; then
            echo "🎯 [start-state:${START_STATE}] detected - forcing state setup"

            # Clear cache and create state files 0 through START_STATE
            rm -f ${CACHE_DIR}/state*.txt 2>/dev/null || true
            for i in $(seq 0 ${START_STATE}); do
              echo "State ${i} - Forced at $(date)" > "${CACHE_DIR}/state${i}.txt"
              echo "Commit: ${{ github.sha }}" >> "${CACHE_DIR}/state${i}.txt"
            done

            DETECTED_STATE=${START_STATE}
            echo "✓ Forced to state ${DETECTED_STATE}"
          else
            # Auto-detect state by counting state files
            STATE_FILES=$(ls ${CACHE_DIR}/state*.txt 2>/dev/null | wc -l)
            DETECTED_STATE=${STATE_FILES}
            echo "Auto-detected state: ${DETECTED_STATE} (${STATE_FILES} state files)"
          fi

          # Show cache contents
          echo ""
          echo "Cache contents:"
          if [ -d "${CACHE_DIR}" ] && [ "$(ls -A ${CACHE_DIR})" ]; then
            ls -la "${CACHE_DIR}"
          else
            echo "(empty)"
          fi

          # Validate [state:N] assertion if provided
          if [ -n "${STATE_ASSERTION}" ]; then
            echo ""
            echo "Validating assertion: [state:${STATE_ASSERTION}]"
            if [ "${DETECTED_STATE}" -ne "${STATE_ASSERTION}" ]; then
              echo "❌ ERROR: State mismatch!"
              echo " Expected (from [state:N]): ${STATE_ASSERTION}"
              echo " Detected (from cache): ${DETECTED_STATE}"
              exit 1
            fi
            echo "✓ Assertion passed: detected == expected (${DETECTED_STATE})"
          fi

          # Output detected state for next steps
          echo "detected_state=${DETECTED_STATE}" >> "$GITHUB_OUTPUT"

          echo ""
          echo "=========================================="

      - name: Simulate Build (State Transition)
        env:
          DETECTED_STATE: ${{ steps.state.outputs.detected_state }}
        run: |
          echo "=========================================="
          echo "Simulating Build (State Transition)"
          echo "=========================================="

          # Calculate next state
          NEXT_STATE=$((DETECTED_STATE + 1))
          echo "Transitioning: State ${DETECTED_STATE} → State ${NEXT_STATE}"
          echo ""

          # Create state file for next state
          STATE_FILE="${CACHE_DIR}/state${NEXT_STATE}.txt"
          echo "State ${NEXT_STATE} - Created at $(date)" > "${STATE_FILE}"
          echo "Commit: ${{ github.sha }}" >> "${STATE_FILE}"
          echo "Message: ${{ github.event.head_commit.message }}" >> "${STATE_FILE}"

          echo "✓ Created ${STATE_FILE}"

          # Show final cache state
          echo ""
          echo "Final cache contents:"
          ls -la "${CACHE_DIR}"

          echo ""
          echo "State files:"
          cat ${CACHE_DIR}/state*.txt

      - name: Save Cache
        uses: ./.github/actions/xahau-actions-cache-save
        with:
          path: ${{ env.CACHE_DIR }}
          key: ${{ env.CACHE_KEY }}
          s3-bucket: ${{ env.S3_BUCKET }}
          s3-region: ${{ env.S3_REGION }}
          use-deltas: 'true'
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Validate S3 State (After Save)
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
          DETECTED_STATE: ${{ steps.state.outputs.detected_state }}
        run: |
          echo "=========================================="
          echo "S3 State Validation (After Save)"
          echo "=========================================="

          # Calculate next state (what we just saved)
          NEXT_STATE=$((DETECTED_STATE + 1))
          echo "Saved state: ${NEXT_STATE}"
          echo ""

          # Check if base exists
          if aws s3 ls "s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst" --region "${S3_REGION}" >/dev/null 2>&1; then
            BASE_SIZE=$(aws s3 ls "s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst" --region "${S3_REGION}" | awk '{print $3}')
            echo "✓ Base exists: ${CACHE_KEY}-base.tar.zst (${BASE_SIZE} bytes)"
          else
            echo "❌ ERROR: Base should exist after save"
            exit 1
          fi

          # List deltas
          echo ""
          echo "Delta layers:"
          DELTAS=$(aws s3 ls "s3://${S3_BUCKET}/" --region "${S3_REGION}" | grep "${CACHE_KEY}-delta-" || echo "")
          if [ -n "${DELTAS}" ]; then
            echo "${DELTAS}"
            DELTA_COUNT=$(echo "${DELTAS}" | wc -l)
          else
            echo "(none)"
            DELTA_COUNT=0
          fi

          # Validate S3 state
          echo ""
          if [ "${DETECTED_STATE}" -eq 0 ]; then
            # Saved state 1 from bootstrap (state 0 → 1)
            if [ "${DELTA_COUNT}" -ne 0 ]; then
              echo "⚠️ WARNING: Bootstrap (state 1) should have 0 deltas, found ${DELTA_COUNT}"
            else
              echo "✓ State 1 saved: base exists, 0 deltas"
            fi
          else
            # Saved delta (state N+1)
            if [ "${DELTA_COUNT}" -ne 1 ]; then
              echo "⚠️ WARNING: State ${NEXT_STATE} expects 1 delta (inline cleanup), found ${DELTA_COUNT}"
              echo "This might be OK if multiple builds ran concurrently"
            else
              echo "✓ State ${NEXT_STATE} saved: base + 1 delta (old deltas cleaned)"
            fi
          fi

          echo ""
          echo "=========================================="
          echo "✅ State ${DETECTED_STATE} → ${NEXT_STATE} Complete!"
          echo "=========================================="
          echo ""
          echo "Next commit will auto-detect state ${NEXT_STATE}"
          echo ""
          echo "Options:"
          echo " # Normal (auto-advance)"
          echo " git commit -m 'continue testing'"
          echo ""
          echo " # With assertion (validate state)"
          echo " git commit -m 'test delta [state:${NEXT_STATE}]'"
          echo ""
          echo " # Clear cache and restart"
          echo " git commit -m 'fresh start [ci-clear-cache]'"
          echo ""
          echo " # Jump to specific state"
          echo " git commit -m 'jump to state 3 [start-state:3]'"
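The commit-message tag handling above is plain grep and can be sanity-checked locally (message contents invented):

    COMMIT_MSG="wip delta logic [state:2] [ci-clear-cache]"

    echo "${COMMIT_MSG}" | grep -oE '\[state:[0-9]+\]' | grep -oE '[0-9]+'      # -> 2
    echo "${COMMIT_MSG}" | grep -q '\[ci-clear-cache\]' && echo "clear requested"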
@@ -2,7 +2,7 @@ name: Nix - GA Runner
 
 on:
   push:
-    branches: ["dev", "candidate", "release"]
+    branches: ["dev", "candidate", "release", "nd-experiment-overlayfs-2025-10-29"]
   pull_request:
     branches: ["dev", "candidate", "release"]
   schedule:
@@ -156,7 +156,7 @@ jobs:
     env:
       build_dir: .build
       # Bump this number to invalidate all caches globally.
-      CACHE_VERSION: 2
+      CACHE_VERSION: 3
       MAIN_BRANCH_NAME: dev
     steps:
       - name: Checkout
@@ -237,7 +237,6 @@ jobs:
           max_size: 2G
           hash_dir: true
           compiler_check: content
-          is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}
 
       - name: Configure Conan
         run: |
@@ -294,6 +293,8 @@ jobs:
           cache_version: ${{ env.CACHE_VERSION }}
           main_branch: ${{ env.MAIN_BRANCH_NAME }}
           stdlib: ${{ matrix.stdlib }}
+          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
 
       - name: Build
         uses: ./.github/actions/xahau-ga-build
@@ -308,6 +309,8 @@ jobs:
           main_branch: ${{ env.MAIN_BRANCH_NAME }}
           stdlib: ${{ matrix.stdlib }}
           clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
+          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
 
       - name: Set artifact name
         id: set-artifact-name