mirror of https://github.com/Xahau/xahaud.git
synced 2025-11-04 18:55:49 +00:00

Compare commits: sync-2.4.0...83f6bc64e1 (35 commits)
| SHA1 |
|---|
| 83f6bc64e1 |
| be6fad9692 |
| b24e4647ba |
| 638cb0afe5 |
| bd384e6bc1 |
| 4c546e5d91 |
| 28727b3f86 |
| a4f96a435a |
| d0f63cc2d1 |
| 2433bfe277 |
| ef40a7f351 |
| a4a4126bdc |
| 0559b6c418 |
| f8d1a6f2b4 |
| c46ede7c8f |
| 0e2bc365ea |
| 446bc76b69 |
| a0c38a4fb3 |
| 631650f7eb |
| 0b31d8e534 |
| ecf03f4afe |
| b801c2837d |
| 1474e808cb |
| 457e633a81 |
| 7ea99caa19 |
| 3e5c15c172 |
| 52b4fb503c |
| 98123fa934 |
| ce7b1c4f1d |
| e062dcae58 |
| a9d284fec1 |
| 065d0c3e07 |
| 4fda40b709 |
| 6014356d91 |
| d790f97430 |
.github/actions/xahau-actions-cache-restore/action.yml (vendored, new file, 211 lines)
@@ -0,0 +1,211 @@
name: 'Xahau Cache Restore (S3)'
description: 'Drop-in replacement for actions/cache/restore using S3 storage'

inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache (currently only single path supported)'
    required: true
  key:
    description: 'An explicit key for restoring the cache'
    required: true
  restore-keys:
    description: 'An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key'
    required: false
    default: ''
  s3-bucket:
    description: 'S3 bucket name for cache storage'
    required: false
    default: 'xahaud-github-actions-cache-niq'
  s3-region:
    description: 'S3 region'
    required: false
    default: 'us-east-1'
  fail-on-cache-miss:
    description: 'Fail the workflow if cache entry is not found'
    required: false
    default: 'false'
  lookup-only:
    description: 'Check if a cache entry exists for the given input(s) without downloading it'
    required: false
    default: 'false'
  # Note: Composite actions can't access secrets.* directly - must be passed from workflow
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 access'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 access'
    required: true

outputs:
  cache-hit:
    description: 'A boolean value to indicate an exact match was found for the primary key'
    value: ${{ steps.restore-cache.outputs.cache-hit }}
  cache-primary-key:
    description: 'The key that was used to restore the cache (may be from restore-keys)'
    value: ${{ steps.restore-cache.outputs.cache-primary-key }}
  cache-matched-key:
    description: 'The key that was used to restore the cache (exact or prefix match)'
    value: ${{ steps.restore-cache.outputs.cache-matched-key }}

runs:
  using: 'composite'
  steps:
    - name: Restore cache from S3
      id: restore-cache
      shell: bash
      env:
        AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
        AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
        S3_BUCKET: ${{ inputs.s3-bucket }}
        S3_REGION: ${{ inputs.s3-region }}
        CACHE_KEY: ${{ inputs.key }}
        RESTORE_KEYS: ${{ inputs.restore-keys }}
        TARGET_PATH: ${{ inputs.path }}
        FAIL_ON_MISS: ${{ inputs.fail-on-cache-miss }}
        LOOKUP_ONLY: ${{ inputs.lookup-only }}
      run: |
        set -euo pipefail

        echo "=========================================="
        echo "Xahau Cache Restore (S3)"
        echo "=========================================="
        echo "Target path: ${TARGET_PATH}"
        echo "Cache key: ${CACHE_KEY}"
        echo "S3 bucket: s3://${S3_BUCKET}"
        echo ""

        # Normalize target path (expand tilde and resolve to absolute path)
        if [[ "${TARGET_PATH}" == ~* ]]; then
          TARGET_PATH="${HOME}${TARGET_PATH:1}"
        fi
        TARGET_PATH=$(realpath -m "${TARGET_PATH}")
        echo "Normalized target path: ${TARGET_PATH}"
        echo ""

        # Function to try restoring a cache key
        try_restore_key() {
          local key=$1
          local s3_key="s3://${S3_BUCKET}/${key}-base.tar.zst"

          echo "Checking for key: ${key}"

          if aws s3 ls "${s3_key}" --region "${S3_REGION}" >/dev/null 2>&1; then
            echo "✓ Found cache: ${s3_key}"
            return 0
          else
            echo "✗ Not found: ${key}"
            return 1
          fi
        }

        # Try exact match first
        MATCHED_KEY=""
        EXACT_MATCH="false"

        if try_restore_key "${CACHE_KEY}"; then
          MATCHED_KEY="${CACHE_KEY}"
          EXACT_MATCH="true"
          echo ""
          echo "🎯 Exact cache hit for key: ${CACHE_KEY}"
        else
          # Try restore-keys (prefix matching)
          if [ -n "${RESTORE_KEYS}" ]; then
            echo ""
            echo "Primary key not found, trying restore-keys..."

            while IFS= read -r restore_key; do
              [ -z "${restore_key}" ] && continue
              restore_key=$(echo "${restore_key}" | xargs)

              if try_restore_key "${restore_key}"; then
                MATCHED_KEY="${restore_key}"
                EXACT_MATCH="false"
                echo ""
                echo "✓ Cache restored from fallback key: ${restore_key}"
                break
              fi
            done <<< "${RESTORE_KEYS}"
          fi
        fi

        # Check if we found anything
        if [ -z "${MATCHED_KEY}" ]; then
          echo ""
          echo "❌ No cache found for key: ${CACHE_KEY}"

          if [ "${FAIL_ON_MISS}" = "true" ]; then
            echo "fail-on-cache-miss is enabled, failing workflow"
            exit 1
          fi

          # Set outputs for cache miss
          echo "cache-hit=false" >> $GITHUB_OUTPUT
          echo "cache-primary-key=" >> $GITHUB_OUTPUT
          echo "cache-matched-key=" >> $GITHUB_OUTPUT

          # Create empty cache directory
          mkdir -p "${TARGET_PATH}"

          echo ""
          echo "=========================================="
          echo "Cache restore completed (bootstrap mode)"
          echo "Created empty cache directory: ${TARGET_PATH}"
          echo "=========================================="
          exit 0
        fi

        # If lookup-only, we're done
        if [ "${LOOKUP_ONLY}" = "true" ]; then
          echo "cache-hit=${EXACT_MATCH}" >> $GITHUB_OUTPUT
          echo "cache-primary-key=${CACHE_KEY}" >> $GITHUB_OUTPUT
          echo "cache-matched-key=${MATCHED_KEY}" >> $GITHUB_OUTPUT

          echo ""
          echo "=========================================="
          echo "Cache lookup completed (lookup-only mode)"
          echo "Cache exists: ${MATCHED_KEY}"
          echo "=========================================="
          exit 0
        fi

        # Download and extract cache
        S3_KEY="s3://${S3_BUCKET}/${MATCHED_KEY}-base.tar.zst"
        TEMP_TARBALL="/tmp/xahau-cache-restore-$$.tar.zst"

        echo ""
        echo "Downloading cache..."
        aws s3 cp "${S3_KEY}" "${TEMP_TARBALL}" --region "${S3_REGION}"

        TARBALL_SIZE=$(du -h "${TEMP_TARBALL}" | cut -f1)
        echo "✓ Downloaded: ${TARBALL_SIZE}"

        # Create parent directory if needed
        mkdir -p "$(dirname "${TARGET_PATH}")"

        # Remove existing target if it exists
        if [ -e "${TARGET_PATH}" ]; then
          echo "Removing existing target: ${TARGET_PATH}"
          rm -rf "${TARGET_PATH}"
        fi

        # Create target directory and extract
        mkdir -p "${TARGET_PATH}"
        echo ""
        echo "Extracting cache..."
        zstd -d -c "${TEMP_TARBALL}" | tar -xf - -C "${TARGET_PATH}"
        echo "✓ Cache extracted to: ${TARGET_PATH}"

        # Cleanup
        rm -f "${TEMP_TARBALL}"

        # Set outputs
        echo "cache-hit=${EXACT_MATCH}" >> $GITHUB_OUTPUT
        echo "cache-primary-key=${CACHE_KEY}" >> $GITHUB_OUTPUT
        echo "cache-matched-key=${MATCHED_KEY}" >> $GITHUB_OUTPUT

        echo ""
        echo "=========================================="
        echo "Cache restore completed successfully"
        echo "Cache hit: ${EXACT_MATCH}"
        echo "Matched key: ${MATCHED_KEY}"
        echo "=========================================="
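
No caller is shown at this point in the diff; the following is a minimal usage sketch of a hypothetical workflow step (the step names and example key are invented, the secret names are the ones used elsewhere in this diff). Note that, as written, each restore key is probed as the exact object name `<key>-base.tar.zst` via `aws s3 ls`, so fully-qualified fallback keys (such as a main-branch key) are the ones that can realistically match; a trailing-dash prefix key becomes `<prefix>--base.tar.zst` and will generally miss.

# Hypothetical usage sketch - not part of this commit range
- name: Restore build cache
  id: cache
  uses: ./.github/actions/xahau-actions-cache-restore
  with:
    path: ~/.ccache
    key: Linux-ccache-v3-gcc-13-Release-my-feature        # example key
    restore-keys: |
      Linux-ccache-v3-gcc-13-Release-dev
    aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
    aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

- name: Report cache result
  run: echo "hit=${{ steps.cache.outputs.cache-hit }} matched=${{ steps.cache.outputs.cache-matched-key }}"
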
.github/actions/xahau-actions-cache-save/action.yml (vendored, new file, 110 lines)
@@ -0,0 +1,110 @@
name: 'Xahau Cache Save (S3)'
description: 'Drop-in replacement for actions/cache/save using S3 storage'

inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache (currently only single path supported)'
    required: true
  key:
    description: 'An explicit key for saving the cache'
    required: true
  s3-bucket:
    description: 'S3 bucket name for cache storage'
    required: false
    default: 'xahaud-github-actions-cache-niq'
  s3-region:
    description: 'S3 region'
    required: false
    default: 'us-east-1'
  # Note: Composite actions can't access secrets.* directly - must be passed from workflow
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 access'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 access'
    required: true

runs:
  using: 'composite'
  steps:
    - name: Save cache to S3
      shell: bash
      env:
        AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
        AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
        S3_BUCKET: ${{ inputs.s3-bucket }}
        S3_REGION: ${{ inputs.s3-region }}
        CACHE_KEY: ${{ inputs.key }}
        TARGET_PATH: ${{ inputs.path }}
      run: |
        set -euo pipefail

        echo "=========================================="
        echo "Xahau Cache Save (S3)"
        echo "=========================================="
        echo "Target path: ${TARGET_PATH}"
        echo "Cache key: ${CACHE_KEY}"
        echo "S3 bucket: s3://${S3_BUCKET}"
        echo ""

        # Normalize target path (expand tilde and resolve to absolute path)
        if [[ "${TARGET_PATH}" == ~* ]]; then
          TARGET_PATH="${HOME}${TARGET_PATH:1}"
        fi
        echo "Normalized target path: ${TARGET_PATH}"
        echo ""

        # Check if target directory exists
        if [ ! -d "${TARGET_PATH}" ]; then
          echo "⚠️ Target directory does not exist: ${TARGET_PATH}"
          echo "Skipping cache save."
          exit 0
        fi

        # Use static base name (one base per key, immutable)
        S3_BASE_KEY="s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst"

        # Check if base already exists (immutability - first write wins)
        if aws s3 ls "${S3_BASE_KEY}" --region "${S3_REGION}" >/dev/null 2>&1; then
          echo "⚠️ Cache already exists: ${S3_BASE_KEY}"
          echo "Skipping upload (immutability - first write wins, like GitHub Actions)"
          echo ""
          echo "=========================================="
          echo "Cache save completed (already exists)"
          echo "=========================================="
          exit 0
        fi

        # Create tarball
        BASE_TARBALL="/tmp/xahau-cache-base-$$.tar.zst"

        echo "Creating cache tarball..."
        tar -cf - -C "${TARGET_PATH}" . | zstd -3 -T0 -q -o "${BASE_TARBALL}"

        BASE_SIZE=$(du -h "${BASE_TARBALL}" | cut -f1)
        echo "✓ Cache tarball created: ${BASE_SIZE}"
        echo ""

        # Upload to S3
        echo "Uploading cache to S3..."
        echo "  Key: ${CACHE_KEY}-base.tar.zst"

        aws s3api put-object \
          --bucket "${S3_BUCKET}" \
          --key "${CACHE_KEY}-base.tar.zst" \
          --body "${BASE_TARBALL}" \
          --tagging 'type=base' \
          --region "${S3_REGION}" \
          >/dev/null

        echo "✓ Uploaded: ${S3_BASE_KEY}"

        # Cleanup
        rm -f "${BASE_TARBALL}"

        echo ""
        echo "=========================================="
        echo "Cache save completed successfully"
        echo "Cache size: ${BASE_SIZE}"
        echo "Cache key: ${CACHE_KEY}"
        echo "=========================================="
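
Because the save is immutable per key (first write wins server-side), callers would typically pair it with the restore action and skip the save on an exact hit; a hypothetical pairing sketch follows (step names and the example key are invented, the gating mirrors how the xahau-ga-* actions later in this diff use cache-hit):

# Hypothetical restore/save pairing - not part of this commit range
- name: Restore Conan cache
  id: restore
  uses: ./.github/actions/xahau-actions-cache-restore
  with:
    path: ~/.conan2
    key: Linux-conan-v3-example             # example key
    aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
    aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

# ... build steps that populate ~/.conan2 ...

- name: Save Conan cache
  if: always() && steps.restore.outputs.cache-hit != 'true'
  uses: ./.github/actions/xahau-actions-cache-save
  with:
    path: ~/.conan2
    key: Linux-conan-v3-example             # same key; the action also no-ops if the base already exists
    aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
    aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
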
.github/actions/xahau-configure-ccache/action.yml (vendored, deleted file, 63 lines)
@@ -1,63 +0,0 @@
name: 'Configure ccache'
description: 'Sets up ccache with consistent configuration'

inputs:
  max_size:
    description: 'Maximum cache size'
    required: false
    default: '2G'
  hash_dir:
    description: 'Whether to include directory paths in hash'
    required: false
    default: 'true'
  compiler_check:
    description: 'How to check compiler for changes'
    required: false
    default: 'content'
  is_main_branch:
    description: 'Whether the current branch is the main branch'
    required: false
    default: 'false'
  main_cache_dir:
    description: 'Path to the main branch cache directory'
    required: false
    default: '~/.ccache-main'
  current_cache_dir:
    description: 'Path to the current branch cache directory'
    required: false
    default: '~/.ccache-current'

runs:
  using: 'composite'
  steps:
    - name: Configure ccache
      shell: bash
      run: |
        # Create cache directories
        mkdir -p ${{ inputs.main_cache_dir }} ${{ inputs.current_cache_dir }}

        # Set compiler check globally
        ccache -o compiler_check=${{ inputs.compiler_check }}

        # Use a single config file location
        mkdir -p ~/.ccache
        export CONF_PATH="$HOME/.ccache/ccache.conf"

        # Apply common settings
        echo "max_size = ${{ inputs.max_size }}" > "$CONF_PATH"
        echo "hash_dir = ${{ inputs.hash_dir }}" >> "$CONF_PATH"
        echo "compiler_check = ${{ inputs.compiler_check }}" >> "$CONF_PATH"

        if [ "${{ inputs.is_main_branch }}" == "true" ]; then
          # Main branch: use main branch cache
          ccache --set-config=cache_dir="${{ inputs.main_cache_dir }}"
          echo "CCACHE_DIR=${{ inputs.main_cache_dir }}" >> $GITHUB_ENV
        else
          # Feature branch: use current branch cache with main as secondary
          ccache --set-config=cache_dir="${{ inputs.current_cache_dir }}"
          ccache --set-config=secondary_storage="file:${{ inputs.main_cache_dir }}"
          echo "CCACHE_DIR=${{ inputs.current_cache_dir }}" >> $GITHUB_ENV
        fi

        ccache -p # Print config for verification
        ccache -z # Zero statistics before the build
.github/actions/xahau-ga-build/action.yml (vendored, 121 lines changed)
@@ -47,6 +47,24 @@ inputs:
    description: 'GCC version to use for Clang toolchain (e.g. 11, 13)'
    required: false
    default: ''
  ccache_max_size:
    description: 'Maximum ccache size'
    required: false
    default: '2G'
  ccache_hash_dir:
    description: 'Whether to include directory paths in hash'
    required: false
    default: 'true'
  ccache_compiler_check:
    description: 'How to check compiler for changes'
    required: false
    default: 'content'
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 cache storage'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 cache storage'
    required: true

runs:
  using: 'composite'
@@ -59,28 +77,44 @@ runs:
        SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
        echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT

    - name: Restore ccache directory for default branch
    - name: Restore ccache directory
      if: inputs.ccache_enabled == 'true'
      id: ccache-restore
      uses: actions/cache/restore@v4
      uses: ./.github/actions/xahau-actions-cache-restore
      with:
        path: ~/.ccache-main
        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
        restore-keys: |
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-

    - name: Restore ccache directory for current branch
      if: inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
      id: ccache-restore-current-branch
      uses: actions/cache/restore@v4
      with:
        path: ~/.ccache-current
        path: ~/.ccache
        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
        restore-keys: |
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
        aws-access-key-id: ${{ inputs.aws-access-key-id }}
        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}

    - name: Configure ccache
      if: inputs.ccache_enabled == 'true'
      shell: bash
      run: |
        # Use ccache's default cache_dir (~/.ccache) - don't override it
        # This avoids tilde expansion issues when setting it explicitly

        # Create cache directory using ccache's default
        mkdir -p ~/.ccache

        # Configure ccache settings (but NOT cache_dir - use default)
        # This overwrites any cached config to ensure fresh configuration
        ccache --set-config=max_size=${{ inputs.ccache_max_size }}
        ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
        ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}

        # Note: Not setting CCACHE_DIR - let ccache use its default (~/.ccache)

        # Print config for verification
        echo "=== ccache configuration ==="
        ccache -p

        # Zero statistics before the build
        ccache -z

    - name: Configure project
      shell: bash
@@ -96,14 +130,27 @@ runs:
        if [ -n "${{ inputs.cxx }}" ]; then
          export CXX="${{ inputs.cxx }}"
        fi

        # Configure ccache launcher args
        CCACHE_ARGS=""

        # Create wrapper toolchain that overlays ccache on top of Conan's toolchain
        # This enables ccache for the main app build without affecting Conan dependency builds
        if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
          CCACHE_ARGS="-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
          cat > wrapper_toolchain.cmake <<'EOF'
        # Include Conan's generated toolchain first (sets compiler, flags, etc.)
        # Note: CMAKE_CURRENT_LIST_DIR is the directory containing this wrapper (.build/)
        include(${CMAKE_CURRENT_LIST_DIR}/build/generators/conan_toolchain.cmake)

        # Overlay ccache configuration for main application build
        # This does NOT affect Conan dependency builds (already completed)
        set(CMAKE_C_COMPILER_LAUNCHER ccache CACHE STRING "C compiler launcher" FORCE)
        set(CMAKE_CXX_COMPILER_LAUNCHER ccache CACHE STRING "C++ compiler launcher" FORCE)
        EOF
          TOOLCHAIN_FILE="wrapper_toolchain.cmake"
          echo "✅ Created wrapper toolchain with ccache enabled"
        else
          TOOLCHAIN_FILE="build/generators/conan_toolchain.cmake"
          echo "ℹ️ Using Conan toolchain directly (ccache disabled)"
        fi

        # Configure C++ standard library if specified
        # libstdcxx used for clang-14/16 to work around missing lexicographical_compare_three_way in libc++
        # libcxx can be used with clang-17+ which has full C++20 support
@@ -143,32 +190,36 @@ runs:
        # So we get: .build/build/generators/ with our non-standard folder name
        cmake .. \
          -G "${{ inputs.generator }}" \
          $CCACHE_ARGS \
          ${CMAKE_CXX_FLAGS:+-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"} \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \
          -DCMAKE_BUILD_TYPE=${{ inputs.configuration }}

    - name: Show ccache config before build
      if: inputs.ccache_enabled == 'true'
      shell: bash
      run: |
        echo "=========================================="
        echo "ccache configuration before build"
        echo "=========================================="
        ccache -p
        echo ""

    - name: Build project
      shell: bash
      run: |
        cd ${{ inputs.build_dir }}
        cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc)
        cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) -- -v

    - name: Show ccache statistics
      if: inputs.ccache_enabled == 'true'
      shell: bash
      run: ccache -s

    - name: Save ccache directory for default branch
      if: always() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
      uses: actions/cache/save@v4
    - name: Save ccache directory
      if: always() && inputs.ccache_enabled == 'true'
      uses: ./.github/actions/xahau-actions-cache-save
      with:
        path: ~/.ccache-main
        key: ${{ steps.ccache-restore.outputs.cache-primary-key }}

    - name: Save ccache directory for current branch
      if: always() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
      uses: actions/cache/save@v4
      with:
        path: ~/.ccache-current
        key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}
        path: ~/.ccache
        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
        aws-access-key-id: ${{ inputs.aws-access-key-id }}
        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
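
To make the new single restore/save pair concrete, here is a worked key layout under stated assumptions (Linux runner, cache_version 3, compiler-id gcc-13-libstdcxx, configuration Release, branch my-feature; all values hypothetical):

# Worked example of the keys the steps above would produce (hypothetical values)
#
# restore step:
#   key:          Linux-ccache-v3-gcc-13-libstdcxx-Release-my-feature
#   restore-keys: Linux-ccache-v3-gcc-13-libstdcxx-Release-dev
#                 Linux-ccache-v3-gcc-13-libstdcxx-Release-
#                 Linux-ccache-v3-gcc-13-libstdcxx-
#
# save step (gated only on ccache being enabled, branch-specific key):
#   uploads Linux-ccache-v3-gcc-13-libstdcxx-Release-my-feature-base.tar.zst
#   on the first build of the branch; later saves under the same key are
#   skipped by the save action's first-write-wins check.
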
.github/actions/xahau-ga-dependencies/action.yml (vendored, 127 lines changed)
@@ -25,6 +25,28 @@ inputs:
    description: 'Main branch name for restore keys'
    required: false
    default: 'dev'
  os:
    description: 'Operating system (Linux, Macos)'
    required: false
    default: 'Linux'
  arch:
    description: 'Architecture (x86_64, armv8)'
    required: false
    default: 'x86_64'
  compiler:
    description: 'Compiler type (gcc, clang, apple-clang)'
    required: true
  compiler_version:
    description: 'Compiler version (11, 13, 14, etc.)'
    required: true
  cc:
    description: 'C compiler executable (gcc-13, clang-14, etc.), empty for macOS'
    required: false
    default: ''
  cxx:
    description: 'C++ compiler executable (g++-14, clang++-14, etc.), empty for macOS'
    required: false
    default: ''
  stdlib:
    description: 'C++ standard library for Conan configuration (note: also in compiler-id)'
    required: true
@@ -32,6 +54,12 @@ inputs:
    options:
      - libstdcxx
      - libcxx
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 cache storage'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 cache storage'
    required: true

outputs:
  cache-hit:
@@ -41,47 +69,72 @@ outputs:
runs:
  using: 'composite'
  steps:
    - name: Generate safe branch name
      if: inputs.cache_enabled == 'true'
      id: safe-branch
      shell: bash
      run: |
        SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
        echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT

    - name: Check conanfile changes
      if: inputs.cache_enabled == 'true'
      id: check-conanfile-changes
      shell: bash
      run: |
        # Check if we're on the main branch
        if [ "${{ github.ref_name }}" == "${{ inputs.main_branch }}" ]; then
          echo "should-save-conan-cache=true" >> $GITHUB_OUTPUT
        else
          # Fetch main branch for comparison
          git fetch origin ${{ inputs.main_branch }}

          # Check if conanfile.txt or conanfile.py has changed compared to main branch
          if git diff --quiet origin/${{ inputs.main_branch }}..HEAD -- '**/conanfile.txt' '**/conanfile.py'; then
            echo "should-save-conan-cache=false" >> $GITHUB_OUTPUT
          else
            echo "should-save-conan-cache=true" >> $GITHUB_OUTPUT
          fi
        fi

    - name: Restore Conan cache
      if: inputs.cache_enabled == 'true'
      id: cache-restore-conan
      uses: actions/cache/restore@v4
      uses: ./.github/actions/xahau-actions-cache-restore
      with:
        path: |
          ~/.conan
          ~/.conan2
        path: ~/.conan2
        # Note: compiler-id format is compiler-version-stdlib[-gccversion]
        key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
        restore-keys: |
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
        aws-access-key-id: ${{ inputs.aws-access-key-id }}
        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}

    - name: Configure Conan
      shell: bash
      run: |
        # Create the default profile directory if it doesn't exist
        mkdir -p ~/.conan2/profiles

        # Determine the correct libcxx based on stdlib parameter
        if [ "${{ inputs.stdlib }}" = "libcxx" ]; then
          LIBCXX="libc++"
        else
          LIBCXX="libstdc++11"
        fi

        # Create profile with our specific settings
        # This overwrites any cached profile to ensure fresh configuration
        cat > ~/.conan2/profiles/default <<EOF
        [settings]
        arch=${{ inputs.arch }}
        build_type=${{ inputs.configuration }}
        compiler=${{ inputs.compiler }}
        compiler.cppstd=20
        compiler.libcxx=${LIBCXX}
        compiler.version=${{ inputs.compiler_version }}
        os=${{ inputs.os }}
        EOF

        # Add buildenv and conf sections for Linux (not needed for macOS)
        if [ "${{ inputs.os }}" = "Linux" ] && [ -n "${{ inputs.cc }}" ]; then
          cat >> ~/.conan2/profiles/default <<EOF

        [buildenv]
        CC=/usr/bin/${{ inputs.cc }}
        CXX=/usr/bin/${{ inputs.cxx }}

        [conf]
        tools.build:compiler_executables={"c": "/usr/bin/${{ inputs.cc }}", "cpp": "/usr/bin/${{ inputs.cxx }}"}
        EOF
        fi

        # Add macOS-specific conf if needed
        if [ "${{ inputs.os }}" = "Macos" ]; then
          cat >> ~/.conan2/profiles/default <<EOF

        [conf]
        # Workaround for gRPC with newer Apple Clang
        tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
        EOF
        fi

        # Display profile for verification
        conan profile show

    - name: Export custom recipes
      shell: bash
@@ -107,10 +160,10 @@ runs:
          ..

    - name: Save Conan cache
      if: always() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true' && steps.check-conanfile-changes.outputs.should-save-conan-cache == 'true'
      uses: actions/cache/save@v4
      if: always() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
      uses: ./.github/actions/xahau-actions-cache-save
      with:
        path: |
          ~/.conan
          ~/.conan2
        key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}
        path: ~/.conan2
        key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
        aws-access-key-id: ${{ inputs.aws-access-key-id }}
        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
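
For orientation, this is the default profile the "Configure Conan" step above would generate for one hypothetical Linux matrix entry (compiler=gcc, compiler_version=13, stdlib=libstdcxx, configuration=Release, cc=gcc-13, cxx=g++-13); it simply instantiates the heredocs shown in the diff:

# Resulting ~/.conan2/profiles/default for the assumed matrix entry:
#
#   [settings]
#   arch=x86_64
#   build_type=Release
#   compiler=gcc
#   compiler.cppstd=20
#   compiler.libcxx=libstdc++11
#   compiler.version=13
#   os=Linux
#
#   [buildenv]
#   CC=/usr/bin/gcc-13
#   CXX=/usr/bin/g++-13
#
#   [conf]
#   tools.build:compiler_executables={"c": "/usr/bin/gcc-13", "cpp": "/usr/bin/g++-13"}
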
.github/workflows/test-cache-actions.yml.disabled (vendored, new file, 290 lines)
@@ -0,0 +1,290 @@
name: Test Cache Actions (State Machine)

on:
  push:
    branches: ["nd-experiment-overlayfs-*"]
  workflow_dispatch:
    inputs:
      state_assertion:
        description: 'Expected state (optional, e.g. "2" to assert state 2)'
        required: false
        type: string
        default: '1'
      start_state:
        description: 'Force specific starting state (optional, e.g. "3" to start at state 3)'
        required: false
        type: string
      clear_cache:
        description: 'Clear cache before running'
        required: false
        type: boolean
        default: false

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test-cache-state-machine:
    runs-on: ubuntu-latest
    env:
      CACHE_KEY: test-state-machine-${{ github.ref_name }}
      CACHE_DIR: /tmp/test-cache
      S3_BUCKET: xahaud-github-actions-cache-niq
      S3_REGION: us-east-1
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Parse Inputs (workflow_dispatch or commit message)
        id: parse-inputs
        run: |
          # Priority 1: workflow_dispatch inputs (manual trigger)
          STATE_ASSERTION="${{ inputs.state_assertion }}"
          START_STATE="${{ inputs.start_state }}"
          SHOULD_CLEAR="${{ inputs.clear_cache }}"

          # Priority 2: commit message tags (push event)
          if [ "${{ github.event_name }}" = "push" ]; then
            COMMIT_MSG="${{ github.event.head_commit.message }}"

            # Parse [state:N] assertion tag (optional, if not provided as input)
            if [ -z "${STATE_ASSERTION}" ] && echo "${COMMIT_MSG}" | grep -qE '\[state:[0-9]+\]'; then
              STATE_ASSERTION=$(echo "${COMMIT_MSG}" | grep -oE '\[state:[0-9]+\]' | grep -oE '[0-9]+')
              echo "State assertion found in commit: ${STATE_ASSERTION}"
            fi

            # Parse [start-state:N] force tag (optional, if not provided as input)
            if [ -z "${START_STATE}" ] && echo "${COMMIT_MSG}" | grep -qE '\[start-state:[0-9]+\]'; then
              START_STATE=$(echo "${COMMIT_MSG}" | grep -oE '\[start-state:[0-9]+\]' | grep -oE '[0-9]+')
              echo "Start state found in commit: ${START_STATE}"
            fi

            # Parse [ci-clear-cache] tag (if not provided as input)
            if [ "${SHOULD_CLEAR}" != "true" ] && echo "${COMMIT_MSG}" | grep -q '\[ci-clear-cache\]'; then
              SHOULD_CLEAR=true
              echo "Cache clear requested in commit"
            fi
          fi

          # Output final values
          echo "state_assertion=${STATE_ASSERTION}" >> "$GITHUB_OUTPUT"
          echo "start_state=${START_STATE}" >> "$GITHUB_OUTPUT"
          echo "should_clear=${SHOULD_CLEAR}" >> "$GITHUB_OUTPUT"

          # Log what we're using
          echo ""
          echo "Configuration:"
          [ -n "${STATE_ASSERTION}" ] && echo " State assertion: ${STATE_ASSERTION}"
          [ -n "${START_STATE}" ] && echo " Start state: ${START_STATE}"
          echo " Clear cache: ${SHOULD_CLEAR}"

      - name: Check S3 State (Before Restore)
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
        run: |
          echo "=========================================="
          echo "S3 State Check (Before Restore)"
          echo "=========================================="
          echo "Cache key: ${CACHE_KEY}"
          echo ""

          # Check if base exists
          BASE_EXISTS=false
          if aws s3 ls "s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst" --region "${S3_REGION}" >/dev/null 2>&1; then
            BASE_EXISTS=true
          fi
          echo "Base exists: ${BASE_EXISTS}"

          # Count deltas
          DELTA_COUNT=$(aws s3 ls "s3://${S3_BUCKET}/" --region "${S3_REGION}" | grep "${CACHE_KEY}-delta-" | wc -l || echo "0")
          echo "Delta count: ${DELTA_COUNT}"

      - name: Restore Cache
        uses: ./.github/actions/xahau-actions-cache-restore
        with:
          path: ${{ env.CACHE_DIR }}
          key: ${{ env.CACHE_KEY }}
          s3-bucket: ${{ env.S3_BUCKET }}
          s3-region: ${{ env.S3_REGION }}
          use-deltas: 'true'
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Auto-Detect State and Validate
        id: state
        env:
          STATE_ASSERTION: ${{ steps.parse-inputs.outputs.state_assertion }}
          START_STATE: ${{ steps.parse-inputs.outputs.start_state }}
        run: |
          echo "=========================================="
          echo "State Detection and Validation"
          echo "=========================================="

          # Create cache directory if it doesn't exist
          mkdir -p "${CACHE_DIR}"

          # Handle [start-state:N] - force specific state
          if [ -n "${START_STATE}" ]; then
            echo "🎯 [start-state:${START_STATE}] detected - forcing state setup"

            # Clear cache and create state files 0 through START_STATE
            rm -f ${CACHE_DIR}/state*.txt 2>/dev/null || true
            for i in $(seq 0 ${START_STATE}); do
              echo "State ${i} - Forced at $(date)" > "${CACHE_DIR}/state${i}.txt"
              echo "Commit: ${{ github.sha }}" >> "${CACHE_DIR}/state${i}.txt"
            done

            DETECTED_STATE=${START_STATE}
            echo "✓ Forced to state ${DETECTED_STATE}"
          else
            # Auto-detect state by counting state files
            STATE_FILES=$(ls ${CACHE_DIR}/state*.txt 2>/dev/null | wc -l)
            DETECTED_STATE=${STATE_FILES}
            echo "Auto-detected state: ${DETECTED_STATE} (${STATE_FILES} state files)"
          fi

          # Show cache contents
          echo ""
          echo "Cache contents:"
          if [ -d "${CACHE_DIR}" ] && [ "$(ls -A ${CACHE_DIR})" ]; then
            ls -la "${CACHE_DIR}"
          else
            echo "(empty)"
          fi

          # Validate [state:N] assertion if provided
          if [ -n "${STATE_ASSERTION}" ]; then
            echo ""
            echo "Validating assertion: [state:${STATE_ASSERTION}]"
            if [ "${DETECTED_STATE}" -ne "${STATE_ASSERTION}" ]; then
              echo "❌ ERROR: State mismatch!"
              echo " Expected (from [state:N]): ${STATE_ASSERTION}"
              echo " Detected (from cache): ${DETECTED_STATE}"
              exit 1
            fi
            echo "✓ Assertion passed: detected == expected (${DETECTED_STATE})"
          fi

          # Output detected state for next steps
          echo "detected_state=${DETECTED_STATE}" >> "$GITHUB_OUTPUT"

          echo ""
          echo "=========================================="

      - name: Simulate Build (State Transition)
        env:
          DETECTED_STATE: ${{ steps.state.outputs.detected_state }}
        run: |
          echo "=========================================="
          echo "Simulating Build (State Transition)"
          echo "=========================================="

          # Calculate next state
          NEXT_STATE=$((DETECTED_STATE + 1))
          echo "Transitioning: State ${DETECTED_STATE} → State ${NEXT_STATE}"
          echo ""

          # Create state file for next state
          STATE_FILE="${CACHE_DIR}/state${NEXT_STATE}.txt"
          echo "State ${NEXT_STATE} - Created at $(date)" > "${STATE_FILE}"
          echo "Commit: ${{ github.sha }}" >> "${STATE_FILE}"
          echo "Message: ${{ github.event.head_commit.message }}" >> "${STATE_FILE}"

          echo "✓ Created ${STATE_FILE}"

          # Show final cache state
          echo ""
          echo "Final cache contents:"
          ls -la "${CACHE_DIR}"

          echo ""
          echo "State files:"
          cat ${CACHE_DIR}/state*.txt

      - name: Save Cache
        uses: ./.github/actions/xahau-actions-cache-save
        with:
          path: ${{ env.CACHE_DIR }}
          key: ${{ env.CACHE_KEY }}
          s3-bucket: ${{ env.S3_BUCKET }}
          s3-region: ${{ env.S3_REGION }}
          use-deltas: 'true'
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Validate S3 State (After Save)
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
          DETECTED_STATE: ${{ steps.state.outputs.detected_state }}
        run: |
          echo "=========================================="
          echo "S3 State Validation (After Save)"
          echo "=========================================="

          # Calculate next state (what we just saved)
          NEXT_STATE=$((DETECTED_STATE + 1))
          echo "Saved state: ${NEXT_STATE}"
          echo ""

          # Check if base exists
          if aws s3 ls "s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst" --region "${S3_REGION}" >/dev/null 2>&1; then
            BASE_SIZE=$(aws s3 ls "s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst" --region "${S3_REGION}" | awk '{print $3}')
            echo "✓ Base exists: ${CACHE_KEY}-base.tar.zst (${BASE_SIZE} bytes)"
          else
            echo "❌ ERROR: Base should exist after save"
            exit 1
          fi

          # List deltas
          echo ""
          echo "Delta layers:"
          DELTAS=$(aws s3 ls "s3://${S3_BUCKET}/" --region "${S3_REGION}" | grep "${CACHE_KEY}-delta-" || echo "")
          if [ -n "${DELTAS}" ]; then
            echo "${DELTAS}"
            DELTA_COUNT=$(echo "${DELTAS}" | wc -l)
          else
            echo "(none)"
            DELTA_COUNT=0
          fi

          # Validate S3 state
          echo ""
          if [ "${DETECTED_STATE}" -eq 0 ]; then
            # Saved state 1 from bootstrap (state 0 → 1)
            if [ "${DELTA_COUNT}" -ne 0 ]; then
              echo "⚠️ WARNING: Bootstrap (state 1) should have 0 deltas, found ${DELTA_COUNT}"
            else
              echo "✓ State 1 saved: base exists, 0 deltas"
            fi
          else
            # Saved delta (state N+1)
            if [ "${DELTA_COUNT}" -ne 1 ]; then
              echo "⚠️ WARNING: State ${NEXT_STATE} expects 1 delta (inline cleanup), found ${DELTA_COUNT}"
              echo "This might be OK if multiple builds ran concurrently"
            else
              echo "✓ State ${NEXT_STATE} saved: base + 1 delta (old deltas cleaned)"
            fi
          fi

          echo ""
          echo "=========================================="
          echo "✅ State ${DETECTED_STATE} → ${NEXT_STATE} Complete!"
          echo "=========================================="
          echo ""
          echo "Next commit will auto-detect state ${NEXT_STATE}"
          echo ""
          echo "Options:"
          echo " # Normal (auto-advance)"
          echo " git commit -m 'continue testing'"
          echo ""
          echo " # With assertion (validate state)"
          echo " git commit -m 'test delta [state:${NEXT_STATE}]'"
          echo ""
          echo " # Clear cache and restart"
          echo " git commit -m 'fresh start [ci-clear-cache]'"
          echo ""
          echo " # Jump to specific state"
          echo " git commit -m 'jump to state 3 [start-state:3]'"
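
Read end to end, the workflow implements a simple ratchet. A hedged walkthrough of three consecutive pushes on a fresh key (assuming the delta-aware save implied by the `use-deltas` input above — the save action earlier in this diff uploads only the base):

# push 1: restore finds nothing -> bootstrap mode, empty ${CACHE_DIR};
#         detect state 0, create state1.txt; save uploads <key>-base.tar.zst
#         (S3 afterwards: base, 0 deltas)
# push 2: restore base -> detect state 1, create state2.txt;
#         save uploads <key>-delta-<id>.tar.zst
#         (S3 afterwards: base + 1 delta)
# push 3: restore base + delta -> detect state 2, create state3.txt;
#         save replaces the previous delta (inline cleanup)
#         (S3 afterwards: still base + 1 delta)
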
.github/workflows/test-overlayfs-delta.yml.disabled (vendored, new file, 182 lines)
@@ -0,0 +1,182 @@
name: Test OverlayFS Delta Extraction

on:
  push:
    branches: ["*"]
  workflow_dispatch:

jobs:
  test-overlayfs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      # - name: Test encrypted secrets (decrypt test message)
      #   run: |
      #     echo "========================================"
      #     echo "TESTING ENCRYPTED SECRETS"
      #     echo "========================================"
      #     echo ""
      #     echo "Decrypting test message from .github/secrets/test-message.gpg"
      #     echo "Using encryption key from GitHub Secrets..."
      #     echo ""
      #
      #     # Decrypt using key from GitHub Secrets
      #     echo "${{ secrets.TEST_ENCRYPTION_KEY }}" | \
      #       gpg --batch --yes --passphrase-fd 0 \
      #       --decrypt .github/secrets/test-message.gpg
      #
      #     echo ""
      #     echo "========================================"
      #     echo "If you see the success message above,"
      #     echo "then encrypted secrets work! 🎉"
      #     echo "========================================"
      #     echo ""

      - name: Setup OverlayFS layers
        run: |
          echo "=== Creating directory structure ==="
          mkdir -p /tmp/test/{base,delta,upper,work,merged}

          echo "=== Creating base layer files ==="
          echo "base file 1" > /tmp/test/base/file1.txt
          echo "base file 2" > /tmp/test/base/file2.txt
          echo "base file 3" > /tmp/test/base/file3.txt
          mkdir -p /tmp/test/base/subdir
          echo "base subdir file" > /tmp/test/base/subdir/file.txt

          echo "=== Base layer contents ==="
          find /tmp/test/base -type f -exec sh -c 'echo "{}:"; cat "{}"' \;

          echo "=== Mounting OverlayFS ==="
          sudo mount -t overlay overlay \
            -o lowerdir=/tmp/test/base,upperdir=/tmp/test/upper,workdir=/tmp/test/work \
            /tmp/test/merged

          echo "=== Mounted successfully ==="
          mount | grep overlay

      - name: Verify merged view shows base files
        run: |
          echo "=== Contents of /merged (should show base files) ==="
          ls -R /tmp/test/merged
          find /tmp/test/merged -type f -exec sh -c 'echo "{}:"; cat "{}"' \;

      - name: Make changes via merged layer
        run: |
          echo "=== Making changes via /merged ==="

          # Overwrite existing file
          echo "MODIFIED file 2" > /tmp/test/merged/file2.txt
          echo "Modified file2.txt"

          # Create new file
          echo "NEW file 4" > /tmp/test/merged/file4.txt
          echo "Created new file4.txt"

          # Create new directory with file
          mkdir -p /tmp/test/merged/newdir
          echo "NEW file in new dir" > /tmp/test/merged/newdir/newfile.txt
          echo "Created newdir/newfile.txt"

          # Add file to existing directory
          echo "NEW file in existing subdir" > /tmp/test/merged/subdir/newfile.txt
          echo "Created subdir/newfile.txt"

          echo "=== Changes complete ==="

      - name: Show the delta (upperdir)
        run: |
          echo "========================================"
          echo "THE DELTA (only changes in /upper):"
          echo "========================================"

          if [ -z "$(ls -A /tmp/test/upper)" ]; then
            echo "Upper directory is empty - no changes detected"
          else
            echo "Upper directory structure:"
            ls -R /tmp/test/upper
            echo ""
            echo "Upper directory files with content:"
            find /tmp/test/upper -type f -exec sh -c 'echo "---"; echo "FILE: {}"; cat "{}"; echo ""' \;

            echo "========================================"
            echo "SIZE OF DELTA:"
            du -sh /tmp/test/upper
            echo "========================================"
          fi

      - name: Compare base vs upper vs merged
        run: |
          echo "========================================"
          echo "COMPARISON:"
          echo "========================================"

          echo "BASE layer (original, untouched):"
          ls -la /tmp/test/base/
          echo ""

          echo "UPPER layer (DELTA - only changes):"
          ls -la /tmp/test/upper/
          echo ""

          echo "MERGED layer (unified view = base + upper):"
          ls -la /tmp/test/merged/
          echo ""

          echo "========================================"
          echo "PROOF: Upper dir contains ONLY the delta!"
          echo "========================================"

      - name: Simulate tarball creation (what we'd upload)
        run: |
          echo "=== Creating tarball of delta ==="
          tar -czf /tmp/delta.tar.gz -C /tmp/test/upper .

          echo "Delta tarball size:"
          ls -lh /tmp/delta.tar.gz

          echo ""
          echo "Delta tarball contents:"
          tar -tzf /tmp/delta.tar.gz

          echo ""
          echo "========================================"
          echo "This is what we'd upload to S3/rsync!"
          echo "Only ~few KB instead of entire cache!"
          echo "========================================"

      - name: Upload delta to S3 (actual test!)
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
        run: |
          echo "========================================"
          echo "UPLOADING TO S3"
          echo "========================================"

          # Upload the delta tarball
          aws s3 cp /tmp/delta.tar.gz \
            s3://xahaud-github-actions-cache-niq/hello-world-first-test.tar.gz \
            --region us-east-1

          echo ""
          echo "✅ Successfully uploaded to S3!"
          echo "File: s3://xahaud-github-actions-cache-niq/hello-world-first-test.tar.gz"
          echo ""

          # Verify it exists
          echo "Verifying upload..."
          aws s3 ls s3://xahaud-github-actions-cache-niq/hello-world-first-test.tar.gz --region us-east-1

          echo ""
          echo "========================================"
          echo "S3 upload test complete! 🚀"
          echo "========================================"

      - name: Cleanup
        if: always()
        run: |
          echo "=== Unmounting OverlayFS ==="
          sudo umount /tmp/test/merged || true
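
The workflow above only exercises the capture side. The restore side — not part of this diff — would layer the tarballs in order; a minimal sketch under stated assumptions (gzip tarballs as above, a hypothetical base tarball at /tmp/base.tar.gz, and no handling of OverlayFS whiteouts, which mark deletions and would need special-casing in a real implementation):

# Hypothetical restore-side sketch - not part of this commit range
- name: Rebuild merged view from base + delta
  run: |
    set -euo pipefail
    mkdir -p /tmp/test/restored

    # 1. Extract the full base snapshot first (base.tar.gz is hypothetical)
    tar -xzf /tmp/base.tar.gz -C /tmp/test/restored

    # 2. Download the delta uploaded above and extract it on top;
    #    identical paths from the delta overwrite the base copies,
    #    reproducing the merged view without mounting OverlayFS
    aws s3 cp \
      s3://xahaud-github-actions-cache-niq/hello-world-first-test.tar.gz \
      /tmp/delta.tar.gz --region us-east-1
    tar -xzf /tmp/delta.tar.gz -C /tmp/test/restored
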
@@ -78,14 +78,6 @@ jobs:
      - name: Install ccache
        run: brew install ccache

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content
          is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}

      - name: Check environment
        run: |
          echo "PATH:"
@@ -98,32 +90,12 @@ jobs:
          echo "---- Full Environment ----"
          env

      - name: Configure Conan
      - name: Detect compiler version
        id: detect-compiler
        run: |
          # Create the default profile directory if it doesn't exist
          mkdir -p ~/.conan2/profiles

          # Detect compiler version
          COMPILER_VERSION=$(clang --version | grep -oE 'version [0-9]+' | grep -oE '[0-9]+')

          # Create profile with our specific settings
          cat > ~/.conan2/profiles/default <<EOF
          [settings]
          arch=armv8
          build_type=Release
          compiler=apple-clang
          compiler.cppstd=20
          compiler.libcxx=libc++
          compiler.version=${COMPILER_VERSION}
          os=Macos

          [conf]
          # Workaround for gRPC with newer Apple Clang
          tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
          EOF

          # Display profile for verification
          conan profile show
          echo "compiler_version=${COMPILER_VERSION}" >> $GITHUB_OUTPUT
          echo "Detected Apple Clang version: ${COMPILER_VERSION}"

      - name: Install dependencies
        uses: ./.github/actions/xahau-ga-dependencies
@@ -133,6 +105,13 @@ jobs:
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          os: Macos
          arch: armv8
          compiler: apple-clang
          compiler_version: ${{ steps.detect-compiler.outputs.compiler_version }}
          stdlib: libcxx
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
@@ -143,6 +122,9 @@ jobs:
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          stdlib: libcxx
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Test
        run: |
.github/workflows/xahau-ga-nix.yml (vendored, 54 lines changed)
@@ -2,7 +2,7 @@ name: Nix - GA Runner

on:
  push:
    branches: ["dev", "candidate", "release"]
    branches: ["dev", "candidate", "release", "nd-experiment-overlayfs-2025-10-29"]
  pull_request:
    branches: ["dev", "candidate", "release"]
  schedule:
@@ -156,7 +156,7 @@ jobs:
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.
      CACHE_VERSION: 2
      CACHE_VERSION: 3
      MAIN_BRANCH_NAME: dev
    steps:
      - name: Checkout
@@ -231,48 +231,6 @@ jobs:
          # Install Conan 2
          pip install --upgrade "conan>=2.0,<3"

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content
          is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}

      - name: Configure Conan
        run: |
          # Create the default profile directory if it doesn't exist
          mkdir -p ~/.conan2/profiles

          # Determine the correct libcxx based on stdlib parameter
          if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
            LIBCXX="libc++"
          else
            LIBCXX="libstdc++11"
          fi

          # Create profile with our specific settings
          cat > ~/.conan2/profiles/default <<EOF
          [settings]
          arch=x86_64
          build_type=${{ matrix.configuration }}
          compiler=${{ matrix.compiler }}
          compiler.cppstd=20
          compiler.libcxx=${LIBCXX}
          compiler.version=${{ matrix.compiler_version }}
          os=Linux

          [buildenv]
          CC=/usr/bin/${{ matrix.cc }}
          CXX=/usr/bin/${{ matrix.cxx }}

          [conf]
          tools.build:compiler_executables={"c": "/usr/bin/${{ matrix.cc }}", "cpp": "/usr/bin/${{ matrix.cxx }}"}
          EOF

          # Display profile for verification
          conan profile show

      - name: Check environment
        run: |
          echo "PATH:"
@@ -293,7 +251,13 @@ jobs:
          compiler-id: ${{ matrix.compiler_id }}
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          compiler: ${{ matrix.compiler }}
          compiler_version: ${{ matrix.compiler_version }}
          cc: ${{ matrix.cc }}
          cxx: ${{ matrix.cxx }}
          stdlib: ${{ matrix.stdlib }}
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
@@ -308,6 +272,8 @@ jobs:
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          stdlib: ${{ matrix.stdlib }}
          clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Set artifact name
        id: set-artifact-name
@@ -72,15 +72,15 @@ It generates many files of [results](results):
  desired as described above. In a perfect repo, this file will be
  empty.
  This file is committed to the repo, and is used by the [levelization
  Github workflow](../../.github/workflows/levelization.yml) to validate
  Github workflow](../../.github/workflows/levelization.yml.disabled) to validate
  that nothing changed.
* [`ordering.txt`](results/ordering.txt): A list showing relationships
  between modules where there are no loops as they actually exist, as
  opposed to how they are desired as described above.
  This file is committed to the repo, and is used by the [levelization
  Github workflow](../../.github/workflows/levelization.yml) to validate
  Github workflow](../../.github/workflows/levelization.yml.disabled) to validate
  that nothing changed.
* [`levelization.yml`](../../.github/workflows/levelization.yml)
* [`levelization.yml`](../../.github/workflows/levelization.yml.disabled)
  Github Actions workflow to test that levelization loops haven't
  changed. Unfortunately, if changes are detected, it can't tell if
  they are improvements or not, so if you have resolved any issues or