Mirror of https://github.com/Xahau/xahaud.git (synced 2025-11-19 18:15:50 +00:00)

Compare commits: nd-experim… → dev
3 commits: 4a65401448, 8bcebdea42, 4cc63c028a
.github/actions/xahau-actions-cache-restore/action.yml (deleted, 211 lines)
@@ -1,211 +0,0 @@

name: 'Xahau Cache Restore (S3)'
description: 'Drop-in replacement for actions/cache/restore using S3 storage'

inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache (currently only a single path is supported)'
    required: true
  key:
    description: 'An explicit key for restoring the cache'
    required: true
  restore-keys:
    description: 'An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key'
    required: false
    default: ''
  s3-bucket:
    description: 'S3 bucket name for cache storage'
    required: false
    default: 'xahaud-github-actions-cache-niq'
  s3-region:
    description: 'S3 region'
    required: false
    default: 'us-east-1'
  fail-on-cache-miss:
    description: 'Fail the workflow if the cache entry is not found'
    required: false
    default: 'false'
  lookup-only:
    description: 'Check if a cache entry exists for the given input(s) without downloading it'
    required: false
    default: 'false'
  # Note: Composite actions can't access secrets.* directly - must be passed from workflow
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 access'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 access'
    required: true

outputs:
  cache-hit:
    description: 'A boolean value indicating whether an exact match was found for the primary key'
    value: ${{ steps.restore-cache.outputs.cache-hit }}
  cache-primary-key:
    description: 'The primary key passed to the action (always the input key; compare with cache-matched-key)'
    value: ${{ steps.restore-cache.outputs.cache-primary-key }}
  cache-matched-key:
    description: 'The key that was used to restore the cache (exact or prefix match)'
    value: ${{ steps.restore-cache.outputs.cache-matched-key }}

runs:
  using: 'composite'
  steps:
    - name: Restore cache from S3
      id: restore-cache
      shell: bash
      env:
        AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
        AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
        S3_BUCKET: ${{ inputs.s3-bucket }}
        S3_REGION: ${{ inputs.s3-region }}
        CACHE_KEY: ${{ inputs.key }}
        RESTORE_KEYS: ${{ inputs.restore-keys }}
        TARGET_PATH: ${{ inputs.path }}
        FAIL_ON_MISS: ${{ inputs.fail-on-cache-miss }}
        LOOKUP_ONLY: ${{ inputs.lookup-only }}
      run: |
        set -euo pipefail

        echo "=========================================="
        echo "Xahau Cache Restore (S3)"
        echo "=========================================="
        echo "Target path: ${TARGET_PATH}"
        echo "Cache key: ${CACHE_KEY}"
        echo "S3 bucket: s3://${S3_BUCKET}"
        echo ""

        # Normalize target path (expand tilde and resolve to absolute path)
        if [[ "${TARGET_PATH}" == ~* ]]; then
          TARGET_PATH="${HOME}${TARGET_PATH:1}"
        fi
        TARGET_PATH=$(realpath -m "${TARGET_PATH}")
        echo "Normalized target path: ${TARGET_PATH}"
        echo ""

        # Function to try restoring a cache key
        try_restore_key() {
          local key=$1
          local s3_key="s3://${S3_BUCKET}/${key}-base.tar.zst"

          echo "Checking for key: ${key}"

          if aws s3 ls "${s3_key}" --region "${S3_REGION}" >/dev/null 2>&1; then
            echo "✓ Found cache: ${s3_key}"
            return 0
          else
            echo "✗ Not found: ${key}"
            return 1
          fi
        }

        # Try exact match first
        MATCHED_KEY=""
        EXACT_MATCH="false"

        if try_restore_key "${CACHE_KEY}"; then
          MATCHED_KEY="${CACHE_KEY}"
          EXACT_MATCH="true"
          echo ""
          echo "🎯 Exact cache hit for key: ${CACHE_KEY}"
        else
          # Try restore-keys (prefix matching)
          if [ -n "${RESTORE_KEYS}" ]; then
            echo ""
            echo "Primary key not found, trying restore-keys..."

            while IFS= read -r restore_key; do
              [ -z "${restore_key}" ] && continue
              restore_key=$(echo "${restore_key}" | xargs)

              if try_restore_key "${restore_key}"; then
                MATCHED_KEY="${restore_key}"
                EXACT_MATCH="false"
                echo ""
                echo "✓ Cache restored from fallback key: ${restore_key}"
                break
              fi
            done <<< "${RESTORE_KEYS}"
          fi
        fi

        # Check if we found anything
        if [ -z "${MATCHED_KEY}" ]; then
          echo ""
          echo "❌ No cache found for key: ${CACHE_KEY}"

          if [ "${FAIL_ON_MISS}" = "true" ]; then
            echo "fail-on-cache-miss is enabled, failing workflow"
            exit 1
          fi

          # Set outputs for cache miss
          echo "cache-hit=false" >> $GITHUB_OUTPUT
          echo "cache-primary-key=" >> $GITHUB_OUTPUT
          echo "cache-matched-key=" >> $GITHUB_OUTPUT

          # Create empty cache directory
          mkdir -p "${TARGET_PATH}"

          echo ""
          echo "=========================================="
          echo "Cache restore completed (bootstrap mode)"
          echo "Created empty cache directory: ${TARGET_PATH}"
          echo "=========================================="
          exit 0
        fi

        # If lookup-only, we're done
        if [ "${LOOKUP_ONLY}" = "true" ]; then
          echo "cache-hit=${EXACT_MATCH}" >> $GITHUB_OUTPUT
          echo "cache-primary-key=${CACHE_KEY}" >> $GITHUB_OUTPUT
          echo "cache-matched-key=${MATCHED_KEY}" >> $GITHUB_OUTPUT

          echo ""
          echo "=========================================="
          echo "Cache lookup completed (lookup-only mode)"
          echo "Cache exists: ${MATCHED_KEY}"
          echo "=========================================="
          exit 0
        fi

        # Download and extract cache
        S3_KEY="s3://${S3_BUCKET}/${MATCHED_KEY}-base.tar.zst"
        TEMP_TARBALL="/tmp/xahau-cache-restore-$$.tar.zst"

        echo ""
        echo "Downloading cache..."
        aws s3 cp "${S3_KEY}" "${TEMP_TARBALL}" --region "${S3_REGION}"

        TARBALL_SIZE=$(du -h "${TEMP_TARBALL}" | cut -f1)
        echo "✓ Downloaded: ${TARBALL_SIZE}"

        # Create parent directory if needed
        mkdir -p "$(dirname "${TARGET_PATH}")"

        # Remove existing target if it exists
        if [ -e "${TARGET_PATH}" ]; then
          echo "Removing existing target: ${TARGET_PATH}"
          rm -rf "${TARGET_PATH}"
        fi

        # Create target directory and extract
        mkdir -p "${TARGET_PATH}"
        echo ""
        echo "Extracting cache..."
        zstd -d -c "${TEMP_TARBALL}" | tar -xf - -C "${TARGET_PATH}"
        echo "✓ Cache extracted to: ${TARGET_PATH}"

        # Cleanup
        rm -f "${TEMP_TARBALL}"

        # Set outputs
        echo "cache-hit=${EXACT_MATCH}" >> $GITHUB_OUTPUT
        echo "cache-primary-key=${CACHE_KEY}" >> $GITHUB_OUTPUT
        echo "cache-matched-key=${MATCHED_KEY}" >> $GITHUB_OUTPUT

        echo ""
        echo "=========================================="
        echo "Cache restore completed successfully"
        echo "Cache hit: ${EXACT_MATCH}"
        echo "Matched key: ${MATCHED_KEY}"
        echo "=========================================="
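For reference, a workflow job could invoke this composite action much like actions/cache/restore (after an actions/checkout step, since the action lives in the repo). A minimal sketch with illustrative path and key values; the secrets names are the ones used by the workflows later in this diff:

    - name: Restore ccache from S3
      id: s3-cache
      uses: ./.github/actions/xahau-actions-cache-restore
      with:
        path: ~/.ccache
        key: ${{ runner.os }}-ccache-v1-gcc-13-Release-dev   # illustrative key
        restore-keys: |
          ${{ runner.os }}-ccache-v1-gcc-13-Release-
        aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
        aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

    - name: Report cache result
      shell: bash
      run: echo "Exact hit: ${{ steps.s3-cache.outputs.cache-hit }}, matched: ${{ steps.s3-cache.outputs.cache-matched-key }}"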
.github/actions/xahau-actions-cache-save/action.yml (vendored; deleted, 110 lines)
@@ -1,110 +0,0 @@

name: 'Xahau Cache Save (S3)'
description: 'Drop-in replacement for actions/cache/save using S3 storage'

inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache (currently only a single path is supported)'
    required: true
  key:
    description: 'An explicit key for saving the cache'
    required: true
  s3-bucket:
    description: 'S3 bucket name for cache storage'
    required: false
    default: 'xahaud-github-actions-cache-niq'
  s3-region:
    description: 'S3 region'
    required: false
    default: 'us-east-1'
  # Note: Composite actions can't access secrets.* directly - must be passed from workflow
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 access'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 access'
    required: true

runs:
  using: 'composite'
  steps:
    - name: Save cache to S3
      shell: bash
      env:
        AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
        AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
        S3_BUCKET: ${{ inputs.s3-bucket }}
        S3_REGION: ${{ inputs.s3-region }}
        CACHE_KEY: ${{ inputs.key }}
        TARGET_PATH: ${{ inputs.path }}
      run: |
        set -euo pipefail

        echo "=========================================="
        echo "Xahau Cache Save (S3)"
        echo "=========================================="
        echo "Target path: ${TARGET_PATH}"
        echo "Cache key: ${CACHE_KEY}"
        echo "S3 bucket: s3://${S3_BUCKET}"
        echo ""

        # Normalize target path (expand tilde; unlike the restore action, no realpath here)
        if [[ "${TARGET_PATH}" == ~* ]]; then
          TARGET_PATH="${HOME}${TARGET_PATH:1}"
        fi
        echo "Normalized target path: ${TARGET_PATH}"
        echo ""

        # Check if target directory exists
        if [ ! -d "${TARGET_PATH}" ]; then
          echo "⚠️ Target directory does not exist: ${TARGET_PATH}"
          echo "Skipping cache save."
          exit 0
        fi

        # Use static base name (one base per key, immutable)
        S3_BASE_KEY="s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst"

        # Check if base already exists (immutability - first write wins)
        if aws s3 ls "${S3_BASE_KEY}" --region "${S3_REGION}" >/dev/null 2>&1; then
          echo "⚠️ Cache already exists: ${S3_BASE_KEY}"
          echo "Skipping upload (immutability - first write wins, like GitHub Actions)"
          echo ""
          echo "=========================================="
          echo "Cache save completed (already exists)"
          echo "=========================================="
          exit 0
        fi

        # Create tarball
        BASE_TARBALL="/tmp/xahau-cache-base-$$.tar.zst"

        echo "Creating cache tarball..."
        tar -cf - -C "${TARGET_PATH}" . | zstd -3 -T0 -q -o "${BASE_TARBALL}"

        BASE_SIZE=$(du -h "${BASE_TARBALL}" | cut -f1)
        echo "✓ Cache tarball created: ${BASE_SIZE}"
        echo ""

        # Upload to S3
        echo "Uploading cache to S3..."
        echo "  Key: ${CACHE_KEY}-base.tar.zst"

        aws s3api put-object \
          --bucket "${S3_BUCKET}" \
          --key "${CACHE_KEY}-base.tar.zst" \
          --body "${BASE_TARBALL}" \
          --tagging 'type=base' \
          --region "${S3_REGION}" \
          >/dev/null

        echo "✓ Uploaded: ${S3_BASE_KEY}"

        # Cleanup
        rm -f "${BASE_TARBALL}"

        echo ""
        echo "=========================================="
        echo "Cache save completed successfully"
        echo "Cache size: ${BASE_SIZE}"
        echo "Cache key: ${CACHE_KEY}"
        echo "=========================================="
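A sketch of the paired usage: because the save side is immutable (first write wins), saving under the cache-primary-key output of the restore step is a harmless no-op whenever that key already exists in the bucket. Values are illustrative and assume the restore step shown earlier:

    - name: Save ccache to S3
      if: always()
      uses: ./.github/actions/xahau-actions-cache-save
      with:
        path: ~/.ccache
        key: ${{ steps.s3-cache.outputs.cache-primary-key }}  # from the restore step above
        aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
        aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}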
.github/actions/xahau-configure-ccache/action.yml (vendored; new file, 63 lines)
@@ -0,0 +1,63 @@

name: 'Configure ccache'
description: 'Sets up ccache with consistent configuration'

inputs:
  max_size:
    description: 'Maximum cache size'
    required: false
    default: '2G'
  hash_dir:
    description: 'Whether to include directory paths in hash'
    required: false
    default: 'true'
  compiler_check:
    description: 'How to check compiler for changes'
    required: false
    default: 'content'
  is_main_branch:
    description: 'Whether the current branch is the main branch'
    required: false
    default: 'false'
  main_cache_dir:
    description: 'Path to the main branch cache directory'
    required: false
    default: '~/.ccache-main'
  current_cache_dir:
    description: 'Path to the current branch cache directory'
    required: false
    default: '~/.ccache-current'

runs:
  using: 'composite'
  steps:
    - name: Configure ccache
      shell: bash
      run: |
        # Create cache directories
        mkdir -p ${{ inputs.main_cache_dir }} ${{ inputs.current_cache_dir }}

        # Set compiler check globally
        ccache -o compiler_check=${{ inputs.compiler_check }}

        # Use a single config file location
        mkdir -p ~/.ccache
        export CONF_PATH="$HOME/.ccache/ccache.conf"

        # Apply common settings
        echo "max_size = ${{ inputs.max_size }}" > "$CONF_PATH"
        echo "hash_dir = ${{ inputs.hash_dir }}" >> "$CONF_PATH"
        echo "compiler_check = ${{ inputs.compiler_check }}" >> "$CONF_PATH"

        if [ "${{ inputs.is_main_branch }}" == "true" ]; then
          # Main branch: use main branch cache
          ccache --set-config=cache_dir="${{ inputs.main_cache_dir }}"
          echo "CCACHE_DIR=${{ inputs.main_cache_dir }}" >> $GITHUB_ENV
        else
          # Feature branch: use current branch cache with main as secondary
          ccache --set-config=cache_dir="${{ inputs.current_cache_dir }}"
          ccache --set-config=secondary_storage="file:${{ inputs.main_cache_dir }}"
          echo "CCACHE_DIR=${{ inputs.current_cache_dir }}" >> $GITHUB_ENV
        fi

        ccache -p  # Print config for verification
        ccache -z  # Zero statistics before the build
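On a feature branch, the effective configuration this step produces would look roughly like the following (a sketch assuming the default input values, not captured output):

    # ~/.ccache/ccache.conf as written by the step
    max_size = 2G
    hash_dir = true
    compiler_check = content

    # plus, set via ccache --set-config on a feature branch:
    cache_dir = ~/.ccache-current
    secondary_storage = file:~/.ccache-main

With this split, feature-branch builds write compile results into their own cache directory while still getting hits from (and, unless the secondary storage is marked read-only, also populating) the main-branch cache through ccache's secondary storage mechanism.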
.github/actions/xahau-ga-build/action.yml (vendored; 121 lines changed)

@@ -47,24 +47,6 @@ inputs:
     description: 'GCC version to use for Clang toolchain (e.g. 11, 13)'
     required: false
     default: ''
-  ccache_max_size:
-    description: 'Maximum ccache size'
-    required: false
-    default: '2G'
-  ccache_hash_dir:
-    description: 'Whether to include directory paths in hash'
-    required: false
-    default: 'true'
-  ccache_compiler_check:
-    description: 'How to check compiler for changes'
-    required: false
-    default: 'content'
-  aws-access-key-id:
-    description: 'AWS Access Key ID for S3 cache storage'
-    required: true
-  aws-secret-access-key:
-    description: 'AWS Secret Access Key for S3 cache storage'
-    required: true
-
 runs:
   using: 'composite'

@@ -77,44 +59,28 @@ runs:
         SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
         echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT

-    - name: Restore ccache directory
+    - name: Restore ccache directory for default branch
       if: inputs.ccache_enabled == 'true'
       id: ccache-restore
-      uses: ./.github/actions/xahau-actions-cache-restore
+      uses: actions/cache/restore@v4
       with:
-        path: ~/.ccache
+        path: ~/.ccache-main
+        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
+        restore-keys: |
+          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
+          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
+
+    - name: Restore ccache directory for current branch
+      if: inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
+      id: ccache-restore-current-branch
+      uses: actions/cache/restore@v4
+      with:
+        path: ~/.ccache-current
         key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
         restore-keys: |
           ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
           ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
           ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
-        aws-access-key-id: ${{ inputs.aws-access-key-id }}
-        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
-
-    - name: Configure ccache
-      if: inputs.ccache_enabled == 'true'
-      shell: bash
-      run: |
-        # Use ccache's default cache_dir (~/.ccache) - don't override it
-        # This avoids tilde expansion issues when setting it explicitly
-
-        # Create cache directory using ccache's default
-        mkdir -p ~/.ccache
-
-        # Configure ccache settings (but NOT cache_dir - use default)
-        # This overwrites any cached config to ensure fresh configuration
-        ccache --set-config=max_size=${{ inputs.ccache_max_size }}
-        ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
-        ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}
-
-        # Note: Not setting CCACHE_DIR - let ccache use its default (~/.ccache)
-
-        # Print config for verification
-        echo "=== ccache configuration ==="
-        ccache -p
-
-        # Zero statistics before the build
-        ccache -z

     - name: Configure project
       shell: bash

@@ -130,27 +96,14 @@ runs:
         if [ -n "${{ inputs.cxx }}" ]; then
           export CXX="${{ inputs.cxx }}"
         fi

-        # Create wrapper toolchain that overlays ccache on top of Conan's toolchain
-        # This enables ccache for the main app build without affecting Conan dependency builds
+        # Configure ccache launcher args
+        CCACHE_ARGS=""
         if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
-          cat > wrapper_toolchain.cmake <<'EOF'
-        # Include Conan's generated toolchain first (sets compiler, flags, etc.)
-        # Note: CMAKE_CURRENT_LIST_DIR is the directory containing this wrapper (.build/)
-        include(${CMAKE_CURRENT_LIST_DIR}/build/generators/conan_toolchain.cmake)
-
-        # Overlay ccache configuration for main application build
-        # This does NOT affect Conan dependency builds (already completed)
-        set(CMAKE_C_COMPILER_LAUNCHER ccache CACHE STRING "C compiler launcher" FORCE)
-        set(CMAKE_CXX_COMPILER_LAUNCHER ccache CACHE STRING "C++ compiler launcher" FORCE)
-        EOF
-          TOOLCHAIN_FILE="wrapper_toolchain.cmake"
-          echo "✅ Created wrapper toolchain with ccache enabled"
-        else
-          TOOLCHAIN_FILE="build/generators/conan_toolchain.cmake"
-          echo "ℹ️ Using Conan toolchain directly (ccache disabled)"
+          CCACHE_ARGS="-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
         fi

         # Configure C++ standard library if specified
         # libstdcxx used for clang-14/16 to work around missing lexicographical_compare_three_way in libc++
         # libcxx can be used with clang-17+ which has full C++20 support

@@ -190,36 +143,32 @@ runs:
         # So we get: .build/build/generators/ with our non-standard folder name
         cmake .. \
           -G "${{ inputs.generator }}" \
+          $CCACHE_ARGS \
           ${CMAKE_CXX_FLAGS:+-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"} \
-          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \
+          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
           -DCMAKE_BUILD_TYPE=${{ inputs.configuration }}

-    - name: Show ccache config before build
-      if: inputs.ccache_enabled == 'true'
-      shell: bash
-      run: |
-        echo "=========================================="
-        echo "ccache configuration before build"
-        echo "=========================================="
-        ccache -p
-        echo ""
-
     - name: Build project
       shell: bash
       run: |
         cd ${{ inputs.build_dir }}
-        cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) -- -v
+        cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc)

     - name: Show ccache statistics
       if: inputs.ccache_enabled == 'true'
       shell: bash
       run: ccache -s

-    - name: Save ccache directory
-      if: always() && inputs.ccache_enabled == 'true'
-      uses: ./.github/actions/xahau-actions-cache-save
+    - name: Save ccache directory for default branch
+      if: always() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
+      uses: actions/cache/save@v4
       with:
-        path: ~/.ccache
-        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
-        aws-access-key-id: ${{ inputs.aws-access-key-id }}
-        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
+        path: ~/.ccache-main
+        key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
+
+    - name: Save ccache directory for current branch
+      if: always() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
+      uses: actions/cache/save@v4
+      with:
+        path: ~/.ccache-current
+        key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}
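With the wrapper toolchain gone, enabling ccache reduces to two compiler-launcher definitions on the configure command line. Assuming an illustrative generator and build type, the expanded invocation would look like:

    cmake .. \
      -G "Ninja" \
      -DCMAKE_C_COMPILER_LAUNCHER=ccache \
      -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
      -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
      -DCMAKE_BUILD_TYPE=Release

CMAKE_<LANG>_COMPILER_LAUNCHER prefixes every compiler invocation with ccache without modifying the toolchain file, so Conan's generated toolchain can be consumed unmodified.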
.github/actions/xahau-ga-dependencies/action.yml (vendored; 127 lines changed)

@@ -25,28 +25,6 @@ inputs:
     description: 'Main branch name for restore keys'
     required: false
     default: 'dev'
-  os:
-    description: 'Operating system (Linux, Macos)'
-    required: false
-    default: 'Linux'
-  arch:
-    description: 'Architecture (x86_64, armv8)'
-    required: false
-    default: 'x86_64'
-  compiler:
-    description: 'Compiler type (gcc, clang, apple-clang)'
-    required: true
-  compiler_version:
-    description: 'Compiler version (11, 13, 14, etc.)'
-    required: true
-  cc:
-    description: 'C compiler executable (gcc-13, clang-14, etc.), empty for macOS'
-    required: false
-    default: ''
-  cxx:
-    description: 'C++ compiler executable (g++-14, clang++-14, etc.), empty for macOS'
-    required: false
-    default: ''
   stdlib:
     description: 'C++ standard library for Conan configuration (note: also in compiler-id)'
     required: true

@@ -54,12 +32,6 @@ inputs:
     options:
       - libstdcxx
       - libcxx
-  aws-access-key-id:
-    description: 'AWS Access Key ID for S3 cache storage'
-    required: true
-  aws-secret-access-key:
-    description: 'AWS Secret Access Key for S3 cache storage'
-    required: true

 outputs:
   cache-hit:

@@ -69,72 +41,47 @@ outputs:
 runs:
   using: 'composite'
   steps:
+    - name: Generate safe branch name
+      if: inputs.cache_enabled == 'true'
+      id: safe-branch
+      shell: bash
+      run: |
+        SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
+        echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
+
+    - name: Check conanfile changes
+      if: inputs.cache_enabled == 'true'
+      id: check-conanfile-changes
+      shell: bash
+      run: |
+        # Check if we're on the main branch
+        if [ "${{ github.ref_name }}" == "${{ inputs.main_branch }}" ]; then
+          echo "should-save-conan-cache=true" >> $GITHUB_OUTPUT
+        else
+          # Fetch main branch for comparison
+          git fetch origin ${{ inputs.main_branch }}
+
+          # Check if conanfile.txt or conanfile.py has changed compared to main branch
+          if git diff --quiet origin/${{ inputs.main_branch }}..HEAD -- '**/conanfile.txt' '**/conanfile.py'; then
+            echo "should-save-conan-cache=false" >> $GITHUB_OUTPUT
+          else
+            echo "should-save-conan-cache=true" >> $GITHUB_OUTPUT
+          fi
+        fi
+
     - name: Restore Conan cache
       if: inputs.cache_enabled == 'true'
       id: cache-restore-conan
-      uses: ./.github/actions/xahau-actions-cache-restore
+      uses: actions/cache/restore@v4
       with:
-        path: ~/.conan2
+        path: |
+          ~/.conan
+          ~/.conan2
         # Note: compiler-id format is compiler-version-stdlib[-gccversion]
         key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
         restore-keys: |
           ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-
           ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
-        aws-access-key-id: ${{ inputs.aws-access-key-id }}
-        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
-
-    - name: Configure Conan
-      shell: bash
-      run: |
-        # Create the default profile directory if it doesn't exist
-        mkdir -p ~/.conan2/profiles
-
-        # Determine the correct libcxx based on stdlib parameter
-        if [ "${{ inputs.stdlib }}" = "libcxx" ]; then
-          LIBCXX="libc++"
-        else
-          LIBCXX="libstdc++11"
-        fi
-
-        # Create profile with our specific settings
-        # This overwrites any cached profile to ensure fresh configuration
-        cat > ~/.conan2/profiles/default <<EOF
-        [settings]
-        arch=${{ inputs.arch }}
-        build_type=${{ inputs.configuration }}
-        compiler=${{ inputs.compiler }}
-        compiler.cppstd=20
-        compiler.libcxx=${LIBCXX}
-        compiler.version=${{ inputs.compiler_version }}
-        os=${{ inputs.os }}
-        EOF
-
-        # Add buildenv and conf sections for Linux (not needed for macOS)
-        if [ "${{ inputs.os }}" = "Linux" ] && [ -n "${{ inputs.cc }}" ]; then
-          cat >> ~/.conan2/profiles/default <<EOF
-
-        [buildenv]
-        CC=/usr/bin/${{ inputs.cc }}
-        CXX=/usr/bin/${{ inputs.cxx }}
-
-        [conf]
-        tools.build:compiler_executables={"c": "/usr/bin/${{ inputs.cc }}", "cpp": "/usr/bin/${{ inputs.cxx }}"}
-        EOF
-        fi
-
-        # Add macOS-specific conf if needed
-        if [ "${{ inputs.os }}" = "Macos" ]; then
-          cat >> ~/.conan2/profiles/default <<EOF
-
-        [conf]
-        # Workaround for gRPC with newer Apple Clang
-        tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
-        EOF
-        fi
-
-        # Display profile for verification
-        conan profile show

     - name: Export custom recipes
       shell: bash

@@ -160,10 +107,10 @@ runs:
         ..

     - name: Save Conan cache
-      if: always() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
-      uses: ./.github/actions/xahau-actions-cache-save
+      if: always() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true' && steps.check-conanfile-changes.outputs.should-save-conan-cache == 'true'
+      uses: actions/cache/save@v4
       with:
-        path: ~/.conan2
-        key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
-        aws-access-key-id: ${{ inputs.aws-access-key-id }}
-        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
+        path: |
+          ~/.conan
+          ~/.conan2
+        key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}
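For one concrete, illustrative matrix entry (gcc 13 with libstdcxx on Linux, Release), the removed Configure Conan step would have rendered ~/.conan2/profiles/default as:

    [settings]
    arch=x86_64
    build_type=Release
    compiler=gcc
    compiler.cppstd=20
    compiler.libcxx=libstdc++11
    compiler.version=13
    os=Linux

    [buildenv]
    CC=/usr/bin/gcc-13
    CXX=/usr/bin/g++-13

    [conf]
    tools.build:compiler_executables={"c": "/usr/bin/gcc-13", "cpp": "/usr/bin/g++-13"}

The same profile rendering now lives in the workflows themselves (see the xahau-ga-nix.yml hunks later in this diff), which keeps the dependencies action agnostic of compiler selection.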
.github/workflows/test-cache-actions.yml.disabled (vendored; deleted, 290 lines)
@@ -1,290 +0,0 @@

name: Test Cache Actions (State Machine)

on:
  push:
    branches: ["nd-experiment-overlayfs-*"]
  workflow_dispatch:
    inputs:
      state_assertion:
        description: 'Expected state (optional, e.g. "2" to assert state 2)'
        required: false
        type: string
        default: '1'
      start_state:
        description: 'Force specific starting state (optional, e.g. "3" to start at state 3)'
        required: false
        type: string
      clear_cache:
        description: 'Clear cache before running'
        required: false
        type: boolean
        default: false

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test-cache-state-machine:
    runs-on: ubuntu-latest
    env:
      CACHE_KEY: test-state-machine-${{ github.ref_name }}
      CACHE_DIR: /tmp/test-cache
      S3_BUCKET: xahaud-github-actions-cache-niq
      S3_REGION: us-east-1
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Parse Inputs (workflow_dispatch or commit message)
        id: parse-inputs
        run: |
          # Priority 1: workflow_dispatch inputs (manual trigger)
          STATE_ASSERTION="${{ inputs.state_assertion }}"
          START_STATE="${{ inputs.start_state }}"
          SHOULD_CLEAR="${{ inputs.clear_cache }}"

          # Priority 2: commit message tags (push event)
          if [ "${{ github.event_name }}" = "push" ]; then
            COMMIT_MSG="${{ github.event.head_commit.message }}"

            # Parse [state:N] assertion tag (optional, if not provided as input)
            if [ -z "${STATE_ASSERTION}" ] && echo "${COMMIT_MSG}" | grep -qE '\[state:[0-9]+\]'; then
              STATE_ASSERTION=$(echo "${COMMIT_MSG}" | grep -oE '\[state:[0-9]+\]' | grep -oE '[0-9]+')
              echo "State assertion found in commit: ${STATE_ASSERTION}"
            fi

            # Parse [start-state:N] force tag (optional, if not provided as input)
            if [ -z "${START_STATE}" ] && echo "${COMMIT_MSG}" | grep -qE '\[start-state:[0-9]+\]'; then
              START_STATE=$(echo "${COMMIT_MSG}" | grep -oE '\[start-state:[0-9]+\]' | grep -oE '[0-9]+')
              echo "Start state found in commit: ${START_STATE}"
            fi

            # Parse [ci-clear-cache] tag (if not provided as input)
            if [ "${SHOULD_CLEAR}" != "true" ] && echo "${COMMIT_MSG}" | grep -q '\[ci-clear-cache\]'; then
              SHOULD_CLEAR=true
              echo "Cache clear requested in commit"
            fi
          fi

          # Output final values
          echo "state_assertion=${STATE_ASSERTION}" >> "$GITHUB_OUTPUT"
          echo "start_state=${START_STATE}" >> "$GITHUB_OUTPUT"
          echo "should_clear=${SHOULD_CLEAR}" >> "$GITHUB_OUTPUT"

          # Log what we're using
          echo ""
          echo "Configuration:"
          [ -n "${STATE_ASSERTION}" ] && echo "  State assertion: ${STATE_ASSERTION}"
          [ -n "${START_STATE}" ] && echo "  Start state: ${START_STATE}"
          echo "  Clear cache: ${SHOULD_CLEAR}"

      - name: Check S3 State (Before Restore)
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
        run: |
          echo "=========================================="
          echo "S3 State Check (Before Restore)"
          echo "=========================================="
          echo "Cache key: ${CACHE_KEY}"
          echo ""

          # Check if base exists
          BASE_EXISTS=false
          if aws s3 ls "s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst" --region "${S3_REGION}" >/dev/null 2>&1; then
            BASE_EXISTS=true
          fi
          echo "Base exists: ${BASE_EXISTS}"

          # Count deltas
          DELTA_COUNT=$(aws s3 ls "s3://${S3_BUCKET}/" --region "${S3_REGION}" | grep "${CACHE_KEY}-delta-" | wc -l || echo "0")
          echo "Delta count: ${DELTA_COUNT}"

      - name: Restore Cache
        uses: ./.github/actions/xahau-actions-cache-restore
        with:
          path: ${{ env.CACHE_DIR }}
          key: ${{ env.CACHE_KEY }}
          s3-bucket: ${{ env.S3_BUCKET }}
          s3-region: ${{ env.S3_REGION }}
          use-deltas: 'true'
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Auto-Detect State and Validate
        id: state
        env:
          STATE_ASSERTION: ${{ steps.parse-inputs.outputs.state_assertion }}
          START_STATE: ${{ steps.parse-inputs.outputs.start_state }}
        run: |
          echo "=========================================="
          echo "State Detection and Validation"
          echo "=========================================="

          # Create cache directory if it doesn't exist
          mkdir -p "${CACHE_DIR}"

          # Handle [start-state:N] - force specific state
          if [ -n "${START_STATE}" ]; then
            echo "🎯 [start-state:${START_STATE}] detected - forcing state setup"

            # Clear cache and create state files 0 through START_STATE
            rm -f ${CACHE_DIR}/state*.txt 2>/dev/null || true
            for i in $(seq 0 ${START_STATE}); do
              echo "State ${i} - Forced at $(date)" > "${CACHE_DIR}/state${i}.txt"
              echo "Commit: ${{ github.sha }}" >> "${CACHE_DIR}/state${i}.txt"
            done

            DETECTED_STATE=${START_STATE}
            echo "✓ Forced to state ${DETECTED_STATE}"
          else
            # Auto-detect state by counting state files
            STATE_FILES=$(ls ${CACHE_DIR}/state*.txt 2>/dev/null | wc -l)
            DETECTED_STATE=${STATE_FILES}
            echo "Auto-detected state: ${DETECTED_STATE} (${STATE_FILES} state files)"
          fi

          # Show cache contents
          echo ""
          echo "Cache contents:"
          if [ -d "${CACHE_DIR}" ] && [ "$(ls -A ${CACHE_DIR})" ]; then
            ls -la "${CACHE_DIR}"
          else
            echo "(empty)"
          fi

          # Validate [state:N] assertion if provided
          if [ -n "${STATE_ASSERTION}" ]; then
            echo ""
            echo "Validating assertion: [state:${STATE_ASSERTION}]"
            if [ "${DETECTED_STATE}" -ne "${STATE_ASSERTION}" ]; then
              echo "❌ ERROR: State mismatch!"
              echo "  Expected (from [state:N]): ${STATE_ASSERTION}"
              echo "  Detected (from cache): ${DETECTED_STATE}"
              exit 1
            fi
            echo "✓ Assertion passed: detected == expected (${DETECTED_STATE})"
          fi

          # Output detected state for next steps
          echo "detected_state=${DETECTED_STATE}" >> "$GITHUB_OUTPUT"

          echo ""
          echo "=========================================="

      - name: Simulate Build (State Transition)
        env:
          DETECTED_STATE: ${{ steps.state.outputs.detected_state }}
        run: |
          echo "=========================================="
          echo "Simulating Build (State Transition)"
          echo "=========================================="

          # Calculate next state
          NEXT_STATE=$((DETECTED_STATE + 1))
          echo "Transitioning: State ${DETECTED_STATE} → State ${NEXT_STATE}"
          echo ""

          # Create state file for next state
          STATE_FILE="${CACHE_DIR}/state${NEXT_STATE}.txt"
          echo "State ${NEXT_STATE} - Created at $(date)" > "${STATE_FILE}"
          echo "Commit: ${{ github.sha }}" >> "${STATE_FILE}"
          echo "Message: ${{ github.event.head_commit.message }}" >> "${STATE_FILE}"

          echo "✓ Created ${STATE_FILE}"

          # Show final cache state
          echo ""
          echo "Final cache contents:"
          ls -la "${CACHE_DIR}"

          echo ""
          echo "State files:"
          cat ${CACHE_DIR}/state*.txt

      - name: Save Cache
        uses: ./.github/actions/xahau-actions-cache-save
        with:
          path: ${{ env.CACHE_DIR }}
          key: ${{ env.CACHE_KEY }}
          s3-bucket: ${{ env.S3_BUCKET }}
          s3-region: ${{ env.S3_REGION }}
          use-deltas: 'true'
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Validate S3 State (After Save)
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
          DETECTED_STATE: ${{ steps.state.outputs.detected_state }}
        run: |
          echo "=========================================="
          echo "S3 State Validation (After Save)"
          echo "=========================================="

          # Calculate next state (what we just saved)
          NEXT_STATE=$((DETECTED_STATE + 1))
          echo "Saved state: ${NEXT_STATE}"
          echo ""

          # Check if base exists
          if aws s3 ls "s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst" --region "${S3_REGION}" >/dev/null 2>&1; then
            BASE_SIZE=$(aws s3 ls "s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst" --region "${S3_REGION}" | awk '{print $3}')
            echo "✓ Base exists: ${CACHE_KEY}-base.tar.zst (${BASE_SIZE} bytes)"
          else
            echo "❌ ERROR: Base should exist after save"
            exit 1
          fi

          # List deltas
          echo ""
          echo "Delta layers:"
          DELTAS=$(aws s3 ls "s3://${S3_BUCKET}/" --region "${S3_REGION}" | grep "${CACHE_KEY}-delta-" || echo "")
          if [ -n "${DELTAS}" ]; then
            echo "${DELTAS}"
            DELTA_COUNT=$(echo "${DELTAS}" | wc -l)
          else
            echo "(none)"
            DELTA_COUNT=0
          fi

          # Validate S3 state
          echo ""
          if [ "${DETECTED_STATE}" -eq 0 ]; then
            # Saved state 1 from bootstrap (state 0 → 1)
            if [ "${DELTA_COUNT}" -ne 0 ]; then
              echo "⚠️ WARNING: Bootstrap (state 1) should have 0 deltas, found ${DELTA_COUNT}"
            else
              echo "✓ State 1 saved: base exists, 0 deltas"
            fi
          else
            # Saved delta (state N+1)
            if [ "${DELTA_COUNT}" -ne 1 ]; then
              echo "⚠️ WARNING: State ${NEXT_STATE} expects 1 delta (inline cleanup), found ${DELTA_COUNT}"
              echo "This might be OK if multiple builds ran concurrently"
            else
              echo "✓ State ${NEXT_STATE} saved: base + 1 delta (old deltas cleaned)"
            fi
          fi

          echo ""
          echo "=========================================="
          echo "✅ State ${DETECTED_STATE} → ${NEXT_STATE} Complete!"
          echo "=========================================="
          echo ""
          echo "Next commit will auto-detect state ${NEXT_STATE}"
          echo ""
          echo "Options:"
          echo "  # Normal (auto-advance)"
          echo "  git commit -m 'continue testing'"
          echo ""
          echo "  # With assertion (validate state)"
          echo "  git commit -m 'test delta [state:${NEXT_STATE}]'"
          echo ""
          echo "  # Clear cache and restart"
          echo "  git commit -m 'fresh start [ci-clear-cache]'"
          echo ""
          echo "  # Jump to specific state"
          echo "  git commit -m 'jump to state 3 [start-state:3]'"
.github/workflows/test-overlayfs-delta.yml.disabled (vendored; deleted, 182 lines)
@@ -1,182 +0,0 @@

name: Test OverlayFS Delta Extraction

on:
  push:
    branches: ["*"]
  workflow_dispatch:

jobs:
  test-overlayfs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      # - name: Test encrypted secrets (decrypt test message)
      #   run: |
      #     echo "========================================"
      #     echo "TESTING ENCRYPTED SECRETS"
      #     echo "========================================"
      #     echo ""
      #     echo "Decrypting test message from .github/secrets/test-message.gpg"
      #     echo "Using encryption key from GitHub Secrets..."
      #     echo ""
      #
      #     # Decrypt using key from GitHub Secrets
      #     echo "${{ secrets.TEST_ENCRYPTION_KEY }}" | \
      #       gpg --batch --yes --passphrase-fd 0 \
      #       --decrypt .github/secrets/test-message.gpg
      #
      #     echo ""
      #     echo "========================================"
      #     echo "If you see the success message above,"
      #     echo "then encrypted secrets work! 🎉"
      #     echo "========================================"
      #     echo ""

      - name: Setup OverlayFS layers
        run: |
          echo "=== Creating directory structure ==="
          mkdir -p /tmp/test/{base,delta,upper,work,merged}

          echo "=== Creating base layer files ==="
          echo "base file 1" > /tmp/test/base/file1.txt
          echo "base file 2" > /tmp/test/base/file2.txt
          echo "base file 3" > /tmp/test/base/file3.txt
          mkdir -p /tmp/test/base/subdir
          echo "base subdir file" > /tmp/test/base/subdir/file.txt

          echo "=== Base layer contents ==="
          find /tmp/test/base -type f -exec sh -c 'echo "{}:"; cat "{}"' \;

          echo "=== Mounting OverlayFS ==="
          sudo mount -t overlay overlay \
            -o lowerdir=/tmp/test/base,upperdir=/tmp/test/upper,workdir=/tmp/test/work \
            /tmp/test/merged

          echo "=== Mounted successfully ==="
          mount | grep overlay

      - name: Verify merged view shows base files
        run: |
          echo "=== Contents of /merged (should show base files) ==="
          ls -R /tmp/test/merged
          find /tmp/test/merged -type f -exec sh -c 'echo "{}:"; cat "{}"' \;

      - name: Make changes via merged layer
        run: |
          echo "=== Making changes via /merged ==="

          # Overwrite existing file
          echo "MODIFIED file 2" > /tmp/test/merged/file2.txt
          echo "Modified file2.txt"

          # Create new file
          echo "NEW file 4" > /tmp/test/merged/file4.txt
          echo "Created new file4.txt"

          # Create new directory with file
          mkdir -p /tmp/test/merged/newdir
          echo "NEW file in new dir" > /tmp/test/merged/newdir/newfile.txt
          echo "Created newdir/newfile.txt"

          # Add file to existing directory
          echo "NEW file in existing subdir" > /tmp/test/merged/subdir/newfile.txt
          echo "Created subdir/newfile.txt"

          echo "=== Changes complete ==="

      - name: Show the delta (upperdir)
        run: |
          echo "========================================"
          echo "THE DELTA (only changes in /upper):"
          echo "========================================"

          if [ -z "$(ls -A /tmp/test/upper)" ]; then
            echo "Upper directory is empty - no changes detected"
          else
            echo "Upper directory structure:"
            ls -R /tmp/test/upper
            echo ""
            echo "Upper directory files with content:"
            find /tmp/test/upper -type f -exec sh -c 'echo "---"; echo "FILE: {}"; cat "{}"; echo ""' \;

            echo "========================================"
            echo "SIZE OF DELTA:"
            du -sh /tmp/test/upper
            echo "========================================"
          fi

      - name: Compare base vs upper vs merged
        run: |
          echo "========================================"
          echo "COMPARISON:"
          echo "========================================"

          echo "BASE layer (original, untouched):"
          ls -la /tmp/test/base/
          echo ""

          echo "UPPER layer (DELTA - only changes):"
          ls -la /tmp/test/upper/
          echo ""

          echo "MERGED layer (unified view = base + upper):"
          ls -la /tmp/test/merged/
          echo ""

          echo "========================================"
          echo "PROOF: Upper dir contains ONLY the delta!"
          echo "========================================"

      - name: Simulate tarball creation (what we'd upload)
        run: |
          echo "=== Creating tarball of delta ==="
          tar -czf /tmp/delta.tar.gz -C /tmp/test/upper .

          echo "Delta tarball size:"
          ls -lh /tmp/delta.tar.gz

          echo ""
          echo "Delta tarball contents:"
          tar -tzf /tmp/delta.tar.gz

          echo ""
          echo "========================================"
          echo "This is what we'd upload to S3/rsync!"
          echo "Only a few KB instead of the entire cache!"
          echo "========================================"

      - name: Upload delta to S3 (actual test!)
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
        run: |
          echo "========================================"
          echo "UPLOADING TO S3"
          echo "========================================"

          # Upload the delta tarball
          aws s3 cp /tmp/delta.tar.gz \
            s3://xahaud-github-actions-cache-niq/hello-world-first-test.tar.gz \
            --region us-east-1

          echo ""
          echo "✅ Successfully uploaded to S3!"
          echo "File: s3://xahaud-github-actions-cache-niq/hello-world-first-test.tar.gz"
          echo ""

          # Verify it exists
          echo "Verifying upload..."
          aws s3 ls s3://xahaud-github-actions-cache-niq/hello-world-first-test.tar.gz --region us-east-1

          echo ""
          echo "========================================"
          echo "S3 upload test complete! 🚀"
          echo "========================================"

      - name: Cleanup
        if: always()
        run: |
          echo "=== Unmounting OverlayFS ==="
          sudo umount /tmp/test/merged || true
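This workflow exercises only the save side: extracting the upperdir and uploading it. The restore side is not shown; a minimal sketch of how a consumer could reassemble the cache, under the assumption that the delta contains no deletions (OverlayFS records deletions as whiteout device nodes, which plain tar extraction does not replay):

    # Hypothetical restore: layer the delta tarball over an extracted base.
    mkdir -p /tmp/restored
    tar -xzf /tmp/base.tar.gz -C /tmp/restored    # full base layer first
    tar -xzf /tmp/delta.tar.gz -C /tmp/restored   # delta overwrites and adds files

Handling deletions would require translating the whiteout entries (character devices with 0/0 major/minor numbers) into rm calls, which may be one reason the cache actions earlier in this diff ship only full base tarballs.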
@@ -78,6 +78,14 @@ jobs:
     - name: Install ccache
       run: brew install ccache

+    - name: Configure ccache
+      uses: ./.github/actions/xahau-configure-ccache
+      with:
+        max_size: 2G
+        hash_dir: true
+        compiler_check: content
+        is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}
+
     - name: Check environment
       run: |
         echo "PATH:"
@@ -90,12 +98,32 @@ jobs:
         echo "---- Full Environment ----"
         env

-    - name: Detect compiler version
-      id: detect-compiler
+    - name: Configure Conan
       run: |
+        # Create the default profile directory if it doesn't exist
+        mkdir -p ~/.conan2/profiles
+
+        # Detect compiler version
         COMPILER_VERSION=$(clang --version | grep -oE 'version [0-9]+' | grep -oE '[0-9]+')
-        echo "compiler_version=${COMPILER_VERSION}" >> $GITHUB_OUTPUT
-        echo "Detected Apple Clang version: ${COMPILER_VERSION}"
+
+        # Create profile with our specific settings
+        cat > ~/.conan2/profiles/default <<EOF
+        [settings]
+        arch=armv8
+        build_type=Release
+        compiler=apple-clang
+        compiler.cppstd=20
+        compiler.libcxx=libc++
+        compiler.version=${COMPILER_VERSION}
+        os=Macos
+
+        [conf]
+        # Workaround for gRPC with newer Apple Clang
+        tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
+        EOF
+
+        # Display profile for verification
+        conan profile show

     - name: Install dependencies
       uses: ./.github/actions/xahau-ga-dependencies
@@ -105,13 +133,6 @@ jobs:
         compiler-id: clang
         cache_version: ${{ env.CACHE_VERSION }}
         main_branch: ${{ env.MAIN_BRANCH_NAME }}
-        os: Macos
-        arch: armv8
-        compiler: apple-clang
-        compiler_version: ${{ steps.detect-compiler.outputs.compiler_version }}
-        stdlib: libcxx
-        aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
-        aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

     - name: Build
       uses: ./.github/actions/xahau-ga-build
@@ -122,9 +143,6 @@ jobs:
         compiler-id: clang
         cache_version: ${{ env.CACHE_VERSION }}
         main_branch: ${{ env.MAIN_BRANCH_NAME }}
-        stdlib: libcxx
-        aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
-        aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

     - name: Test
       run: |
54
.github/workflows/xahau-ga-nix.yml
vendored
54
.github/workflows/xahau-ga-nix.yml
vendored
@@ -2,7 +2,7 @@ name: Nix - GA Runner
 
 on:
   push:
-    branches: ["dev", "candidate", "release", "nd-experiment-overlayfs-2025-10-29"]
+    branches: ["dev", "candidate", "release"]
   pull_request:
     branches: ["dev", "candidate", "release"]
   schedule:
@@ -156,7 +156,7 @@ jobs:
     env:
       build_dir: .build
       # Bump this number to invalidate all caches globally.
-      CACHE_VERSION: 3
+      CACHE_VERSION: 2
       MAIN_BRANCH_NAME: dev
     steps:
       - name: Checkout
@@ -231,6 +231,48 @@ jobs:
           # Install Conan 2
           pip install --upgrade "conan>=2.0,<3"
 
+      - name: Configure ccache
+        uses: ./.github/actions/xahau-configure-ccache
+        with:
+          max_size: 2G
+          hash_dir: true
+          compiler_check: content
+          is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}
+
+      - name: Configure Conan
+        run: |
+          # Create the default profile directory if it doesn't exist
+          mkdir -p ~/.conan2/profiles
+
+          # Determine the correct libcxx based on stdlib parameter
+          if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
+            LIBCXX="libc++"
+          else
+            LIBCXX="libstdc++11"
+          fi
+
+          # Create profile with our specific settings
+          cat > ~/.conan2/profiles/default <<EOF
+          [settings]
+          arch=x86_64
+          build_type=${{ matrix.configuration }}
+          compiler=${{ matrix.compiler }}
+          compiler.cppstd=20
+          compiler.libcxx=${LIBCXX}
+          compiler.version=${{ matrix.compiler_version }}
+          os=Linux
+
+          [buildenv]
+          CC=/usr/bin/${{ matrix.cc }}
+          CXX=/usr/bin/${{ matrix.cxx }}
+
+          [conf]
+          tools.build:compiler_executables={"c": "/usr/bin/${{ matrix.cc }}", "cpp": "/usr/bin/${{ matrix.cxx }}"}
+          EOF
+
+          # Display profile for verification
+          conan profile show
+
       - name: Check environment
         run: |
           echo "PATH:"
@@ -251,13 +293,7 @@ jobs:
           compiler-id: ${{ matrix.compiler_id }}
           cache_version: ${{ env.CACHE_VERSION }}
           main_branch: ${{ env.MAIN_BRANCH_NAME }}
-          compiler: ${{ matrix.compiler }}
-          compiler_version: ${{ matrix.compiler_version }}
-          cc: ${{ matrix.cc }}
-          cxx: ${{ matrix.cxx }}
           stdlib: ${{ matrix.stdlib }}
-          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
 
       - name: Build
         uses: ./.github/actions/xahau-ga-build
@@ -272,8 +308,6 @@ jobs:
           main_branch: ${{ env.MAIN_BRANCH_NAME }}
           stdlib: ${{ matrix.stdlib }}
           clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
-          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
 
       - name: Set artifact name
         id: set-artifact-name
@@ -72,15 +72,15 @@ It generates many files of [results](results):
   desired as described above. In a perfect repo, this file will be
   empty.
   This file is committed to the repo, and is used by the [levelization
-  Github workflow](../../.github/workflows/levelization.yml.disabled) to validate
+  Github workflow](../../.github/workflows/levelization.yml) to validate
   that nothing changed.
 * [`ordering.txt`](results/ordering.txt): A list showing relationships
   between modules where there are no loops as they actually exist, as
   opposed to how they are desired as described above.
   This file is committed to the repo, and is used by the [levelization
-  Github workflow](../../.github/workflows/levelization.yml.disabled) to validate
+  Github workflow](../../.github/workflows/levelization.yml) to validate
   that nothing changed.
-* [`levelization.yml`](../../.github/workflows/levelization.yml.disabled)
+* [`levelization.yml`](../../.github/workflows/levelization.yml)
   Github Actions workflow to test that levelization loops haven't
   changed. Unfortunately, if changes are detected, it can't tell if
   they are improvements or not, so if you have resolved any issues or
@@ -1769,7 +1769,7 @@ pool.ntp.org
 # Unless an absolute path is specified, it will be considered relative to the
 # folder in which the xahaud.cfg file is located.
 [validators_file]
-validators.txt
+validators-xahau.txt
 
 # Turn down default logging to save disk space in the long run.
 # Valid values here are trace, debug, info, warning, error, and fatal
@@ -1482,9 +1482,13 @@ TxQ::accept(Application& app, OpenView& view)
     {
         uint32_t currentTime =
             view.parentCloseTime().time_since_epoch().count();
-        uint256 klStart = keylet::cron(0, AccountID(beast::zero)).key;
-        uint256 const klEnd =
-            keylet::cron(currentTime + 1, AccountID(beast::zero)).key;
+        bool fixCron = view.rules().enabled(fixCronStacking);
+        std::optional<AccountID> accountID = std::nullopt;
+        if (!fixCron)
+            accountID = AccountID(beast::zero);
+
+        uint256 klStart = keylet::cron(0, accountID).key;
+        uint256 const klEnd = keylet::cron(currentTime + 1, accountID).key;
 
         std::set<AccountID> cronAccs;
 
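Note on the TxQ::accept hunk above: pre-amendment, both scan bounds were built from AccountID(beast::zero), so their final 20 bytes held a hash of the zero account rather than a true minimum; cron entries whose account hash sorted below that value could fall outside [klStart, klEnd). With fixCronStacking enabled, std::nullopt zero-fills those bytes, making klStart the smallest possible key at timestamp 0 and klEnd the smallest possible key at currentTime + 1. Below is a minimal, self-contained sketch of that ordering argument; the key layout (a big-endian timestamp followed by a 20-byte suffix) is a deliberate simplification, and all names are illustrative rather than the codebase's:

    #include <array>
    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Simplified model: 4-byte big-endian timestamp, then a 20-byte suffix
    // (zeros for the account-less form, an account-derived hash otherwise).
    using Suffix = std::array<std::uint8_t, 20>;
    using Key = std::array<std::uint8_t, 24>;  // compares lexicographically

    Key makeKey(std::uint32_t ts, Suffix const& suffix)
    {
        Key k{};
        k[0] = static_cast<std::uint8_t>(ts >> 24);
        k[1] = static_cast<std::uint8_t>(ts >> 16);
        k[2] = static_cast<std::uint8_t>(ts >> 8);
        k[3] = static_cast<std::uint8_t>(ts);
        std::memcpy(k.data() + 4, suffix.data(), suffix.size());
        return k;
    }

    int main()
    {
        Suffix zero{};            // post-amendment: literal zeros
        Suffix hashedZeroAcct{};  // stand-in for the hash of AccountID(beast::zero)
        hashedZeroAcct.fill(0x7f);
        Suffix small{};           // some account whose hash sorts low
        small[19] = 1;

        std::uint32_t const currentTime = 1000;
        Key const oldStart = makeKey(0, hashedZeroAcct);
        Key const newStart = makeKey(0, zero);
        Key const newEnd = makeKey(currentTime + 1, zero);

        assert(makeKey(0, small) < oldStart);    // missed by the old lower bound
        assert(makeKey(0, small) >= newStart);   // covered by the new lower bound
        assert(makeKey(currentTime, hashedZeroAcct) < newEnd);       // in range
        assert(makeKey(currentTime + 1, hashedZeroAcct) >= newEnd);  // excluded
        return 0;
    }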
@@ -93,6 +93,16 @@ Cron::doApply()
     auto& view = ctx_.view();
     auto const& tx = ctx_.tx;
 
+    if (view.rules().enabled(fixCronStacking))
+    {
+        if (auto const seq = tx.getFieldU32(sfLedgerSequence);
+            seq != view.info().seq)
+        {
+            JLOG(j_.warn()) << "Cron: wrong ledger seq=" << seq;
+            return tefFAILURE;
+        }
+    }
+
     AccountID const& id = tx.getAccountID(sfOwner);
 
     auto sle = view.peek(keylet::account(id));
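Note on the Cron::doApply hunk: the new guard requires the pseudo-transaction's sfLedgerSequence to equal the sequence of the ledger applying it, so a cron execution minted for one ledger cannot also run ("stack") in a later one. A hedged sketch of that rule in isolation; the function and its parameters are illustrative, not the codebase's API:

    #include <iostream>

    // Hypothetical model of the guard's intent: a Cron pseudo-transaction
    // records the ledger sequence it was generated for; under fixCronStacking
    // it may only apply in exactly that ledger.
    bool cronMayApply(
        unsigned txLedgerSeq,
        unsigned currentLedgerSeq,
        bool fixCronStackingEnabled)
    {
        if (fixCronStackingEnabled && txLedgerSeq != currentLedgerSeq)
            return false;  // mirrors the tefFAILURE branch in Cron::doApply
        return true;
    }

    int main()
    {
        std::cout << cronMayApply(100, 100, true) << '\n';   // 1: right ledger
        std::cout << cronMayApply(100, 101, true) << '\n';   // 0: stale, rejected
        std::cout << cronMayApply(100, 101, false) << '\n';  // 1: pre-amendment
        return 0;
    }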
@@ -74,7 +74,7 @@ namespace detail {
 // Feature.cpp. Because it's only used to reserve storage, and determine how
 // large to make the FeatureBitset, it MAY be larger. It MUST NOT be less than
 // the actual number of amendments. A LogicError on startup will verify this.
-static constexpr std::size_t numFeatures = 88;
+static constexpr std::size_t numFeatures = 89;
 
 /** Amendments that this server supports and the default voting behavior.
     Whether they are enabled depends on the Rules defined in the validated
@@ -376,6 +376,7 @@ extern uint256 const featureIOUIssuerWeakTSH;
 extern uint256 const featureCron;
 extern uint256 const fixInvalidTxFlags;
 extern uint256 const featureExtendedHookState;
+extern uint256 const fixCronStacking;
 
 } // namespace ripple
 
@@ -298,7 +298,7 @@ Keylet
 uritoken(AccountID const& issuer, Blob const& uri);
 
 Keylet
-cron(uint32_t timestamp, AccountID const& id);
+cron(uint32_t timestamp, std::optional<AccountID> const& id = std::nullopt);
 
 } // namespace keylet
 
@@ -482,6 +482,7 @@ REGISTER_FEATURE(IOUIssuerWeakTSH, Supported::yes, VoteBehavior::De
 REGISTER_FEATURE(Cron, Supported::yes, VoteBehavior::DefaultNo);
 REGISTER_FIX (fixInvalidTxFlags, Supported::yes, VoteBehavior::DefaultYes);
 REGISTER_FEATURE(ExtendedHookState, Supported::yes, VoteBehavior::DefaultNo);
+REGISTER_FIX (fixCronStacking, Supported::yes, VoteBehavior::DefaultYes);
 
 // The following amendments are obsolete, but must remain supported
 // because they could potentially get enabled.
@@ -466,7 +466,7 @@ uritoken(AccountID const& issuer, Blob const& uri)
 // Examples: 100M → ~5.4e-12, 1B → ~5.4e-11, 10B → ~5.4e-10, 100B → ~5.4e-9
 // (negligible).
 Keylet
-cron(uint32_t timestamp, AccountID const& id)
+cron(uint32_t timestamp, std::optional<AccountID> const& id)
 {
     static const uint256 ns = indexHash(LedgerNameSpace::CRON);
 
@@ -481,7 +481,14 @@ cron(uint32_t timestamp, AccountID const& id)
     h[10] = static_cast<uint8_t>((timestamp >> 8) & 0xFFU);
     h[11] = static_cast<uint8_t>((timestamp >> 0) & 0xFFU);
 
-    const uint256 accHash = indexHash(LedgerNameSpace::CRON, timestamp, id);
+    if (!id.has_value())
+    {
+        // final 20 bytes are zero
+        std::memset(h + 12, 0, 20);
+        return {ltCRON, uint256::fromVoid(h)};
+    }
+
+    const uint256 accHash = indexHash(LedgerNameSpace::CRON, timestamp, *id);
 
     // final 20 bytes are account ID
     std::memcpy(h + 12, accHash.cdata(), 20);
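Note on the keylet::cron hunks: with std::nullopt the key's final 20 bytes are now literal zeros, while per-account keys still carry an account-dependent hash there, so the account-less form sorts at or below every real entry sharing its timestamp. A simplified, self-contained sketch of that layout; the byte offsets mirror the diff (timestamp ending at byte 11, suffix in bytes 12..31), but the leading namespace bytes and the mixing function are stand-ins, not the real indexHash:

    #include <array>
    #include <cassert>
    #include <cstdint>
    #include <optional>

    using AccountID = std::array<std::uint8_t, 20>;  // illustrative stand-in
    using Key = std::array<std::uint8_t, 32>;        // compares lexicographically

    Key cronKey(std::uint32_t timestamp, std::optional<AccountID> const& id)
    {
        Key h{};  // bytes 0..7: namespace tag in the real code; zeros here
        h[8] = static_cast<std::uint8_t>(timestamp >> 24);
        h[9] = static_cast<std::uint8_t>(timestamp >> 16);
        h[10] = static_cast<std::uint8_t>(timestamp >> 8);
        h[11] = static_cast<std::uint8_t>(timestamp);
        if (!id.has_value())
            return h;  // final 20 bytes stay zero, as in the amended code
        std::uint64_t mix = timestamp;  // stand-in for indexHash(CRON, ts, *id)
        for (auto b : *id)
            mix = mix * 1099511628211ULL + b + 1;
        for (int i = 0; i < 20; ++i)
            h[12 + i] = static_cast<std::uint8_t>(mix >> (8 * (i % 8)));
        return h;
    }

    int main()
    {
        AccountID alice{};
        alice.fill(0xaa);
        Key const bound = cronKey(500, std::nullopt);  // range-bound form
        Key const entry = cronKey(500, alice);         // unique per-account key
        assert(bound <= entry);  // zero suffix never sorts above a real entry
        return 0;
    }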
@@ -1106,30 +1106,32 @@ chooseLedgerEntryType(Json::Value const& params)
     std::pair<RPC::Status, LedgerEntryType> result{RPC::Status::OK, ltANY};
     if (params.isMember(jss::type))
     {
-        static constexpr std::array<std::pair<char const*, LedgerEntryType>, 22>
-            types{
-                {{jss::account, ltACCOUNT_ROOT},
+        static constexpr std::array<std::pair<char const*, LedgerEntryType>, 23>
+            types{{
+                {jss::account, ltACCOUNT_ROOT},
                 {jss::amendments, ltAMENDMENTS},
                 {jss::check, ltCHECK},
                 {jss::deposit_preauth, ltDEPOSIT_PREAUTH},
                 {jss::directory, ltDIR_NODE},
                 {jss::escrow, ltESCROW},
                 {jss::emitted_txn, ltEMITTED_TXN},
                 {jss::hook, ltHOOK},
                 {jss::hook_definition, ltHOOK_DEFINITION},
                 {jss::hook_state, ltHOOK_STATE},
                 {jss::fee, ltFEE_SETTINGS},
                 {jss::hashes, ltLEDGER_HASHES},
                 {jss::import_vlseq, ltIMPORT_VLSEQ},
                 {jss::offer, ltOFFER},
                 {jss::payment_channel, ltPAYCHAN},
                 {jss::uri_token, ltURI_TOKEN},
                 {jss::signer_list, ltSIGNER_LIST},
                 {jss::state, ltRIPPLE_STATE},
                 {jss::ticket, ltTICKET},
                 {jss::nft_offer, ltNFTOKEN_OFFER},
                 {jss::nft_page, ltNFTOKEN_PAGE},
-                {jss::unl_report, ltUNL_REPORT}}};
+                {jss::unl_report, ltUNL_REPORT},
+                {jss::cron, ltCRON},
+            }};
 
         auto const& p = params[jss::type];
         if (!p.isString())
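Note on the chooseLedgerEntryType hunk: adding {jss::cron, ltCRON} lets RPC callers filter ledger objects by type "cron" (assuming jss::cron is the literal string "cron", as the jss:: naming convention suggests). A quick sketch of such an account_objects request built with jsoncpp, whose Json::Value API resembles the one used in-tree; the account address is a placeholder:

    #include <json/json.h>
    #include <iostream>

    int main()
    {
        Json::Value params;
        params["account"] = "rPlaceholderAccount";  // hypothetical account
        params["type"] = "cron";  // filter handled by chooseLedgerEntryType

        Json::Value request;
        request["method"] = "account_objects";
        request["params"].append(params);

        // Print the request body a client would POST to the RPC endpoint.
        std::cout << request.toStyledString() << std::endl;
        return 0;
    }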
@@ -781,6 +781,22 @@ public:
             auto const& hook = resp[jss::result][jss::account_objects][0u];
             BEAST_EXPECT(hook[sfAccount.jsonName] == gw.human());
         }
+        {
+            // Create a Cron
+            env(cron::set(gw),
+                cron::startTime(env.now().time_since_epoch().count() + 100),
+                cron::delay(100),
+                cron::repeat(200),
+                fee(XRP(1)));
+            env.close();
+        }
+        {
+            // Find the cron.
+            Json::Value const resp = acct_objs(gw, jss::cron);
+            BEAST_EXPECT(acct_objs_is_size(resp, 1));
+            auto const& cron = resp[jss::result][jss::account_objects][0u];
+            BEAST_EXPECT(cron[sfOwner.jsonName] == gw.human());
+        }
         {
             // See how "deletion_blockers_only" handles gw's directory.
             Json::Value params;