Mirror of https://github.com/Xahau/xahaud.git (synced 2025-12-06 17:27:52 +00:00)
feat: add custom S3+OverlayFS cache actions with configurable delta support
Implements a drop-in replacement for actions/cache using an S3 backend and OverlayFS for delta caching:
- xahau-actions-cache-restore: Downloads immutable base + optional latest delta
- xahau-actions-cache-save: Saves immutable bases (bootstrap/partial-match) or timestamped deltas (exact-match)

Key features:
- Immutable bases: One static base per key (first-write-wins, GitHub Actions semantics)
- Timestamped deltas: Always timestamped to eliminate concurrency issues
- Configurable use-deltas parameter (default true):
  - true: For symbolic keys (branch-based) - massive bandwidth savings via incremental deltas
  - false: For content-based keys (hash-based) - base-only mode, no delta complexity
- Three cache modes: bootstrap, partial-match (restore-keys), exact-match
- OverlayFS integration: Automatic delta extraction via upperdir, whiteout file support
- S3 lifecycle ready: Bases tagged 'type=base', deltas tagged 'type=delta-archive'

Decision rule for use-deltas (see the usage sketch below):
- Content-based discriminator (hashFiles, commit SHA) → use-deltas: false
- Symbolic discriminator (branch name, tag, PR) → use-deltas: true

Also disables existing workflows temporarily during development.
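The decision rule maps directly onto how a workflow would invoke the restore action. A minimal usage sketch — the step names, cached paths, key values, and secret names are illustrative, not part of this commit:

# Hypothetical workflow excerpt.
- name: Restore ccache (symbolic branch key -> deltas on)
  uses: ./.github/actions/xahau-actions-cache-restore
  with:
    path: ~/.ccache
    key: ccache-${{ github.ref_name }}
    restore-keys: |
      ccache-dev
    use-deltas: 'true'
    aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
    aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

- name: Restore Conan packages (content-hash key -> deltas off)
  uses: ./.github/actions/xahau-actions-cache-restore
  with:
    path: ~/.conan2
    key: conan-${{ hashFiles('conanfile.py') }}
    use-deltas: 'false'
    aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
    aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}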
.github/actions/xahau-actions-cache-restore/action.yml (new file, vendored, 282 lines)
@@ -0,0 +1,282 @@
name: 'Xahau Cache Restore (S3 + OverlayFS)'
description: 'Drop-in replacement for actions/cache/restore using S3 and OverlayFS for delta caching'

inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache (currently only single path supported)'
    required: true
  key:
    description: 'An explicit key for restoring the cache'
    required: true
  restore-keys:
    description: 'An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key'
    required: false
    default: ''
  s3-bucket:
    description: 'S3 bucket name for cache storage'
    required: false
    default: 'xahaud-github-actions-cache-niq'
  s3-region:
    description: 'S3 region'
    required: false
    default: 'us-east-1'
  fail-on-cache-miss:
    description: 'Fail the workflow if cache entry is not found'
    required: false
    default: 'false'
  lookup-only:
    description: 'Check if a cache entry exists for the given input(s) without downloading it'
    required: false
    default: 'false'
  use-deltas:
    description: 'Enable delta caching (download/upload incremental changes). Set to false for base-only caching.'
    required: false
    default: 'true'
  # Note: Composite actions can't access secrets.* directly - must be passed from workflow
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 access'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 access'
    required: true

outputs:
  cache-hit:
    description: 'A boolean value to indicate an exact match was found for the primary key'
    value: ${{ steps.restore-cache.outputs.cache-hit }}
  cache-primary-key:
    description: 'The key that was used to restore the cache (may be from restore-keys)'
    value: ${{ steps.restore-cache.outputs.cache-primary-key }}
  cache-matched-key:
    description: 'The key that matched (same as cache-primary-key for compatibility)'
    value: ${{ steps.restore-cache.outputs.cache-primary-key }}

runs:
  using: 'composite'
  steps:
    - name: Restore cache from S3 with OverlayFS
      id: restore-cache
      shell: bash
      env:
        AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
        AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
        S3_BUCKET: ${{ inputs.s3-bucket }}
        S3_REGION: ${{ inputs.s3-region }}
        CACHE_KEY: ${{ inputs.key }}
        RESTORE_KEYS: ${{ inputs.restore-keys }}
        TARGET_PATH: ${{ inputs.path }}
        FAIL_ON_MISS: ${{ inputs.fail-on-cache-miss }}
        LOOKUP_ONLY: ${{ inputs.lookup-only }}
        USE_DELTAS: ${{ inputs.use-deltas }}
      run: |
        set -euo pipefail

        echo "=========================================="
        echo "Xahau Cache Restore (S3 + OverlayFS)"
        echo "=========================================="
        echo "Target path: ${TARGET_PATH}"
        echo "Primary key: ${CACHE_KEY}"
        echo "S3 bucket: s3://${S3_BUCKET}"
        echo "Use deltas: ${USE_DELTAS}"
        echo ""

        # Generate unique cache workspace
        CACHE_HASH=$(echo "${CACHE_KEY}" | md5sum | cut -d' ' -f1)
        CACHE_WORKSPACE="/tmp/xahau-cache-${CACHE_HASH}"

        echo "Cache workspace: ${CACHE_WORKSPACE}"

        # Create OverlayFS directory structure
        mkdir -p "${CACHE_WORKSPACE}"/{base,upper,work,merged}

        # Function to try downloading from S3
        try_restore_key() {
          local try_key="$1"
          local s3_base="s3://${S3_BUCKET}/${try_key}-base.tar.zst"

          echo "Trying cache key: ${try_key}"

          # Check if base exists (one base per key, immutable)
          echo "Checking for base layer..."
          if aws s3 ls "${s3_base}" --region "${S3_REGION}" >/dev/null 2>&1; then
            echo "✓ Found base layer: ${s3_base}"

            if [ "${LOOKUP_ONLY}" = "true" ]; then
              echo "Lookup-only mode: cache exists, skipping download"
              return 0
            fi

            # Download base layer
            echo "Downloading base layer..."
            aws s3 cp "${s3_base}" /tmp/cache-base.tar.zst --region "${S3_REGION}" --quiet

            # Extract base layer
            echo "Extracting base layer..."
            tar -xf /tmp/cache-base.tar.zst -C "${CACHE_WORKSPACE}/base"
            rm /tmp/cache-base.tar.zst

            # Query for latest timestamped delta (only if use-deltas enabled)
            if [ "${USE_DELTAS}" = "true" ]; then
              echo "Querying for latest delta..."
              LATEST_DELTA=$(aws s3api list-objects-v2 \
                --bucket "${S3_BUCKET}" \
                --prefix "${try_key}-delta-" \
                --region "${S3_REGION}" \
                --query 'sort_by(Contents, &LastModified)[-1].Key' \
                --output text 2>/dev/null || echo "")

              if [ -n "${LATEST_DELTA}" ] && [ "${LATEST_DELTA}" != "None" ]; then
                echo "✓ Found latest delta: ${LATEST_DELTA}"
                echo "Downloading delta layer..."
                aws s3 cp "s3://${S3_BUCKET}/${LATEST_DELTA}" /tmp/cache-delta.tar.zst --region "${S3_REGION}" --quiet

                echo "Extracting delta layer..."
                tar -xf /tmp/cache-delta.tar.zst -C "${CACHE_WORKSPACE}/upper" 2>/dev/null || true
                rm /tmp/cache-delta.tar.zst
              else
                echo "ℹ No delta layer found (this is fine for first build)"
              fi
            else
              echo "ℹ Delta caching disabled (use-deltas: false)"
            fi

            return 0
          else
            echo "✗ No base layer found for key: ${try_key}"
            return 1
          fi
        }

        # Try primary key first
        MATCHED_KEY=""
        EXACT_MATCH="false"

        if try_restore_key "${CACHE_KEY}"; then
          MATCHED_KEY="${CACHE_KEY}"
          EXACT_MATCH="true"
          echo ""
          echo "🎯 Exact cache hit for key: ${CACHE_KEY}"
        else
          # Try restore-keys (prefix matching)
          if [ -n "${RESTORE_KEYS}" ]; then
            echo ""
            echo "Primary key not found, trying restore-keys..."

            # Split restore-keys by newline
            while IFS= read -r restore_key; do
              # Skip empty lines
              [ -z "${restore_key}" ] && continue

              # Trim whitespace
              restore_key=$(echo "${restore_key}" | xargs)

              if try_restore_key "${restore_key}"; then
                MATCHED_KEY="${restore_key}"
                EXACT_MATCH="false"
                echo ""
                echo "✓ Cache restored from fallback key: ${restore_key}"
                break
              fi
            done <<< "${RESTORE_KEYS}"
          fi
        fi

        # Check if we found anything
        if [ -z "${MATCHED_KEY}" ]; then
          echo ""
          echo "❌ No cache found for key: ${CACHE_KEY}"
          echo "This is BOOTSTRAP mode - first build for this cache key"

          if [ "${FAIL_ON_MISS}" = "true" ]; then
            echo "fail-on-cache-miss is enabled, failing workflow"
            exit 1
          fi

          # Set outputs for cache miss
          echo "cache-hit=false" >> $GITHUB_OUTPUT
          echo "cache-primary-key=" >> $GITHUB_OUTPUT

          # Create empty cache directory for bootstrap
          mkdir -p "${TARGET_PATH}"

          # Record bootstrap mode for save action
          # Format: path:workspace:matched_key:primary_key:exact_match:use_deltas
          # For bootstrap: workspace="bootstrap", matched_key=primary_key, exact_match=false
          MOUNT_REGISTRY="/tmp/xahau-cache-mounts.txt"
          echo "${TARGET_PATH}:bootstrap:${CACHE_KEY}:${CACHE_KEY}:false:${USE_DELTAS}" >> "${MOUNT_REGISTRY}"

          echo ""
          echo "=========================================="
          echo "Cache restore completed (bootstrap mode)"
          echo "Created empty cache directory: ${TARGET_PATH}"
          echo "=========================================="
          exit 0
        fi

        # If lookup-only, we're done
        if [ "${LOOKUP_ONLY}" = "true" ]; then
          echo "cache-hit=${EXACT_MATCH}" >> $GITHUB_OUTPUT
          echo "cache-primary-key=${MATCHED_KEY}" >> $GITHUB_OUTPUT

          # Clean up workspace
          rm -rf "${CACHE_WORKSPACE}"

          echo ""
          echo "=========================================="
          echo "Cache lookup completed (lookup-only mode)"
          echo "=========================================="
          exit 0
        fi

        # Mount OverlayFS
        echo ""
        echo "Mounting OverlayFS..."
        sudo mount -t overlay overlay \
          -o lowerdir="${CACHE_WORKSPACE}/base",upperdir="${CACHE_WORKSPACE}/upper",workdir="${CACHE_WORKSPACE}/work" \
          "${CACHE_WORKSPACE}/merged"

        # Verify mount
        if mount | grep -q "${CACHE_WORKSPACE}/merged"; then
          echo "✓ OverlayFS mounted successfully"
        else
          echo "❌ Failed to mount OverlayFS"
          exit 1
        fi

        # Create target directory parent if needed
        TARGET_PARENT=$(dirname "${TARGET_PATH}")
        mkdir -p "${TARGET_PARENT}"

        # Remove existing target if it exists
        if [ -e "${TARGET_PATH}" ]; then
          echo "Removing existing target: ${TARGET_PATH}"
          rm -rf "${TARGET_PATH}"
        fi

        # Symlink target path to merged view
        echo "Creating symlink: ${TARGET_PATH} -> ${CACHE_WORKSPACE}/merged"
        ln -s "${CACHE_WORKSPACE}/merged" "${TARGET_PATH}"

        # Save mount info for cleanup/save later
        # Format: path:workspace:matched_key:primary_key:exact_match:use_deltas
        # This tells save action whether to create new base (partial match) or just delta (exact match)
        MOUNT_REGISTRY="/tmp/xahau-cache-mounts.txt"
        echo "${TARGET_PATH}:${CACHE_WORKSPACE}:${MATCHED_KEY}:${CACHE_KEY}:${EXACT_MATCH}:${USE_DELTAS}" >> "${MOUNT_REGISTRY}"

        # Set outputs
        echo "cache-hit=${EXACT_MATCH}" >> $GITHUB_OUTPUT
        echo "cache-primary-key=${MATCHED_KEY}" >> $GITHUB_OUTPUT

        # Show statistics
        echo ""
        echo "Cache statistics:"
        echo "  Base layer size: $(du -sh ${CACHE_WORKSPACE}/base 2>/dev/null | cut -f1 || echo '0')"
        echo "  Delta layer size: $(du -sh ${CACHE_WORKSPACE}/upper 2>/dev/null | cut -f1 || echo '0')"
        echo "  Merged view size: $(du -sh ${CACHE_WORKSPACE}/merged 2>/dev/null | cut -f1 || echo '0')"

        echo ""
        echo "=========================================="
        echo "Cache restore completed successfully"
        echo "Exact match: ${EXACT_MATCH}"
        echo "Matched key: ${MATCHED_KEY}"
        echo "=========================================="
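The restore action's delta handling leans entirely on OverlayFS semantics: writes through the merged view land only in upperdir, and deletions appear there as character-device whiteout entries, so archiving upperdir alone captures the full delta, removals included. A self-contained sketch of that mechanism — the paths and the step itself are illustrative, not part of this commit:

# Hypothetical demonstration step.
- name: Demonstrate OverlayFS delta capture
  shell: bash
  run: |
    set -euo pipefail
    mkdir -p /tmp/ovl/{base,upper,work,merged}
    echo old > /tmp/ovl/base/old.txt
    sudo mount -t overlay overlay \
      -o lowerdir=/tmp/ovl/base,upperdir=/tmp/ovl/upper,workdir=/tmp/ovl/work \
      /tmp/ovl/merged
    echo new > /tmp/ovl/merged/new.txt   # create: shows up as upper/new.txt
    rm /tmp/ovl/merged/old.txt           # delete: whiteout char device upper/old.txt
    ls -l /tmp/ovl/upper                 # upper/ now holds exactly the delta
    sudo umount /tmp/ovl/merged
    tar -cf - -C /tmp/ovl/upper . | zstd -3 -q -o /tmp/delta.tar.zst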
.github/actions/xahau-actions-cache-save/action.yml (new file, vendored, 342 lines)
@@ -0,0 +1,342 @@
name: 'Xahau Cache Save (S3 + OverlayFS)'
description: 'Drop-in replacement for actions/cache/save using S3 and OverlayFS for delta caching'

inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache (currently only single path supported)'
    required: true
  key:
    description: 'An explicit key for saving the cache'
    required: true
  s3-bucket:
    description: 'S3 bucket name for cache storage'
    required: false
    default: 'xahaud-github-actions-cache-niq'
  s3-region:
    description: 'S3 region'
    required: false
    default: 'us-east-1'
  use-deltas:
    description: 'Enable delta caching (download/upload incremental changes). Set to false for base-only caching.'
    required: false
    default: 'true'
  # Note: Composite actions can't access secrets.* directly - must be passed from workflow
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 access'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 access'
    required: true

runs:
  using: 'composite'
  steps:
    - name: Save cache to S3 with OverlayFS delta
      shell: bash
      env:
        AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
        AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
        S3_BUCKET: ${{ inputs.s3-bucket }}
        S3_REGION: ${{ inputs.s3-region }}
        CACHE_KEY: ${{ inputs.key }}
        TARGET_PATH: ${{ inputs.path }}
        USE_DELTAS: ${{ inputs.use-deltas }}
      run: |
        set -euo pipefail

        echo "=========================================="
        echo "Xahau Cache Save (S3 + OverlayFS)"
        echo "=========================================="
        echo "Target path: ${TARGET_PATH}"
        echo "Cache key: ${CACHE_KEY}"
        echo "S3 bucket: s3://${S3_BUCKET}"
        echo ""

        # Find the cache workspace from mount registry
        MOUNT_REGISTRY="/tmp/xahau-cache-mounts.txt"

        if [ ! -f "${MOUNT_REGISTRY}" ]; then
          echo "⚠️ No cache mounts found (mount registry doesn't exist)"
          echo "This usually means cache restore was not called, or there was no cache to restore."
          echo "Skipping cache save."
          exit 0
        fi

        # Find entry for this path
        # Format: path:workspace:matched_key:primary_key:exact_match:use_deltas
        # Bootstrap mode: path:bootstrap:key:key:false:true/false (workspace="bootstrap")
        CACHE_WORKSPACE=""
        MATCHED_KEY=""
        PRIMARY_KEY=""
        EXACT_MATCH=""
        REGISTRY_USE_DELTAS=""

        while IFS=: read -r mount_path mount_workspace mount_matched_key mount_primary_key mount_exact_match mount_use_deltas; do
          if [ "${mount_path}" = "${TARGET_PATH}" ]; then
            CACHE_WORKSPACE="${mount_workspace}"
            MATCHED_KEY="${mount_matched_key}"
            PRIMARY_KEY="${mount_primary_key}"
            EXACT_MATCH="${mount_exact_match}"
            REGISTRY_USE_DELTAS="${mount_use_deltas}"
            break
          fi
        done < "${MOUNT_REGISTRY}"

        if [ -z "${CACHE_WORKSPACE}" ] && [ -z "${MATCHED_KEY}" ]; then
          echo "⚠️ No cache entry found for path: ${TARGET_PATH}"
          echo "This usually means cache restore was not called for this path."
          echo "Skipping cache save."
          exit 0
        fi

        # Determine cache mode
        if [ "${CACHE_WORKSPACE}" = "bootstrap" ]; then
          CACHE_MODE="bootstrap"
          PRIMARY_KEY="${MATCHED_KEY}"  # In bootstrap, matched_key field contains primary key
          echo "Cache mode: BOOTSTRAP (first build for this key)"
          echo "Primary key: ${PRIMARY_KEY}"
        elif [ "${EXACT_MATCH}" = "false" ]; then
          CACHE_MODE="partial-match"
          echo "Cache mode: PARTIAL MATCH (restore-key used)"
          echo "Cache workspace: ${CACHE_WORKSPACE}"
          echo "Matched key from restore: ${MATCHED_KEY}"
          echo "Primary key (will save new base): ${PRIMARY_KEY}"
        else
          CACHE_MODE="exact-match"
          echo "Cache mode: EXACT MATCH (cache hit)"
          echo "Cache workspace: ${CACHE_WORKSPACE}"
          echo "Matched key: ${MATCHED_KEY}"
        fi
        echo "Use deltas: ${REGISTRY_USE_DELTAS}"
        echo ""

        # Handle different cache modes
        if [ "${CACHE_MODE}" = "bootstrap" ]; then
          # Bootstrap: Save entire cache as base layer (no OverlayFS was used)
          echo "Bootstrap mode: Creating initial base layer from ${TARGET_PATH}"

          BASE_TARBALL="/tmp/xahau-cache-base-$$.tar.zst"
          echo "Creating base tarball..."
          tar -cf - -C "${TARGET_PATH}" . | zstd -3 -T0 -q -o "${BASE_TARBALL}"

          BASE_SIZE=$(du -h "${BASE_TARBALL}" | cut -f1)
          echo "✓ Base tarball created: ${BASE_SIZE}"
          echo ""

          # Use static base name (one base per key, immutable)
          S3_BASE_KEY="s3://${S3_BUCKET}/${PRIMARY_KEY}-base.tar.zst"

          # Check if base already exists (immutability - first write wins)
          if aws s3 ls "${S3_BASE_KEY}" --region "${S3_REGION}" >/dev/null 2>&1; then
            echo "⚠️ Base layer already exists: ${S3_BASE_KEY}"
            echo "Skipping upload (immutability - first write wins, like GitHub Actions)"
          else
            echo "Uploading base layer to S3..."
            echo "  Key: ${PRIMARY_KEY}-base.tar.zst"

            aws s3 cp "${BASE_TARBALL}" "${S3_BASE_KEY}" \
              --region "${S3_REGION}" \
              --tagging "type=base" \
              --quiet

            echo "✓ Uploaded: ${S3_BASE_KEY}"
          fi

          # Cleanup
          rm -f "${BASE_TARBALL}"

          echo ""
          echo "=========================================="
          echo "Bootstrap cache save completed"
          echo "Base size: ${BASE_SIZE}"
          echo "Cache key: ${PRIMARY_KEY}"
          echo "=========================================="
          exit 0

        elif [ "${CACHE_MODE}" = "partial-match" ]; then
          # Partial match: Save merged view as new base ONLY (no delta)
          # The delta is relative to the OLD base, not the NEW base we're creating
          echo "Partial match mode: Saving new base layer for primary key"
          echo "Note: Delta will NOT be saved (it's relative to old base)"

          BASE_TARBALL="/tmp/xahau-cache-base-$$.tar.zst"
          echo "Creating base tarball from merged view..."
          tar -cf - -C "${CACHE_WORKSPACE}/merged" . | zstd -3 -T0 -q -o "${BASE_TARBALL}"

          BASE_SIZE=$(du -h "${BASE_TARBALL}" | cut -f1)
          echo "✓ Base tarball created: ${BASE_SIZE}"
          echo ""

          # Use static base name (one base per key, immutable)
          S3_BASE_KEY="s3://${S3_BUCKET}/${PRIMARY_KEY}-base.tar.zst"

          # Check if base already exists (immutability - first write wins)
          if aws s3 ls "${S3_BASE_KEY}" --region "${S3_REGION}" >/dev/null 2>&1; then
            echo "⚠️ Base layer already exists: ${S3_BASE_KEY}"
            echo "Skipping upload (immutability - first write wins, like GitHub Actions)"
          else
            echo "Uploading new base layer to S3..."
            echo "  Key: ${PRIMARY_KEY}-base.tar.zst"

            aws s3 cp "${BASE_TARBALL}" "${S3_BASE_KEY}" \
              --region "${S3_REGION}" \
              --tagging "type=base" \
              --quiet

            echo "✓ Uploaded: ${S3_BASE_KEY}"
          fi

          # Cleanup
          rm -f "${BASE_TARBALL}"

          # Unmount and cleanup
          echo ""
          echo "Cleaning up..."
          if mount | grep -q "${CACHE_WORKSPACE}/merged"; then
            sudo umount "${CACHE_WORKSPACE}/merged" || {
              echo "⚠️ Warning: Failed to unmount ${CACHE_WORKSPACE}/merged"
              echo "Attempting lazy unmount..."
              sudo umount -l "${CACHE_WORKSPACE}/merged" || true
            }
          fi
          rm -rf "${CACHE_WORKSPACE}"

          # Remove from registry
          if [ -f "${MOUNT_REGISTRY}" ]; then
            grep -v "^${TARGET_PATH}:" "${MOUNT_REGISTRY}" > "${MOUNT_REGISTRY}.tmp" 2>/dev/null || true
            mv "${MOUNT_REGISTRY}.tmp" "${MOUNT_REGISTRY}" 2>/dev/null || true
          fi

          echo "✓ Cleanup completed"

          echo ""
          echo "=========================================="
          echo "Partial match cache save completed"
          echo "New base created for: ${PRIMARY_KEY}"
          echo "Base size: ${BASE_SIZE}"
          if [ "${REGISTRY_USE_DELTAS}" = "true" ]; then
            echo "Next exact-match build will create deltas from this base"
          else
            echo "Next exact-match build will reuse this base (base-only mode)"
          fi
          echo "=========================================="
          exit 0
        fi

        # For exact-match ONLY: Save delta (if use-deltas enabled)
        if [ "${CACHE_MODE}" = "exact-match" ]; then
          # If deltas are disabled, just cleanup and exit
          if [ "${REGISTRY_USE_DELTAS}" != "true" ]; then
            echo "ℹ️ Delta caching disabled (use-deltas: false)"
            echo "Base already exists for this key, nothing to save."

            # Unmount and cleanup
            echo ""
            echo "Cleaning up..."
            if mount | grep -q "${CACHE_WORKSPACE}/merged"; then
              sudo umount "${CACHE_WORKSPACE}/merged" 2>/dev/null || true
            fi
            rm -rf "${CACHE_WORKSPACE}"

            # Remove from registry
            if [ -f "${MOUNT_REGISTRY}" ]; then
              grep -v "^${TARGET_PATH}:" "${MOUNT_REGISTRY}" > "${MOUNT_REGISTRY}.tmp" 2>/dev/null || true
              mv "${MOUNT_REGISTRY}.tmp" "${MOUNT_REGISTRY}" 2>/dev/null || true
            fi

            echo ""
            echo "=========================================="
            echo "Cache save completed (base-only mode)"
            echo "=========================================="
            exit 0
          fi

          # Check if upper layer has any changes
          if [ -z "$(ls -A ${CACHE_WORKSPACE}/upper 2>/dev/null)" ]; then
            echo "ℹ️ No changes detected in upper layer (cache is unchanged)"
            echo "Skipping delta upload to save bandwidth."

            # Still unmount and cleanup
            echo ""
            echo "Cleaning up..."
            sudo umount "${CACHE_WORKSPACE}/merged" 2>/dev/null || true
            rm -rf "${CACHE_WORKSPACE}"

            echo ""
            echo "=========================================="
            echo "Cache save completed (no changes)"
            echo "=========================================="
            exit 0
          fi

          # Show delta statistics
          echo "Delta layer statistics:"
          echo "  Files changed: $(find ${CACHE_WORKSPACE}/upper -type f 2>/dev/null | wc -l)"
          echo "  Delta size: $(du -sh ${CACHE_WORKSPACE}/upper 2>/dev/null | cut -f1)"
          echo ""

          # Create delta tarball from upper layer
          echo "Creating delta tarball..."
          DELTA_TARBALL="/tmp/xahau-cache-delta-$$.tar.zst"

          tar -cf - -C "${CACHE_WORKSPACE}/upper" . | zstd -3 -T0 -q -o "${DELTA_TARBALL}"

          DELTA_SIZE=$(du -h "${DELTA_TARBALL}" | cut -f1)
          echo "✓ Delta tarball created: ${DELTA_SIZE}"
          echo ""

          # Upload timestamped delta (no overwrites = zero concurrency issues)
          TIMESTAMP=$(date +%Y%m%d%H%M%S)
          COMMIT_SHA=$(git rev-parse --short HEAD 2>/dev/null || echo "unknown")

          # Use PRIMARY_KEY for delta (ensures deltas match their base)
          S3_DELTA_TIMESTAMPED="s3://${S3_BUCKET}/${PRIMARY_KEY}-delta-${TIMESTAMP}-${COMMIT_SHA}.tar.zst"

          echo "Uploading timestamped delta to S3..."
          echo "  Key: ${PRIMARY_KEY}-delta-${TIMESTAMP}-${COMMIT_SHA}.tar.zst"

          # Upload with tag for auto-deletion after 7 days
          aws s3 cp "${DELTA_TARBALL}" "${S3_DELTA_TIMESTAMPED}" \
            --region "${S3_REGION}" \
            --tagging "type=delta-archive" \
            --quiet

          echo "✓ Uploaded: ${S3_DELTA_TIMESTAMPED}"
          echo "  (tagged for auto-deletion after 7 days)"

          # Cleanup delta tarball
          rm -f "${DELTA_TARBALL}"

          # Cleanup: Unmount OverlayFS and remove workspace
          echo ""
          echo "Cleaning up..."

          if mount | grep -q "${CACHE_WORKSPACE}/merged"; then
            sudo umount "${CACHE_WORKSPACE}/merged" || {
              echo "⚠️ Warning: Failed to unmount ${CACHE_WORKSPACE}/merged"
              echo "Attempting lazy unmount..."
              sudo umount -l "${CACHE_WORKSPACE}/merged" || true
            }
          fi

          # Remove workspace
          rm -rf "${CACHE_WORKSPACE}"
        fi

        # Remove from registry
        if [ -f "${MOUNT_REGISTRY}" ]; then
          grep -v "^${TARGET_PATH}:" "${MOUNT_REGISTRY}" > "${MOUNT_REGISTRY}.tmp" 2>/dev/null || true
          mv "${MOUNT_REGISTRY}.tmp" "${MOUNT_REGISTRY}" 2>/dev/null || true
        fi

        echo "✓ Cleanup completed"

        echo ""
        echo "=========================================="
        echo "Cache save completed successfully"
        echo "Mode: ${CACHE_MODE}"
        echo "Cache key: ${PRIMARY_KEY}"
        if [ -n "${DELTA_SIZE:-}" ]; then
          echo "Delta size: ${DELTA_SIZE}"
        fi
        echo "=========================================="
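The save action tags every uploaded object ('type=base' or 'type=delta-archive'), and its log states that deltas are auto-deleted after 7 days. That expiry would be enforced by an S3 lifecycle rule on the bucket, which is not part of this commit; a hedged sketch of what such a one-off rule could look like:

# Hypothetical one-off setup step - the bucket's actual lifecycle configuration is assumed, not shown here.
- name: Configure delta expiry (sketch)
  shell: bash
  run: |
    aws s3api put-bucket-lifecycle-configuration \
      --bucket xahaud-github-actions-cache-niq \
      --region us-east-1 \
      --lifecycle-configuration '{
        "Rules": [{
          "ID": "expire-delta-archives",
          "Status": "Enabled",
          "Filter": {"Tag": {"Key": "type", "Value": "delta-archive"}},
          "Expiration": {"Days": 7}
        }]
      }'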
.github/workflows/build-in-docker.yml.disabled (new file, vendored, 95 lines)
@@ -0,0 +1,95 @@
name: Build using Docker

on:
  push:
    branches: ["dev", "candidate", "release", "jshooks"]
  pull_request:
    branches: ["dev", "candidate", "release", "jshooks"]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP: 1

jobs:
  checkout:
    runs-on: [self-hosted, vanity]
    outputs:
      checkout_path: ${{ steps.vars.outputs.checkout_path }}
    steps:
      - name: Prepare checkout path
        id: vars
        run: |
          SAFE_BRANCH=$(echo "${{ github.ref_name }}" | sed -e 's/[^a-zA-Z0-9._-]/-/g')
          CHECKOUT_PATH="${SAFE_BRANCH}-${{ github.sha }}"
          echo "checkout_path=${CHECKOUT_PATH}" >> "$GITHUB_OUTPUT"

      - uses: actions/checkout@v4
        with:
          path: ${{ steps.vars.outputs.checkout_path }}
          clean: true
          fetch-depth: 2  # Only get the last 2 commits, to avoid fetching all history

  build:
    runs-on: [self-hosted, vanity]
    needs: [checkout]
    defaults:
      run:
        working-directory: ${{ needs.checkout.outputs.checkout_path }}
    steps:
      - name: Set Cleanup Script Path
        run: |
          echo "JOB_CLEANUP_SCRIPT=$(mktemp)" >> $GITHUB_ENV

      - name: Build using Docker
        run: /bin/bash release-builder.sh

      - name: Stop Container (Cleanup)
        if: always()
        run: |
          echo "Running cleanup script: $JOB_CLEANUP_SCRIPT"
          /bin/bash -e -x "$JOB_CLEANUP_SCRIPT"
          CLEANUP_EXIT_CODE=$?

          if [[ "$CLEANUP_EXIT_CODE" -eq 0 ]]; then
            echo "Cleanup script succeeded."
            rm -f "$JOB_CLEANUP_SCRIPT"
            echo "Cleanup script removed."
          else
            echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT"
          fi

          if [[ "${DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP}" == "1" ]]; then
            echo "🔍 Checking for leftover containers..."
            BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "")

            if [[ -n "$BUILD_CONTAINERS" ]]; then
              echo "⚠️ WARNING: Some build containers are still running"
              echo "$BUILD_CONTAINERS"
            else
              echo "✅ No build containers found"
            fi
          fi

  tests:
    runs-on: [self-hosted, vanity]
    needs: [build, checkout]
    defaults:
      run:
        working-directory: ${{ needs.checkout.outputs.checkout_path }}
    steps:
      - name: Unit tests
        run: /bin/bash docker-unit-tests.sh

  cleanup:
    runs-on: [self-hosted, vanity]
    needs: [tests, checkout]
    if: always()
    steps:
      - name: Cleanup workspace
        run: |
          CHECKOUT_PATH="${{ needs.checkout.outputs.checkout_path }}"
          echo "Cleaning workspace for ${CHECKOUT_PATH}"
          rm -rf "${{ github.workspace }}/${CHECKOUT_PATH}"
.github/workflows/clang-format.yml.disabled (new file, vendored, 72 lines)
@@ -0,0 +1,72 @@
name: clang-format

on: [push, pull_request]

jobs:
  check:
    runs-on: ubuntu-22.04
    env:
      CLANG_VERSION: 10
    steps:
      - uses: actions/checkout@v3
      # - name: Install clang-format
      #   run: |
      #     codename=$( lsb_release --codename --short )
      #     sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
      #     deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
      #     deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
      #     EOF
      #     wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
      #     sudo apt-get update -y
      #     sudo apt-get install -y clang-format-${CLANG_VERSION}

      # Temporary fix until this commit is merged
      # https://github.com/XRPLF/rippled/commit/552377c76f55b403a1c876df873a23d780fcc81c
      - name: Download and install clang-format
        run: |
          sudo apt-get update -y
          sudo apt-get install -y libtinfo5
          curl -LO https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
          tar -xf clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
          sudo mv clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04 /opt/clang-10
          sudo ln -s /opt/clang-10/bin/clang-format /usr/local/bin/clang-format-10
      - name: Format src/ripple
        run: find src/ripple -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
      - name: Format src/test
        run: find src/test -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
      - name: Check for differences
        id: assert
        run: |
          set -o pipefail
          git diff --exit-code | tee "clang-format.patch"
      - name: Upload patch
        if: failure() && steps.assert.outcome == 'failure'
        uses: actions/upload-artifact@v4
        continue-on-error: true
        with:
          name: clang-format.patch
          if-no-files-found: ignore
          path: clang-format.patch
      - name: What happened?
        if: failure() && steps.assert.outcome == 'failure'
        env:
          PREAMBLE: |
            If you are reading this, you are looking at a failed Github Actions
            job. That means you pushed one or more files that did not conform
            to the formatting specified in .clang-format. That may be because
            you neglected to run 'git clang-format' or 'clang-format' before
            committing, or that your version of clang-format has an
            incompatibility with the one on this
            machine, which is:
          SUGGESTION: |

            To fix it, you can do one of two things:
            1. Download and apply the patch generated as an artifact of this
               job to your repo, commit, and push.
            2. Run 'git-clang-format --extensions c,cpp,h,cxx,ipp dev'
               in your repo, commit, and push.
        run: |
          echo "${PREAMBLE}"
          clang-format-${CLANG_VERSION} --version
          echo "${SUGGESTION}"
          exit 1
.github/workflows/levelization.yml.disabled (new file, vendored, 49 lines)
@@ -0,0 +1,49 @@
name: levelization

on: [push, pull_request]

jobs:
  check:
    runs-on: ubuntu-latest
    env:
      CLANG_VERSION: 10
    steps:
      - uses: actions/checkout@v3
      - name: Check levelization
        run: Builds/levelization/levelization.sh
      - name: Check for differences
        id: assert
        run: |
          set -o pipefail
          git diff --exit-code | tee "levelization.patch"
      - name: Upload patch
        if: failure() && steps.assert.outcome == 'failure'
        uses: actions/upload-artifact@v4
        continue-on-error: true
        with:
          name: levelization.patch
          if-no-files-found: ignore
          path: levelization.patch
      - name: What happened?
        if: failure() && steps.assert.outcome == 'failure'
        env:
          MESSAGE: |
            If you are reading this, you are looking at a failed Github
            Actions job. That means you changed the dependency relationships
            between the modules in rippled. That may be an improvement or a
            regression. This check doesn't judge.

            A rule of thumb, though, is that if your changes caused
            something to be removed from loops.txt, that's probably an
            improvement. If something was added, it's probably a regression.

            To fix it, you can do one of two things:
            1. Download and apply the patch generated as an artifact of this
               job to your repo, commit, and push.
            2. Run './Builds/levelization/levelization.sh' in your repo,
               commit, and push.

            See Builds/levelization/README.md for more info.
        run: |
          echo "${MESSAGE}"
          exit 1
.github/workflows/verify-generated-headers.yml.disabled (new file, vendored, 36 lines)
@@ -0,0 +1,36 @@
name: Verify Generated Hook Headers

on:
  push:
  pull_request:

jobs:
  verify-generated-headers:
    strategy:
      fail-fast: false
      matrix:
        include:
          - target: hook/error.h
            generator: ./hook/generate_error.sh
          - target: hook/extern.h
            generator: ./hook/generate_extern.sh
          - target: hook/sfcodes.h
            generator: bash ./hook/generate_sfcodes.sh
          - target: hook/tts.h
            generator: ./hook/generate_tts.sh
    runs-on: ubuntu-latest
    name: ${{ matrix.target }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Verify ${{ matrix.target }}
        run: |
          set -euo pipefail
          chmod +x hook/generate_*.sh || true

          tmp=$(mktemp)
          trap 'rm -f "$tmp"' EXIT

          ${{ matrix.generator }} > "$tmp"
          diff -u ${{ matrix.target }} "$tmp"
.github/workflows/xahau-ga-macos.yml.disabled (new file, vendored, 149 lines)
@@ -0,0 +1,149 @@
name: MacOS - GA Runner

on:
  push:
    branches: ["dev", "candidate", "release"]
  pull_request:
    branches: ["dev", "candidate", "release"]
  schedule:
    - cron: '0 0 * * *'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test:
    strategy:
      matrix:
        generator:
          - Ninja
        configuration:
          - Debug
    runs-on: macos-15
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.
      CACHE_VERSION: 1
      MAIN_BRANCH_NAME: dev
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Conan
        run: |
          brew install conan
          # Verify Conan 2 is installed
          conan --version

      - name: Install Coreutils
        run: |
          brew install coreutils
          echo "Num proc: $(nproc)"

      - name: Install Ninja
        if: matrix.generator == 'Ninja'
        run: brew install ninja

      - name: Install Python
        run: |
          if which python3 > /dev/null 2>&1; then
            echo "Python 3 executable exists"
            python3 --version
          else
            brew install python@3.12
          fi
          # Create 'python' symlink if it doesn't exist (for tools expecting 'python')
          if ! which python > /dev/null 2>&1; then
            sudo ln -sf $(which python3) /usr/local/bin/python
          fi

      - name: Install CMake
        run: |
          # Install CMake 3.x to match local dev environments
          # With Conan 2 and the policy args passed to CMake, newer versions
          # can have issues with dependencies that require cmake_minimum_required < 3.5
          brew uninstall cmake --ignore-dependencies 2>/dev/null || true

          # Download and install CMake 3.31.7 directly
          curl -L https://github.com/Kitware/CMake/releases/download/v3.31.7/cmake-3.31.7-macos-universal.tar.gz -o cmake.tar.gz
          tar -xzf cmake.tar.gz

          # Move the entire CMake.app to /Applications
          sudo mv cmake-3.31.7-macos-universal/CMake.app /Applications/

          echo "/Applications/CMake.app/Contents/bin" >> $GITHUB_PATH
          /Applications/CMake.app/Contents/bin/cmake --version

      - name: Install ccache
        run: brew install ccache

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content
          is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}

      - name: Check environment
        run: |
          echo "PATH:"
          echo "${PATH}" | tr ':' '\n'
          which python && python --version || echo "Python not found"
          which conan && conan --version || echo "Conan not found"
          which cmake && cmake --version || echo "CMake not found"
          clang --version
          ccache --version
          echo "---- Full Environment ----"
          env

      - name: Configure Conan
        run: |
          # Create the default profile directory if it doesn't exist
          mkdir -p ~/.conan2/profiles

          # Detect compiler version
          COMPILER_VERSION=$(clang --version | grep -oE 'version [0-9]+' | grep -oE '[0-9]+')

          # Create profile with our specific settings
          cat > ~/.conan2/profiles/default <<EOF
          [settings]
          arch=armv8
          build_type=Release
          compiler=apple-clang
          compiler.cppstd=20
          compiler.libcxx=libc++
          compiler.version=${COMPILER_VERSION}
          os=Macos

          [conf]
          # Workaround for gRPC with newer Apple Clang
          tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
          EOF

          # Display profile for verification
          conan profile show

      - name: Install dependencies
        uses: ./.github/actions/xahau-ga-dependencies
        with:
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
        with:
          generator: ${{ matrix.generator }}
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Test
        run: |
          ${{ env.build_dir }}/rippled --unittest --unittest-jobs $(nproc)
.github/workflows/xahau-ga-nix.yml.disabled (new file, vendored, 332 lines)
@@ -0,0 +1,332 @@
name: Nix - GA Runner

on:
  push:
    branches: ["dev", "candidate", "release"]
  pull_request:
    branches: ["dev", "candidate", "release"]
  schedule:
    - cron: '0 0 * * *'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  matrix-setup:
    runs-on: ubuntu-latest
    container: python:3-slim
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - name: Generate build matrix
        id: set-matrix
        shell: python
        run: |
          import json
          import os

          # Full matrix with all 6 compiler configurations
          # Each configuration includes all parameters needed by the build job
          full_matrix = [
              {
                  "compiler_id": "gcc-11-libstdcxx",
                  "compiler": "gcc",
                  "cc": "gcc-11",
                  "cxx": "g++-11",
                  "compiler_version": 11,
                  "stdlib": "libstdcxx",
                  "configuration": "Debug"
              },
              {
                  "compiler_id": "gcc-13-libstdcxx",
                  "compiler": "gcc",
                  "cc": "gcc-13",
                  "cxx": "g++-13",
                  "compiler_version": 13,
                  "stdlib": "libstdcxx",
                  "configuration": "Debug"
              },
              {
                  "compiler_id": "clang-14-libstdcxx-gcc11",
                  "compiler": "clang",
                  "cc": "clang-14",
                  "cxx": "clang++-14",
                  "compiler_version": 14,
                  "stdlib": "libstdcxx",
                  "clang_gcc_toolchain": 11,
                  "configuration": "Debug"
              },
              {
                  "compiler_id": "clang-16-libstdcxx-gcc13",
                  "compiler": "clang",
                  "cc": "clang-16",
                  "cxx": "clang++-16",
                  "compiler_version": 16,
                  "stdlib": "libstdcxx",
                  "clang_gcc_toolchain": 13,
                  "configuration": "Debug"
              },
              {
                  "compiler_id": "clang-17-libcxx",
                  "compiler": "clang",
                  "cc": "clang-17",
                  "cxx": "clang++-17",
                  "compiler_version": 17,
                  "stdlib": "libcxx",
                  "configuration": "Debug"
              },
              {
                  # Clang 18 - testing if it's faster than Clang 17 with libc++
                  # Requires patching Conan v1 settings.yml to add version 18
                  "compiler_id": "clang-18-libcxx",
                  "compiler": "clang",
                  "cc": "clang-18",
                  "cxx": "clang++-18",
                  "compiler_version": 18,
                  "stdlib": "libcxx",
                  "configuration": "Debug"
              }
          ]

          # Minimal matrix for PRs and feature branches
          minimal_matrix = [
              full_matrix[1],  # gcc-13 (middle-ground gcc)
              full_matrix[2]   # clang-14 (mature, stable clang)
          ]

          # Determine which matrix to use based on the target branch
          ref = "${{ github.ref }}"
          base_ref = "${{ github.base_ref }}"  # For PRs, this is the target branch
          event_name = "${{ github.event_name }}"
          commit_message = """${{ github.event.head_commit.message }}"""
          pr_title = """${{ github.event.pull_request.title }}"""

          # Debug logging
          print(f"Event: {event_name}")
          print(f"Ref: {ref}")
          print(f"Base ref: {base_ref}")
          print(f"PR title: {pr_title}")
          print(f"Commit message: {commit_message}")

          # Check for override tags in commit message or PR title
          force_full = "[ci-nix-full-matrix]" in commit_message or "[ci-nix-full-matrix]" in pr_title
          print(f"Force full matrix: {force_full}")

          # Check if this is targeting a main branch
          # For PRs: check base_ref (target branch)
          # For pushes: check ref (current branch)
          main_branches = ["refs/heads/dev", "refs/heads/release", "refs/heads/candidate"]

          if force_full:
              # Override: always use full matrix if tag is present
              use_full = True
          elif event_name == "pull_request":
              # For PRs, base_ref is just the branch name (e.g., "dev", not "refs/heads/dev")
              # Check if the PR targets release or candidate (more critical branches)
              use_full = base_ref in ["release", "candidate"]
          else:
              # For pushes, ref is the full reference (e.g., "refs/heads/dev")
              use_full = ref in main_branches

          # Select the appropriate matrix
          if use_full:
              if force_full:
                  print("Using FULL matrix (6 configs) - forced by [ci-nix-full-matrix] tag")
              else:
                  print("Using FULL matrix (6 configs) - targeting main branch")
              matrix = full_matrix
          else:
              print("Using MINIMAL matrix (2 configs) - feature branch/PR")
              matrix = minimal_matrix

          # Output the matrix as JSON
          output = json.dumps({"include": matrix})
          with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
              f.write(f"matrix={output}\n")

  build:
    needs: matrix-setup
    runs-on: ubuntu-latest
    outputs:
      artifact_name: ${{ steps.set-artifact-name.outputs.artifact_name }}
    strategy:
      fail-fast: false
      matrix: ${{ fromJSON(needs.matrix-setup.outputs.matrix) }}
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.
      CACHE_VERSION: 2
      MAIN_BRANCH_NAME: dev
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache

          # Install the specific GCC version needed for Clang
          if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
            echo "=== Installing GCC ${{ matrix.clang_gcc_toolchain }} for Clang ==="
            sudo apt-get install -y gcc-${{ matrix.clang_gcc_toolchain }} g++-${{ matrix.clang_gcc_toolchain }} libstdc++-${{ matrix.clang_gcc_toolchain }}-dev

            echo "=== GCC versions available after installation ==="
            ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d"
          fi

          # For Clang < 16 with --gcc-toolchain, hide newer GCC versions
          # This is needed because --gcc-toolchain still picks the highest version
          #
          # THE GREAT GCC HIDING TRICK (for Clang < 16):
          # Clang versions before 16 don't have --gcc-install-dir, only --gcc-toolchain
          # which is deprecated and still uses discovery heuristics that ALWAYS pick
          # the highest version number. So we play a sneaky game...
          #
          # We rename newer GCC versions to very low integers (1, 2, 3...) which makes
          # Clang think they're ancient GCC versions. Since 11 > 3 > 2 > 1, Clang will
          # pick GCC 11 over our renamed versions. It's dumb but it works!
          #
          # Example: GCC 12→1, GCC 13→2, GCC 14→3, so Clang picks 11 (highest number)
          if [ -n "${{ matrix.clang_gcc_toolchain }}" ] && [ "${{ matrix.compiler_version }}" -lt "16" ]; then
            echo "=== Hiding GCC versions newer than ${{ matrix.clang_gcc_toolchain }} for Clang < 16 ==="
            target_version=${{ matrix.clang_gcc_toolchain }}
            counter=1  # Start with 1 - these will be seen as "GCC version 1, 2, 3" etc
            for dir in /usr/lib/gcc/x86_64-linux-gnu/*/; do
              if [ -d "$dir" ]; then
                version=$(basename "$dir")
                # Check if version is numeric and greater than target
                if [[ "$version" =~ ^[0-9]+$ ]] && [ "$version" -gt "$target_version" ]; then
                  echo "Hiding GCC $version -> renaming to $counter (will be seen as GCC version $counter)"
                  # Safety check: ensure target doesn't already exist
                  if [ ! -e "/usr/lib/gcc/x86_64-linux-gnu/$counter" ]; then
                    sudo mv "$dir" "/usr/lib/gcc/x86_64-linux-gnu/$counter"
                  else
                    echo "ERROR: Cannot rename GCC $version - /usr/lib/gcc/x86_64-linux-gnu/$counter already exists"
                    exit 1
                  fi
                  counter=$((counter + 1))
                fi
              fi
            done
          fi

          # Verify what Clang will use
          if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
            echo "=== Verifying GCC toolchain selection ==="
            echo "Available GCC versions:"
            ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d.*[0-9]+$" || true

            echo ""
            echo "Clang's detected GCC installation:"
            ${{ matrix.cxx }} -v -E -x c++ /dev/null -o /dev/null 2>&1 | grep "Found candidate GCC installation" || true
          fi

          # Install libc++ dev packages if using libc++ (not needed for libstdc++)
          if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
            sudo apt-get install -y libc++-${{ matrix.compiler_version }}-dev libc++abi-${{ matrix.compiler_version }}-dev
          fi

          # Install Conan 2
          pip install --upgrade "conan>=2.0,<3"

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content
          is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}

      - name: Configure Conan
        run: |
          # Create the default profile directory if it doesn't exist
          mkdir -p ~/.conan2/profiles

          # Determine the correct libcxx based on stdlib parameter
          if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
            LIBCXX="libc++"
          else
            LIBCXX="libstdc++11"
          fi

          # Create profile with our specific settings
          cat > ~/.conan2/profiles/default <<EOF
          [settings]
          arch=x86_64
          build_type=${{ matrix.configuration }}
          compiler=${{ matrix.compiler }}
          compiler.cppstd=20
          compiler.libcxx=${LIBCXX}
          compiler.version=${{ matrix.compiler_version }}
          os=Linux

          [buildenv]
          CC=/usr/bin/${{ matrix.cc }}
          CXX=/usr/bin/${{ matrix.cxx }}

          [conf]
          tools.build:compiler_executables={"c": "/usr/bin/${{ matrix.cc }}", "cpp": "/usr/bin/${{ matrix.cxx }}"}
          EOF

          # Display profile for verification
          conan profile show

      - name: Check environment
        run: |
          echo "PATH:"
          echo "${PATH}" | tr ':' '\n'
          which conan && conan --version || echo "Conan not found"
          which cmake && cmake --version || echo "CMake not found"
          which ${{ matrix.cc }} && ${{ matrix.cc }} --version || echo "${{ matrix.cc }} not found"
          which ${{ matrix.cxx }} && ${{ matrix.cxx }} --version || echo "${{ matrix.cxx }} not found"
          which ccache && ccache --version || echo "ccache not found"
          echo "---- Full Environment ----"
          env

      - name: Install dependencies
        uses: ./.github/actions/xahau-ga-dependencies
        with:
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: ${{ matrix.compiler_id }}
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          stdlib: ${{ matrix.stdlib }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
        with:
          generator: Ninja
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          cc: ${{ matrix.cc }}
          cxx: ${{ matrix.cxx }}
          compiler-id: ${{ matrix.compiler_id }}
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          stdlib: ${{ matrix.stdlib }}
          clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}

      - name: Set artifact name
        id: set-artifact-name
        run: |
          ARTIFACT_NAME="build-output-nix-${{ github.run_id }}-${{ matrix.compiler }}-${{ matrix.configuration }}"
          echo "artifact_name=${ARTIFACT_NAME}" >> "$GITHUB_OUTPUT"
          echo "Using artifact name: ${ARTIFACT_NAME}"

      - name: Debug build directory
        run: |
          echo "Checking build directory contents: ${{ env.build_dir }}"
          ls -la ${{ env.build_dir }} || echo "Build directory not found or empty"

      - name: Run tests
        run: |
          # Ensure the binary exists before trying to run
          if [ -f "${{ env.build_dir }}/rippled" ]; then
            ${{ env.build_dir }}/rippled --unittest --unittest-jobs $(nproc)
          else
            echo "Error: rippled executable not found in ${{ env.build_dir }}"
            exit 1
          fi
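The GCC-hiding trick in the workflow above can be sanity-checked after the renames; a sketch (the step itself is illustrative, Ubuntu x86_64 paths assumed):

# Hypothetical verification step - not part of this commit.
- name: Verify Clang's GCC selection (sketch)
  shell: bash
  run: |
    # Clang < 16 scans this prefix and picks the numerically highest directory:
    ls /usr/lib/gcc/x86_64-linux-gnu/
    # After renaming GCC 12 -> 1 and GCC 13 -> 2, this should report GCC 11:
    clang++-14 -v -E -x c++ /dev/null -o /dev/null 2>&1 \
      | grep 'Selected GCC installation' || true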
Builds/levelization/README.md (modified)
@@ -72,15 +72,15 @@ It generates many files of [results](results):
   desired as described above. In a perfect repo, this file will be
   empty.
   This file is committed to the repo, and is used by the [levelization
-  Github workflow](../../.github/workflows/levelization.yml) to validate
+  Github workflow](../../.github/workflows/levelization.yml.disabled) to validate
   that nothing changed.
 * [`ordering.txt`](results/ordering.txt): A list showing relationships
   between modules where there are no loops as they actually exist, as
   opposed to how they are desired as described above.
   This file is committed to the repo, and is used by the [levelization
-  Github workflow](../../.github/workflows/levelization.yml) to validate
+  Github workflow](../../.github/workflows/levelization.yml.disabled) to validate
   that nothing changed.
-* [`levelization.yml`](../../.github/workflows/levelization.yml)
+* [`levelization.yml`](../../.github/workflows/levelization.yml.disabled)
   Github Actions workflow to test that levelization loops haven't
   changed. Unfortunately, if changes are detected, it can't tell if
   they are improvements or not, so if you have resolved any issues or