Mirror of https://github.com/Xahau/xahaud.git (synced 2025-11-08 04:35:49 +00:00)
Compare commits
4 commits: hook-api-u...actions-ca
| Author | SHA1 | Date |
|---|---|---|
|  | 8b39d0915f |  |
|  | 9ed20a4f1c |  |
|  | 89ffc1969b |  |
|  | 79fdafe638 |  |
.github/actions/xahau-configure-ccache/action.yml (deleted)
@@ -1,63 +0,0 @@
name: 'Configure ccache'
description: 'Sets up ccache with consistent configuration'

inputs:
  max_size:
    description: 'Maximum cache size'
    required: false
    default: '2G'
  hash_dir:
    description: 'Whether to include directory paths in hash'
    required: false
    default: 'true'
  compiler_check:
    description: 'How to check compiler for changes'
    required: false
    default: 'content'
  is_main_branch:
    description: 'Whether the current branch is the main branch'
    required: false
    default: 'false'
  main_cache_dir:
    description: 'Path to the main branch cache directory'
    required: false
    default: '~/.ccache-main'
  current_cache_dir:
    description: 'Path to the current branch cache directory'
    required: false
    default: '~/.ccache-current'

runs:
  using: 'composite'
  steps:
    - name: Configure ccache
      shell: bash
      run: |
        # Create cache directories
        mkdir -p ${{ inputs.main_cache_dir }} ${{ inputs.current_cache_dir }}

        # Set compiler check globally
        ccache -o compiler_check=${{ inputs.compiler_check }}

        # Use a single config file location
        mkdir -p ~/.ccache
        export CONF_PATH="$HOME/.ccache/ccache.conf"

        # Apply common settings
        echo "max_size = ${{ inputs.max_size }}" > "$CONF_PATH"
        echo "hash_dir = ${{ inputs.hash_dir }}" >> "$CONF_PATH"
        echo "compiler_check = ${{ inputs.compiler_check }}" >> "$CONF_PATH"

        if [ "${{ inputs.is_main_branch }}" == "true" ]; then
          # Main branch: use main branch cache
          ccache --set-config=cache_dir="${{ inputs.main_cache_dir }}"
          echo "CCACHE_DIR=${{ inputs.main_cache_dir }}" >> $GITHUB_ENV
        else
          # Feature branch: use current branch cache with main as secondary
          ccache --set-config=cache_dir="${{ inputs.current_cache_dir }}"
          ccache --set-config=secondary_storage="file:${{ inputs.main_cache_dir }}"
          echo "CCACHE_DIR=${{ inputs.current_cache_dir }}" >> $GITHUB_ENV
        fi

        ccache -p  # Print config for verification
        ccache -z  # Zero statistics before the build
.github/actions/xahau-ga-build/action.yml (vendored, 129 lines changed)
@@ -47,6 +47,24 @@ inputs:
    description: 'GCC version to use for Clang toolchain (e.g. 11, 13)'
    required: false
    default: ''
  ccache_max_size:
    description: 'Maximum ccache size'
    required: false
    default: '2G'
  ccache_hash_dir:
    description: 'Whether to include directory paths in hash'
    required: false
    default: 'true'
  ccache_compiler_check:
    description: 'How to check compiler for changes'
    required: false
    default: 'content'
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 cache storage'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 cache storage'
    required: true

runs:
  using: 'composite'
@@ -59,28 +77,44 @@ runs:
        SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
        echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT

    - name: Restore ccache directory for default branch
    - name: Restore ccache directory
      if: inputs.ccache_enabled == 'true'
      id: ccache-restore
      uses: actions/cache/restore@v4
      uses: ./.github/actions/xahau-ga-cache-restore
      with:
        path: ~/.ccache-main
        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
        restore-keys: |
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-

    - name: Restore ccache directory for current branch
      if: inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
      id: ccache-restore-current-branch
      uses: actions/cache/restore@v4
      with:
        path: ~/.ccache-current
        path: ~/.ccache
        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
        restore-keys: |
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
        aws-access-key-id: ${{ inputs.aws-access-key-id }}
        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}

    - name: Configure ccache
      if: inputs.ccache_enabled == 'true'
      shell: bash
      run: |
        # Use ccache's default cache_dir (~/.ccache) - don't override it
        # This avoids tilde expansion issues when setting it explicitly

        # Create cache directory using ccache's default
        mkdir -p ~/.ccache

        # Configure ccache settings (but NOT cache_dir - use default)
        # This overwrites any cached config to ensure fresh configuration
        ccache --set-config=max_size=${{ inputs.ccache_max_size }}
        ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
        ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}

        # Note: Not setting CCACHE_DIR - let ccache use its default (~/.ccache)

        # Print config for verification
        echo "=== ccache configuration ==="
        ccache -p

        # Zero statistics before the build
        ccache -z

    - name: Configure project
      shell: bash
@@ -96,14 +130,27 @@ runs:
        if [ -n "${{ inputs.cxx }}" ]; then
          export CXX="${{ inputs.cxx }}"
        fi

        # Configure ccache launcher args
        CCACHE_ARGS=""

        # Create wrapper toolchain that overlays ccache on top of Conan's toolchain
        # This enables ccache for the main app build without affecting Conan dependency builds
        if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
          CCACHE_ARGS="-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
          cat > wrapper_toolchain.cmake <<'EOF'
        # Include Conan's generated toolchain first (sets compiler, flags, etc.)
        # Note: CMAKE_CURRENT_LIST_DIR is the directory containing this wrapper (.build/)
        include(${CMAKE_CURRENT_LIST_DIR}/build/generators/conan_toolchain.cmake)

        # Overlay ccache configuration for main application build
        # This does NOT affect Conan dependency builds (already completed)
        set(CMAKE_C_COMPILER_LAUNCHER ccache CACHE STRING "C compiler launcher" FORCE)
        set(CMAKE_CXX_COMPILER_LAUNCHER ccache CACHE STRING "C++ compiler launcher" FORCE)
        EOF
          TOOLCHAIN_FILE="wrapper_toolchain.cmake"
          echo "✅ Created wrapper toolchain with ccache enabled"
        else
          TOOLCHAIN_FILE="build/generators/conan_toolchain.cmake"
          echo "ℹ️ Using Conan toolchain directly (ccache disabled)"
        fi

        # Configure C++ standard library if specified
        # libstdcxx used for clang-14/16 to work around missing lexicographical_compare_three_way in libc++
        # libcxx can be used with clang-17+ which has full C++20 support
@@ -143,32 +190,44 @@ runs:
        # So we get: .build/build/generators/ with our non-standard folder name
        cmake .. \
          -G "${{ inputs.generator }}" \
          $CCACHE_ARGS \
          ${CMAKE_CXX_FLAGS:+-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"} \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \
          -DCMAKE_BUILD_TYPE=${{ inputs.configuration }}

    - name: Show ccache config before build
      if: inputs.ccache_enabled == 'true'
      shell: bash
      run: |
        echo "=========================================="
        echo "ccache configuration before build"
        echo "=========================================="
        ccache -p
        echo ""

    - name: Build project
      shell: bash
      run: |
        cd ${{ inputs.build_dir }}
        cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc)

        # Check for verbose build flag in commit message
        VERBOSE_FLAG=""
        if echo "${XAHAU_GA_COMMIT_MSG}" | grep -q '\[ci-ga-cmake-verbose\]'; then
          echo "🔊 [ci-ga-cmake-verbose] detected - enabling verbose output"
          VERBOSE_FLAG="-- -v"
        fi

        cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) ${VERBOSE_FLAG}

    - name: Show ccache statistics
      if: inputs.ccache_enabled == 'true'
      shell: bash
      run: ccache -s

    - name: Save ccache directory for default branch
      if: always() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
      uses: actions/cache/save@v4
    - name: Save ccache directory
      if: success() && inputs.ccache_enabled == 'true'
      uses: ./.github/actions/xahau-ga-cache-save
      with:
        path: ~/.ccache-main
        key: ${{ steps.ccache-restore.outputs.cache-primary-key }}

    - name: Save ccache directory for current branch
      if: always() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
      uses: actions/cache/save@v4
      with:
        path: ~/.ccache-current
        key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}
        path: ~/.ccache
        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
        aws-access-key-id: ${{ inputs.aws-access-key-id }}
        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
.github/actions/xahau-ga-cache-restore/action.yml (vendored, new file, 291 lines)
@@ -0,0 +1,291 @@
name: 'Xahau Cache Restore (S3)'
bump: 1
description: 'Drop-in replacement for actions/cache/restore using S3 storage'

inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache (currently only single path supported)'
    required: true
  key:
    description: 'An explicit key for restoring the cache'
    required: true
  restore-keys:
    description: 'An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key'
    required: false
    default: ''
  s3-bucket:
    description: 'S3 bucket name for cache storage'
    required: false
    default: 'xahaud-github-actions-cache-niq'
  s3-region:
    description: 'S3 region'
    required: false
    default: 'us-east-1'
  fail-on-cache-miss:
    description: 'Fail the workflow if cache entry is not found'
    required: false
    default: 'false'
  lookup-only:
    description: 'Check if a cache entry exists for the given input(s) without downloading it'
    required: false
    default: 'false'
  # Note: Composite actions can't access secrets.* directly - must be passed from workflow
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 access'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 access'
    required: true

outputs:
  cache-hit:
    description: 'A boolean value to indicate an exact match was found for the primary key'
    value: ${{ steps.restore-cache.outputs.cache-hit }}
  cache-primary-key:
    description: 'The key that was used to restore the cache (may be from restore-keys)'
    value: ${{ steps.restore-cache.outputs.cache-primary-key }}
  cache-matched-key:
    description: 'The key that was used to restore the cache (exact or prefix match)'
    value: ${{ steps.restore-cache.outputs.cache-matched-key }}

runs:
  using: 'composite'
  steps:
    - name: Restore cache from S3
      id: restore-cache
      shell: bash
      env:
        AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
        AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
        S3_BUCKET: ${{ inputs.s3-bucket }}
        S3_REGION: ${{ inputs.s3-region }}
        CACHE_KEY: ${{ inputs.key }}
        RESTORE_KEYS: ${{ inputs.restore-keys }}
        TARGET_PATH: ${{ inputs.path }}
        FAIL_ON_MISS: ${{ inputs.fail-on-cache-miss }}
        LOOKUP_ONLY: ${{ inputs.lookup-only }}
      run: |
        set -euo pipefail

        echo "=========================================="
        echo "Xahau Cache Restore (S3)"
        echo "=========================================="
        echo "Target path: ${TARGET_PATH}"
        echo "Cache key: ${CACHE_KEY}"
        echo "S3 bucket: s3://${S3_BUCKET}"
        echo ""

        # Normalize target path (expand tilde)
        if [[ "${TARGET_PATH}" == ~* ]]; then
          TARGET_PATH="${HOME}${TARGET_PATH:1}"
        fi

        # Canonicalize path (Linux only - macOS realpath doesn't support -m)
        if [[ "$OSTYPE" == "linux-gnu"* ]]; then
          TARGET_PATH=$(realpath -m "${TARGET_PATH}")
        fi

        echo "Normalized target path: ${TARGET_PATH}"
        echo ""

        # Debug: Show commit message
        echo "=========================================="
        echo "DEBUG: Cache clear tag detection"
        echo "=========================================="
        echo "Raw commit message:"
        echo "${XAHAU_GA_COMMIT_MSG}"
        echo ""
        echo "Searching for: [ci-ga-clear-cache] or [ci-ga-clear-cache:*]"
        echo ""

        # Check for [ci-ga-clear-cache] tag in commit message (with optional search terms)
        # Examples:
        #   [ci-ga-clear-cache]           - Clear this job's cache
        #   [ci-ga-clear-cache:ccache]    - Clear only if key contains "ccache"
        #   [ci-ga-clear-cache:gcc Debug] - Clear only if key contains both "gcc" AND "Debug"

        # Extract search terms if present (e.g., "ccache" from "[ci-ga-clear-cache:ccache]")
        SEARCH_TERMS=$(echo "${XAHAU_GA_COMMIT_MSG}" | grep -o '\[ci-ga-clear-cache:[^]]*\]' | sed 's/\[ci-ga-clear-cache://;s/\]//' || echo "")

        SHOULD_CLEAR=false

        if [ -n "${SEARCH_TERMS}" ]; then
          # Search terms provided - check if THIS cache key matches ALL terms (AND logic)
          echo "🔍 [ci-ga-clear-cache:${SEARCH_TERMS}] detected"
          echo "Checking if cache key matches search terms..."
          echo "  Cache key: ${CACHE_KEY}"
          echo "  Search terms: ${SEARCH_TERMS}"
          echo ""

          MATCHES=true
          for term in ${SEARCH_TERMS}; do
            if ! echo "${CACHE_KEY}" | grep -q "${term}"; then
              MATCHES=false
              echo "  ✗ Key does not contain '${term}'"
              break
            else
              echo "  ✓ Key contains '${term}'"
            fi
          done

          if [ "${MATCHES}" = "true" ]; then
            echo ""
            echo "✅ Cache key matches all search terms - will clear cache"
            SHOULD_CLEAR=true
          else
            echo ""
            echo "⏭️ Cache key doesn't match search terms - skipping cache clear"
          fi
        elif echo "${XAHAU_GA_COMMIT_MSG}" | grep -q '\[ci-ga-clear-cache\]'; then
          # No search terms - always clear this job's cache
          echo "🗑️ [ci-ga-clear-cache] detected in commit message"
          echo "Clearing cache for key: ${CACHE_KEY}"
          SHOULD_CLEAR=true
        fi

        if [ "${SHOULD_CLEAR}" = "true" ]; then
          echo ""

          # Delete base layer
          S3_BASE_KEY="s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst"
          if aws s3 ls "${S3_BASE_KEY}" --region "${S3_REGION}" >/dev/null 2>&1; then
            echo "Deleting base layer: ${S3_BASE_KEY}"
            aws s3 rm "${S3_BASE_KEY}" --region "${S3_REGION}" 2>/dev/null || true
            echo "✓ Base layer deleted"
          else
            echo "ℹ️ No base layer found to delete"
          fi

          echo ""
          echo "✅ Cache cleared successfully"
          echo "Build will proceed from scratch (bootstrap mode)"
          echo ""
        fi

        # Function to try restoring a cache key
        try_restore_key() {
          local key=$1
          local s3_key="s3://${S3_BUCKET}/${key}-base.tar.zst"

          echo "Checking for key: ${key}"

          if aws s3 ls "${s3_key}" --region "${S3_REGION}" >/dev/null 2>&1; then
            echo "✓ Found cache: ${s3_key}"
            return 0
          else
            echo "✗ Not found: ${key}"
            return 1
          fi
        }

        # Try exact match first
        MATCHED_KEY=""
        EXACT_MATCH="false"

        if try_restore_key "${CACHE_KEY}"; then
          MATCHED_KEY="${CACHE_KEY}"
          EXACT_MATCH="true"
          echo ""
          echo "🎯 Exact cache hit for key: ${CACHE_KEY}"
        else
          # Try restore-keys (prefix matching)
          if [ -n "${RESTORE_KEYS}" ]; then
            echo ""
            echo "Primary key not found, trying restore-keys..."

            while IFS= read -r restore_key; do
              [ -z "${restore_key}" ] && continue
              restore_key=$(echo "${restore_key}" | xargs)

              if try_restore_key "${restore_key}"; then
                MATCHED_KEY="${restore_key}"
                EXACT_MATCH="false"
                echo ""
                echo "✓ Cache restored from fallback key: ${restore_key}"
                break
              fi
            done <<< "${RESTORE_KEYS}"
          fi
        fi

        # Check if we found anything
        if [ -z "${MATCHED_KEY}" ]; then
          echo ""
          echo "❌ No cache found for key: ${CACHE_KEY}"

          if [ "${FAIL_ON_MISS}" = "true" ]; then
            echo "fail-on-cache-miss is enabled, failing workflow"
            exit 1
          fi

          # Set outputs for cache miss
          echo "cache-hit=false" >> $GITHUB_OUTPUT
          echo "cache-primary-key=" >> $GITHUB_OUTPUT
          echo "cache-matched-key=" >> $GITHUB_OUTPUT

          # Create empty cache directory
          mkdir -p "${TARGET_PATH}"

          echo ""
          echo "=========================================="
          echo "Cache restore completed (bootstrap mode)"
          echo "Created empty cache directory: ${TARGET_PATH}"
          echo "=========================================="
          exit 0
        fi

        # If lookup-only, we're done
        if [ "${LOOKUP_ONLY}" = "true" ]; then
          echo "cache-hit=${EXACT_MATCH}" >> $GITHUB_OUTPUT
          echo "cache-primary-key=${CACHE_KEY}" >> $GITHUB_OUTPUT
          echo "cache-matched-key=${MATCHED_KEY}" >> $GITHUB_OUTPUT

          echo ""
          echo "=========================================="
          echo "Cache lookup completed (lookup-only mode)"
          echo "Cache exists: ${MATCHED_KEY}"
          echo "=========================================="
          exit 0
        fi

        # Download and extract cache
        S3_KEY="s3://${S3_BUCKET}/${MATCHED_KEY}-base.tar.zst"
        TEMP_TARBALL="/tmp/xahau-cache-restore-$$.tar.zst"

        echo ""
        echo "Downloading cache..."
        aws s3 cp "${S3_KEY}" "${TEMP_TARBALL}" --region "${S3_REGION}" --no-progress

        TARBALL_SIZE=$(du -h "${TEMP_TARBALL}" | cut -f1)
        echo "✓ Downloaded: ${TARBALL_SIZE}"

        # Create parent directory if needed
        mkdir -p "$(dirname "${TARGET_PATH}")"

        # Remove existing target if it exists
        if [ -e "${TARGET_PATH}" ]; then
          echo "Removing existing target: ${TARGET_PATH}"
          rm -rf "${TARGET_PATH}"
        fi

        # Create target directory and extract
        mkdir -p "${TARGET_PATH}"
        echo ""
        echo "Extracting cache..."
        zstd -d -c "${TEMP_TARBALL}" | tar -xf - -C "${TARGET_PATH}"
        echo "✓ Cache extracted to: ${TARGET_PATH}"

        # Cleanup
        rm -f "${TEMP_TARBALL}"

        # Set outputs
        echo "cache-hit=${EXACT_MATCH}" >> $GITHUB_OUTPUT
        echo "cache-primary-key=${CACHE_KEY}" >> $GITHUB_OUTPUT
        echo "cache-matched-key=${MATCHED_KEY}" >> $GITHUB_OUTPUT

        echo ""
        echo "=========================================="
        echo "Cache restore completed successfully"
        echo "Cache hit: ${EXACT_MATCH}"
        echo "Matched key: ${MATCHED_KEY}"
        echo "=========================================="
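For reference, a minimal sketch of how a workflow job might call this action (hypothetical step names and key values; the bucket default and the AWS secret names come from elsewhere in this diff):

      - name: Restore ccache from S3
        id: ccache-restore
        uses: ./.github/actions/xahau-ga-cache-restore
        with:
          path: ~/.ccache
          key: ${{ runner.os }}-ccache-v3-gcc-Release-dev
          restore-keys: |
            ${{ runner.os }}-ccache-v3-gcc-Release-
            ${{ runner.os }}-ccache-v3-gcc-
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Report cache result
        shell: bash
        run: |
          # Outputs mirror actions/cache/restore: exact hit vs prefix fallback
          echo "Exact hit:   ${{ steps.ccache-restore.outputs.cache-hit }}"
          echo "Matched key: ${{ steps.ccache-restore.outputs.cache-matched-key }}"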
.github/actions/xahau-ga-cache-save/action.yml (vendored, new file, 110 lines)
@@ -0,0 +1,110 @@
name: 'Xahau Cache Save (S3)'
description: 'Drop-in replacement for actions/cache/save using S3 storage'

inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache (currently only single path supported)'
    required: true
  key:
    description: 'An explicit key for saving the cache'
    required: true
  s3-bucket:
    description: 'S3 bucket name for cache storage'
    required: false
    default: 'xahaud-github-actions-cache-niq'
  s3-region:
    description: 'S3 region'
    required: false
    default: 'us-east-1'
  # Note: Composite actions can't access secrets.* directly - must be passed from workflow
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 access'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 access'
    required: true

runs:
  using: 'composite'
  steps:
    - name: Save cache to S3
      shell: bash
      env:
        AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
        AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
        S3_BUCKET: ${{ inputs.s3-bucket }}
        S3_REGION: ${{ inputs.s3-region }}
        CACHE_KEY: ${{ inputs.key }}
        TARGET_PATH: ${{ inputs.path }}
      run: |
        set -euo pipefail

        echo "=========================================="
        echo "Xahau Cache Save (S3)"
        echo "=========================================="
        echo "Target path: ${TARGET_PATH}"
        echo "Cache key: ${CACHE_KEY}"
        echo "S3 bucket: s3://${S3_BUCKET}"
        echo ""

        # Normalize target path (expand tilde and resolve to absolute path)
        if [[ "${TARGET_PATH}" == ~* ]]; then
          TARGET_PATH="${HOME}${TARGET_PATH:1}"
        fi
        echo "Normalized target path: ${TARGET_PATH}"
        echo ""

        # Check if target directory exists
        if [ ! -d "${TARGET_PATH}" ]; then
          echo "⚠️ Target directory does not exist: ${TARGET_PATH}"
          echo "Skipping cache save."
          exit 0
        fi

        # Use static base name (one base per key, immutable)
        S3_BASE_KEY="s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst"

        # Check if base already exists (immutability - first write wins)
        if aws s3 ls "${S3_BASE_KEY}" --region "${S3_REGION}" >/dev/null 2>&1; then
          echo "⚠️ Cache already exists: ${S3_BASE_KEY}"
          echo "Skipping upload (immutability - first write wins, like GitHub Actions)"
          echo ""
          echo "=========================================="
          echo "Cache save completed (already exists)"
          echo "=========================================="
          exit 0
        fi

        # Create tarball
        BASE_TARBALL="/tmp/xahau-cache-base-$$.tar.zst"

        echo "Creating cache tarball..."
        tar -cf - -C "${TARGET_PATH}" . | zstd -3 -T0 -q -o "${BASE_TARBALL}"

        BASE_SIZE=$(du -h "${BASE_TARBALL}" | cut -f1)
        echo "✓ Cache tarball created: ${BASE_SIZE}"
        echo ""

        # Upload to S3
        echo "Uploading cache to S3..."
        echo "  Key: ${CACHE_KEY}-base.tar.zst"

        aws s3api put-object \
          --bucket "${S3_BUCKET}" \
          --key "${CACHE_KEY}-base.tar.zst" \
          --body "${BASE_TARBALL}" \
          --tagging 'type=base' \
          --region "${S3_REGION}" \
          >/dev/null 2>&1

        echo "✓ Uploaded: ${S3_BASE_KEY}"

        # Cleanup
        rm -f "${BASE_TARBALL}"

        echo ""
        echo "=========================================="
        echo "Cache save completed successfully"
        echo "Cache size: ${BASE_SIZE}"
        echo "Cache key: ${CACHE_KEY}"
        echo "=========================================="
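Since a written key is immutable (first write wins), a paired save step would typically be guarded on the restore result; a hedged sketch under the same assumptions as the restore example above (the guard is optional, because a duplicate upload is skipped server-side anyway, it merely avoids the tarball work):

      - name: Save ccache to S3
        if: success() && steps.ccache-restore.outputs.cache-hit != 'true'
        uses: ./.github/actions/xahau-ga-cache-save
        with:
          path: ~/.ccache
          key: ${{ runner.os }}-ccache-v3-gcc-Release-dev
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}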
.github/actions/xahau-ga-dependencies/action.yml (vendored, 127 lines changed)
@@ -25,6 +25,28 @@ inputs:
    description: 'Main branch name for restore keys'
    required: false
    default: 'dev'
  os:
    description: 'Operating system (Linux, Macos)'
    required: false
    default: 'Linux'
  arch:
    description: 'Architecture (x86_64, armv8)'
    required: false
    default: 'x86_64'
  compiler:
    description: 'Compiler type (gcc, clang, apple-clang)'
    required: true
  compiler_version:
    description: 'Compiler version (11, 13, 14, etc.)'
    required: true
  cc:
    description: 'C compiler executable (gcc-13, clang-14, etc.), empty for macOS'
    required: false
    default: ''
  cxx:
    description: 'C++ compiler executable (g++-14, clang++-14, etc.), empty for macOS'
    required: false
    default: ''
  stdlib:
    description: 'C++ standard library for Conan configuration (note: also in compiler-id)'
    required: true
@@ -32,6 +54,12 @@ inputs:
    options:
      - libstdcxx
      - libcxx
  aws-access-key-id:
    description: 'AWS Access Key ID for S3 cache storage'
    required: true
  aws-secret-access-key:
    description: 'AWS Secret Access Key for S3 cache storage'
    required: true

outputs:
  cache-hit:
@@ -41,47 +69,72 @@ outputs:
runs:
  using: 'composite'
  steps:
    - name: Generate safe branch name
      if: inputs.cache_enabled == 'true'
      id: safe-branch
      shell: bash
      run: |
        SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
        echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT

    - name: Check conanfile changes
      if: inputs.cache_enabled == 'true'
      id: check-conanfile-changes
      shell: bash
      run: |
        # Check if we're on the main branch
        if [ "${{ github.ref_name }}" == "${{ inputs.main_branch }}" ]; then
          echo "should-save-conan-cache=true" >> $GITHUB_OUTPUT
        else
          # Fetch main branch for comparison
          git fetch origin ${{ inputs.main_branch }}

          # Check if conanfile.txt or conanfile.py has changed compared to main branch
          if git diff --quiet origin/${{ inputs.main_branch }}..HEAD -- '**/conanfile.txt' '**/conanfile.py'; then
            echo "should-save-conan-cache=false" >> $GITHUB_OUTPUT
          else
            echo "should-save-conan-cache=true" >> $GITHUB_OUTPUT
          fi
        fi

    - name: Restore Conan cache
      if: inputs.cache_enabled == 'true'
      id: cache-restore-conan
      uses: actions/cache/restore@v4
      uses: ./.github/actions/xahau-ga-cache-restore
      with:
        path: |
          ~/.conan
          ~/.conan2
        path: ~/.conan2
        # Note: compiler-id format is compiler-version-stdlib[-gccversion]
        key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
        restore-keys: |
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
        aws-access-key-id: ${{ inputs.aws-access-key-id }}
        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}

    - name: Configure Conan
      shell: bash
      run: |
        # Create the default profile directory if it doesn't exist
        mkdir -p ~/.conan2/profiles

        # Determine the correct libcxx based on stdlib parameter
        if [ "${{ inputs.stdlib }}" = "libcxx" ]; then
          LIBCXX="libc++"
        else
          LIBCXX="libstdc++11"
        fi

        # Create profile with our specific settings
        # This overwrites any cached profile to ensure fresh configuration
        cat > ~/.conan2/profiles/default <<EOF
        [settings]
        arch=${{ inputs.arch }}
        build_type=${{ inputs.configuration }}
        compiler=${{ inputs.compiler }}
        compiler.cppstd=20
        compiler.libcxx=${LIBCXX}
        compiler.version=${{ inputs.compiler_version }}
        os=${{ inputs.os }}
        EOF

        # Add buildenv and conf sections for Linux (not needed for macOS)
        if [ "${{ inputs.os }}" = "Linux" ] && [ -n "${{ inputs.cc }}" ]; then
          cat >> ~/.conan2/profiles/default <<EOF

        [buildenv]
        CC=/usr/bin/${{ inputs.cc }}
        CXX=/usr/bin/${{ inputs.cxx }}

        [conf]
        tools.build:compiler_executables={"c": "/usr/bin/${{ inputs.cc }}", "cpp": "/usr/bin/${{ inputs.cxx }}"}
        EOF
        fi

        # Add macOS-specific conf if needed
        if [ "${{ inputs.os }}" = "Macos" ]; then
          cat >> ~/.conan2/profiles/default <<EOF

        [conf]
        # Workaround for gRPC with newer Apple Clang
        tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
        EOF
        fi

        # Display profile for verification
        conan profile show

    - name: Export custom recipes
      shell: bash
@@ -107,10 +160,10 @@ runs:
          ..

    - name: Save Conan cache
      if: always() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true' && steps.check-conanfile-changes.outputs.should-save-conan-cache == 'true'
      uses: actions/cache/save@v4
      if: success() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
      uses: ./.github/actions/xahau-ga-cache-save
      with:
        path: |
          ~/.conan
          ~/.conan2
        key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}
        path: ~/.conan2
        key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
        aws-access-key-id: ${{ inputs.aws-access-key-id }}
        aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
.github/actions/xahau-ga-get-commit-message/action.yml (vendored, new file, 73 lines)
@@ -0,0 +1,73 @@
name: 'Get Commit Message'
description: 'Gets commit message for both push and pull_request events and sets XAHAU_GA_COMMIT_MSG env var'

inputs:
  event-name:
    description: 'The event name (push or pull_request)'
    required: true
  head-commit-message:
    description: 'The head commit message (for push events)'
    required: false
    default: ''
  pr-head-sha:
    description: 'The PR head SHA (for pull_request events)'
    required: false
    default: ''

runs:
  using: 'composite'
  steps:
    - name: Get commit message and set environment variable
      shell: python
      run: |
        import os
        import subprocess
        import secrets

        event_name = "${{ inputs.event-name }}"
        pr_head_sha = "${{ inputs.pr-head-sha }}"

        print("==========================================")
        print("Setting XAHAU_GA_COMMIT_MSG environment variable")
        print("==========================================")
        print(f"Event: {event_name}")

        if event_name == 'push':
            # For push events, use the input directly
            message = """${{ inputs.head-commit-message }}"""
            print("Source: workflow input (github.event.head_commit.message)")
        elif event_name == 'pull_request':
            # For PR events, fetch the specific SHA
            print(f"Source: git show {pr_head_sha} (fetching PR head commit)")

            # Fetch the PR head commit
            subprocess.run(
                ['git', 'fetch', 'origin', pr_head_sha],
                check=True
            )

            # Get commit message from the fetched SHA
            result = subprocess.run(
                ['git', 'show', '-s', '--format=%B', pr_head_sha],
                capture_output=True,
                text=True,
                check=True
            )
            message = result.stdout.strip()
        else:
            message = ""
            print(f"Warning: Unknown event type: {event_name}")

        print(f"Commit message (first 100 chars): {message[:100]}")

        # Write to GITHUB_ENV using heredoc with random delimiter (prevents injection attacks)
        # See: https://securitylab.github.com/resources/github-actions-untrusted-input/
        delimiter = f"EOF_{secrets.token_hex(16)}"

        with open(os.environ['GITHUB_ENV'], 'a') as f:
            f.write(f'XAHAU_GA_COMMIT_MSG<<{delimiter}\n')
            f.write(message)
            f.write(f'\n{delimiter}\n')

        print(f"✓ XAHAU_GA_COMMIT_MSG set (available to all subsequent steps)")
        print("==========================================")
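Any later step in the same job can then branch on the exported variable; a minimal sketch (hypothetical step name; the [ci-ga-cmake-verbose] tag is the one checked in the build action above):

      - name: React to commit tags
        shell: bash
        run: |
          # XAHAU_GA_COMMIT_MSG was written to GITHUB_ENV by the action above
          if echo "${XAHAU_GA_COMMIT_MSG}" | grep -q '\[ci-ga-cmake-verbose\]'; then
            echo "Verbose CMake output requested"
          fi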
.github/workflows/xahau-ga-macos.yml (vendored, 54 lines changed)
@@ -30,6 +30,14 @@ jobs:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Get commit message
        id: get-commit-message
        uses: ./.github/actions/xahau-ga-get-commit-message
        with:
          event-name: ${{ github.event_name }}
          head-commit-message: ${{ github.event.head_commit.message }}
          pr-head-sha: ${{ github.event.pull_request.head.sha }}

      - name: Install Conan
        run: |
          brew install conan
@@ -78,14 +86,6 @@ jobs:
      - name: Install ccache
        run: brew install ccache

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content
          is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}

      - name: Check environment
        run: |
          echo "PATH:"
@@ -98,32 +98,12 @@ jobs:
          echo "---- Full Environment ----"
          env

      - name: Configure Conan
      - name: Detect compiler version
        id: detect-compiler
        run: |
          # Create the default profile directory if it doesn't exist
          mkdir -p ~/.conan2/profiles

          # Detect compiler version
          COMPILER_VERSION=$(clang --version | grep -oE 'version [0-9]+' | grep -oE '[0-9]+')

          # Create profile with our specific settings
          cat > ~/.conan2/profiles/default <<EOF
          [settings]
          arch=armv8
          build_type=Release
          compiler=apple-clang
          compiler.cppstd=20
          compiler.libcxx=libc++
          compiler.version=${COMPILER_VERSION}
          os=Macos

          [conf]
          # Workaround for gRPC with newer Apple Clang
          tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
          EOF

          # Display profile for verification
          conan profile show
          echo "compiler_version=${COMPILER_VERSION}" >> $GITHUB_OUTPUT
          echo "Detected Apple Clang version: ${COMPILER_VERSION}"

      - name: Install dependencies
        uses: ./.github/actions/xahau-ga-dependencies
@@ -133,6 +113,13 @@ jobs:
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          os: Macos
          arch: armv8
          compiler: apple-clang
          compiler_version: ${{ steps.detect-compiler.outputs.compiler_version }}
          stdlib: libcxx
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
@@ -143,6 +130,9 @@ jobs:
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          stdlib: libcxx
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Test
        run: |
.github/workflows/xahau-ga-nix.yml (vendored, 62 lines changed)
@@ -2,7 +2,7 @@ name: Nix - GA Runner
on:
  push:
    branches: ["dev", "candidate", "release"]
    branches: ["dev", "candidate", "release", "nd-experiment-overlayfs-2025-10-29"]
  pull_request:
    branches: ["dev", "candidate", "release"]
  schedule:
@@ -156,12 +156,20 @@ jobs:
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.
      CACHE_VERSION: 2
      CACHE_VERSION: 3
      MAIN_BRANCH_NAME: dev
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Get commit message
        id: get-commit-message
        uses: ./.github/actions/xahau-ga-get-commit-message
        with:
          event-name: ${{ github.event_name }}
          head-commit-message: ${{ github.event.head_commit.message }}
          pr-head-sha: ${{ github.event.pull_request.head.sha }}

      - name: Install build dependencies
        run: |
          sudo apt-get update
@@ -231,48 +239,6 @@ jobs:
          # Install Conan 2
          pip install --upgrade "conan>=2.0,<3"

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content
          is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}

      - name: Configure Conan
        run: |
          # Create the default profile directory if it doesn't exist
          mkdir -p ~/.conan2/profiles

          # Determine the correct libcxx based on stdlib parameter
          if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
            LIBCXX="libc++"
          else
            LIBCXX="libstdc++11"
          fi

          # Create profile with our specific settings
          cat > ~/.conan2/profiles/default <<EOF
          [settings]
          arch=x86_64
          build_type=${{ matrix.configuration }}
          compiler=${{ matrix.compiler }}
          compiler.cppstd=20
          compiler.libcxx=${LIBCXX}
          compiler.version=${{ matrix.compiler_version }}
          os=Linux

          [buildenv]
          CC=/usr/bin/${{ matrix.cc }}
          CXX=/usr/bin/${{ matrix.cxx }}

          [conf]
          tools.build:compiler_executables={"c": "/usr/bin/${{ matrix.cc }}", "cpp": "/usr/bin/${{ matrix.cxx }}"}
          EOF

          # Display profile for verification
          conan profile show

      - name: Check environment
        run: |
          echo "PATH:"
@@ -293,7 +259,13 @@ jobs:
          compiler-id: ${{ matrix.compiler_id }}
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          compiler: ${{ matrix.compiler }}
          compiler_version: ${{ matrix.compiler_version }}
          cc: ${{ matrix.cc }}
          cxx: ${{ matrix.cxx }}
          stdlib: ${{ matrix.stdlib }}
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
@@ -308,6 +280,8 @@ jobs:
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          stdlib: ${{ matrix.stdlib }}
          clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
          aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}

      - name: Set artifact name
        id: set-artifact-name
@@ -458,6 +458,7 @@ target_sources (rippled PRIVATE
    src/ripple/app/tx/impl/CreateOffer.cpp
    src/ripple/app/tx/impl/CreateTicket.cpp
    src/ripple/app/tx/impl/Cron.cpp
    src/ripple/app/tx/impl/CronSet.cpp
    src/ripple/app/tx/impl/DeleteAccount.cpp
    src/ripple/app/tx/impl/DepositPreauth.cpp
    src/ripple/app/tx/impl/Escrow.cpp
@@ -475,7 +476,6 @@ target_sources (rippled PRIVATE
    src/ripple/app/tx/impl/Payment.cpp
    src/ripple/app/tx/impl/Remit.cpp
    src/ripple/app/tx/impl/SetAccount.cpp
    src/ripple/app/tx/impl/SetCron.cpp
    src/ripple/app/tx/impl/SetHook.cpp
    src/ripple/app/tx/impl/SetRemarks.cpp
    src/ripple/app/tx/impl/SetRegularKey.cpp
@@ -37,6 +37,7 @@
#define KEYLET_NFT_OFFER 23
#define KEYLET_HOOK_DEFINITION 24
#define KEYLET_HOOK_STATE_DIR 25
#define KEYLET_CRON 26

#define COMPARE_EQUAL 1U
#define COMPARE_LESS 2U
@@ -278,8 +278,7 @@ enum keylet_code : uint32_t {
    NFT_OFFER = 23,
    HOOK_DEFINITION = 24,
    HOOK_STATE_DIR = 25,
    LAST_KLTYPE_V0 = HOOK_DEFINITION,
    LAST_KLTYPE_V1 = HOOK_STATE_DIR,
    CRON = 26
};
}
@@ -2903,17 +2903,6 @@ DEFINE_HOOK_FUNCTION(
    if (write_len < 34)
        return TOO_SMALL;

    bool const v1 = applyCtx.view().rules().enabled(featureHooksUpdate1);

    if (keylet_type == 0)
        return INVALID_ARGUMENT;

    auto const last =
        v1 ? keylet_code::LAST_KLTYPE_V1 : keylet_code::LAST_KLTYPE_V0;

    if (keylet_type > last)
        return INVALID_ARGUMENT;

    try
    {
        switch (keylet_type)
@@ -3015,7 +3004,8 @@ DEFINE_HOOK_FUNCTION(
            return serialize_keylet(kl, memory, write_ptr, write_len);
        }

        // keylets that take 20 byte account id, and 4 byte uint
        // keylets that take 20 byte account id, and (4 byte uint for 32
        // byte hash)
        case keylet_code::OFFER:
        case keylet_code::CHECK:
        case keylet_code::ESCROW:
@@ -3058,6 +3048,33 @@ DEFINE_HOOK_FUNCTION(
            return serialize_keylet(kl, memory, write_ptr, write_len);
        }

        // keylets that take 20 byte account id, and 4 byte uint
        case keylet_code::CRON: {
            if (!applyCtx.view().rules().enabled(featureCron))
                return INVALID_ARGUMENT;

            if (a == 0 || b == 0)
                return INVALID_ARGUMENT;
            if (e != 0 || f != 0 || d != 0)
                return INVALID_ARGUMENT;

            uint32_t read_ptr = a, read_len = b;

            if (NOT_IN_BOUNDS(read_ptr, read_len, memory_length))
                return OUT_OF_BOUNDS;

            if (read_len != 20)
                return INVALID_ARGUMENT;

            ripple::AccountID id = AccountID::fromVoid(memory + read_ptr);

            uint32_t seq = c;

            ripple::Keylet kl = ripple::keylet::cron(seq, id);

            return serialize_keylet(kl, memory, write_ptr, write_len);
        }

        // keylets that take a 32 byte uint and an 8byte uint64
        case keylet_code::PAGE: {
            if (a == 0 || b == 0)
@@ -3105,6 +3122,9 @@ DEFINE_HOOK_FUNCTION(
        }

        case keylet_code::HOOK_STATE_DIR: {
            if (!applyCtx.view().rules().enabled(featureHooksUpdate1))
                return INVALID_ARGUMENT;

            if (a == 0 || b == 0 || c == 0 || d == 0)
                return INVALID_ARGUMENT;
@@ -3279,7 +3299,7 @@ DEFINE_HOOK_FUNCTION(
        return INTERNAL_ERROR;
    }

    return NO_SUCH_KEYLET;
    return INVALID_ARGUMENT;

    HOOK_TEARDOWN();
}
@@ -17,7 +17,7 @@
 */
//==============================================================================

#include <ripple/app/tx/impl/SetCron.h>
#include <ripple/app/tx/impl/CronSet.h>
#include <ripple/basics/Log.h>
#include <ripple/ledger/View.h>
#include <ripple/protocol/Feature.h>
@@ -28,13 +28,13 @@
namespace ripple {

TxConsequences
SetCron::makeTxConsequences(PreflightContext const& ctx)
CronSet::makeTxConsequences(PreflightContext const& ctx)
{
    return TxConsequences{ctx.tx, TxConsequences::normal};
}

NotTEC
SetCron::preflight(PreflightContext const& ctx)
CronSet::preflight(PreflightContext const& ctx)
{
    if (!ctx.rules.enabled(featureCron))
        return temDISABLED;
@@ -47,7 +47,7 @@ SetCron::preflight(PreflightContext const& ctx)
    if (tx.getFlags() & tfCronSetMask)
    {
        JLOG(j.warn()) << "SetCron: Invalid flags set.";
        JLOG(j.warn()) << "CronSet: Invalid flags set.";
        return temINVALID_FLAG;
    }
@@ -69,7 +69,7 @@ SetCron::preflight(PreflightContext const& ctx)
    // delete operation
    if (hasDelay || hasRepeat || hasStartTime)
    {
        JLOG(j.debug()) << "SetCron: tfCronUnset flag cannot be used with "
        JLOG(j.debug()) << "CronSet: tfCronUnset flag cannot be used with "
                           "DelaySeconds, RepeatCount or StartTime.";
        return temMALFORMED;
    }
@@ -81,7 +81,7 @@ SetCron::preflight(PreflightContext const& ctx)
    if (!hasStartTime)
    {
        JLOG(j.debug())
            << "SetCron: StartTime is required. Use StartTime=0 for "
            << "CronSet: StartTime is required. Use StartTime=0 for "
               "immediate execution, or specify a future timestamp.";
        return temMALFORMED;
    }
@@ -89,7 +89,7 @@ SetCron::preflight(PreflightContext const& ctx)
    if ((!hasDelay && hasRepeat) || (hasDelay && !hasRepeat))
    {
        JLOG(j.debug())
            << "SetCron: DelaySeconds and RepeatCount must both be present "
            << "CronSet: DelaySeconds and RepeatCount must both be present "
               "for recurring crons, or both absent for one-off crons.";
        return temMALFORMED;
    }
@@ -101,7 +101,7 @@ SetCron::preflight(PreflightContext const& ctx)
    if (delay > 31536000UL /* 365 days in seconds */)
    {
        JLOG(j.debug())
            << "SetCron: DelaySeconds was too high. (max 365 "
            << "CronSet: DelaySeconds was too high. (max 365 "
               "days in seconds).";
        return temMALFORMED;
    }
@@ -114,7 +114,7 @@ SetCron::preflight(PreflightContext const& ctx)
    if (recur == 0)
    {
        JLOG(j.debug())
            << "SetCron: RepeatCount must be greater than 0."
            << "CronSet: RepeatCount must be greater than 0."
               "For one-time execution, omit DelaySeconds and "
               "RepeatCount.";
        return temMALFORMED;
@@ -122,8 +122,8 @@ SetCron::preflight(PreflightContext const& ctx)
    if (recur > 256)
    {
        JLOG(j.debug())
            << "SetCron: RepeatCount too high. Limit is 256. Issue "
               "new SetCron to increase.";
            << "CronSet: RepeatCount too high. Limit is 256. Issue "
               "new CronSet to increase.";
        return temMALFORMED;
    }
}
@@ -133,7 +133,7 @@ SetCron::preflight(PreflightContext const& ctx)
}

TER
SetCron::preclaim(PreclaimContext const& ctx)
CronSet::preclaim(PreclaimContext const& ctx)
{
    if (ctx.tx.isFieldPresent(sfStartTime) &&
        ctx.tx.getFieldU32(sfStartTime) != 0)
@@ -146,7 +146,7 @@ SetCron::preclaim(PreclaimContext const& ctx)
        if (startTime < parentCloseTime)
        {
            JLOG(ctx.j.debug()) << "SetCron: StartTime must be in the future "
            JLOG(ctx.j.debug()) << "CronSet: StartTime must be in the future "
                                   "(or 0 for immediate execution)";
            return tecEXPIRED;
        }
@@ -154,7 +154,7 @@ SetCron::preclaim(PreclaimContext const& ctx)
        if (startTime > ctx.view.parentCloseTime().time_since_epoch().count() +
                365 * 24 * 60 * 60)
        {
            JLOG(ctx.j.debug()) << "SetCron: StartTime is too far in the "
            JLOG(ctx.j.debug()) << "CronSet: StartTime is too far in the "
                                   "future (max 365 days).";
            return tecEXPIRED;
        }
@@ -163,7 +163,7 @@ SetCron::preclaim(PreclaimContext const& ctx)
}

TER
SetCron::doApply()
CronSet::doApply()
{
    auto& view = ctx_.view();
    auto const& tx = ctx_.tx;
@@ -205,21 +205,21 @@ SetCron::doApply()
        auto sleCron = view.peek(klOld);
        if (!sleCron)
        {
            JLOG(j_.warn()) << "SetCron: Cron object didn't exist.";
            JLOG(j_.warn()) << "CronSet: Cron object didn't exist.";
            return tefBAD_LEDGER;
        }

        if (safe_cast<LedgerEntryType>(
                sleCron->getFieldU16(sfLedgerEntryType)) != ltCRON)
        {
            JLOG(j_.warn()) << "SetCron: sfCron pointed to non-cron object!!";
            JLOG(j_.warn()) << "CronSet: sfCron pointed to non-cron object!!";
            return tefBAD_LEDGER;
        }

        if (!view.dirRemove(
                keylet::ownerDir(id), (*sleCron)[sfOwnerNode], klOld, false))
        {
            JLOG(j_.warn()) << "SetCron: Ownerdir bad. " << id;
            JLOG(j_.warn()) << "CronSet: Ownerdir bad. " << id;
            return tefBAD_LEDGER;
        }
@@ -278,7 +278,7 @@ SetCron::doApply()
}

XRPAmount
SetCron::calculateBaseFee(ReadView const& view, STTx const& tx)
CronSet::calculateBaseFee(ReadView const& view, STTx const& tx)
{
    auto const baseFee = Transactor::calculateBaseFee(view, tx);
@@ -290,7 +290,7 @@ SetCron::calculateBaseFee(ReadView const& view, STTx const& tx)
        tx.isFieldPresent(sfRepeatCount) ? tx.getFieldU32(sfRepeatCount) : 0;

    // factor a cost based on the total number of txns expected
    // for RepeatCount of 0 we have this txn (SetCron) and the
    // for RepeatCount of 0 we have this txn (CronSet) and the
    // single Cron txn (2). For a RepeatCount of 1 we have this txn,
    // the first time the cron executes, and the second time (3).
    uint32_t const additionalExpectedExecutions = 1 + repeatCount;
@@ -17,8 +17,8 @@
 */
//==============================================================================

#ifndef RIPPLE_TX_SETCRON_H_INCLUDED
#define RIPPLE_TX_SETCRON_H_INCLUDED
#ifndef RIPPLE_TX_CRONSET_H_INCLUDED
#define RIPPLE_TX_CRONSET_H_INCLUDED

#include <ripple/app/tx/impl/Transactor.h>
#include <ripple/basics/Log.h>
|
namespace ripple {

class SetCron : public Transactor
class CronSet : public Transactor
{
public:
    static constexpr ConsequencesFactoryType ConsequencesFactory{Custom};

    explicit SetCron(ApplyContext& ctx) : Transactor(ctx)
    explicit CronSet(ApplyContext& ctx) : Transactor(ctx)
    {
    }
@@ -29,6 +29,7 @@
#include <ripple/app/tx/impl/CreateOffer.h>
#include <ripple/app/tx/impl/CreateTicket.h>
#include <ripple/app/tx/impl/Cron.h>
#include <ripple/app/tx/impl/CronSet.h>
#include <ripple/app/tx/impl/DeleteAccount.h>
#include <ripple/app/tx/impl/DepositPreauth.h>
#include <ripple/app/tx/impl/Escrow.h>
@@ -44,7 +45,6 @@
#include <ripple/app/tx/impl/Payment.h>
#include <ripple/app/tx/impl/Remit.h>
#include <ripple/app/tx/impl/SetAccount.h>
#include <ripple/app/tx/impl/SetCron.h>
#include <ripple/app/tx/impl/SetHook.h>
#include <ripple/app/tx/impl/SetRegularKey.h>
#include <ripple/app/tx/impl/SetRemarks.h>
@@ -184,7 +184,7 @@ invoke_preflight(PreflightContext const& ctx)
        case ttURITOKEN_CANCEL_SELL_OFFER:
            return invoke_preflight_helper<URIToken>(ctx);
        case ttCRON_SET:
            return invoke_preflight_helper<SetCron>(ctx);
            return invoke_preflight_helper<CronSet>(ctx);
        case ttCRON:
            return invoke_preflight_helper<Cron>(ctx);
        default:
@@ -313,7 +313,7 @@ invoke_preclaim(PreclaimContext const& ctx)
        case ttURITOKEN_CANCEL_SELL_OFFER:
            return invoke_preclaim<URIToken>(ctx);
        case ttCRON_SET:
            return invoke_preclaim<SetCron>(ctx);
            return invoke_preclaim<CronSet>(ctx);
        case ttCRON:
            return invoke_preclaim<Cron>(ctx);
        default:
@@ -404,7 +404,7 @@ invoke_calculateBaseFee(ReadView const& view, STTx const& tx)
        case ttURITOKEN_CANCEL_SELL_OFFER:
            return URIToken::calculateBaseFee(view, tx);
        case ttCRON_SET:
            return SetCron::calculateBaseFee(view, tx);
            return CronSet::calculateBaseFee(view, tx);
        case ttCRON:
            return Cron::calculateBaseFee(view, tx);
        default:
@@ -601,7 +601,7 @@ invoke_apply(ApplyContext& ctx)
            return p();
        }
        case ttCRON_SET: {
            SetCron p(ctx);
            CronSet p(ctx);
            return p();
        }
        case ttCRON: {
@@ -376,6 +376,8 @@ LedgerFormats::LedgerFormats()
            {sfDelaySeconds, soeREQUIRED},
            {sfRepeatCount, soeREQUIRED},
            {sfOwnerNode, soeREQUIRED},
            {sfPreviousTxnID, soeREQUIRED},
            {sfPreviousTxnLgrSeq, soeREQUIRED}
        },
        commonFields);
@@ -11147,6 +11147,7 @@ public:
#define KEYLET_PAYCHAN 21
#define KEYLET_EMITTED_TXN 22
#define KEYLET_NFT_OFFER 23
#define KEYLET_CRON 26
#define ASSERT(x)\
    if (!(x))\
        rollback((uint32_t)#x, sizeof(#x), __LINE__);
@@ -11209,6 +11210,9 @@ public:
        // Test min size
        ASSERT(util_keylet((uint32_t)buf, 33, KEYLET_SKIP, 0,0,0,0,0,0) == TOO_SMALL);

        // Invalid keylet type
        ASSERT(util_keylet((uint32_t)buf, 34, 0, 0,0,0,0,0,0) == INVALID_ARGUMENT);
        ASSERT(util_keylet((uint32_t)buf, 34, 0x99999999, 0,0,0,0,0,0) == INVALID_ARGUMENT);

        // Test one of each type
        ASSERT(34 == (e=util_keylet(buf, 34, KEYLET_HOOK,
@@ -11651,6 +11655,17 @@ public:
            0,0
        )));

        ASSERT(34 == (e=util_keylet(buf, 34, KEYLET_CRON, SBUF(a), 1, 0, 0, 0)));
        {
            uint8_t ans[] =
            {
                0x00U,0x41U,0xF7U,0xB6U,0x45U,0x43U,0x61U,0x87U,0xCCU,0x61U,
                0x00U,0x00U,0x00U,0x01U,0x0AU,0x45U,0x80U,0x75U,0x7CU,0xDAU,
                0xD9U,0x16U,0x7EU,0xEEU,0xC1U,0x3CU,0x6CU,0x15U,0xD5U,0x17U,
                0xE2U,0x72U,0x9EU,0xC8
            };
            ASSERT_KL_EQ(ans);
        }
        accept(0,0,0);
    }
    )[test.hook]"];
(File diff suppressed because it is too large.)