Mirror of https://github.com/Xahau/xahaud.git (synced 2025-11-05 11:15:48 +00:00)

Compare commits: 1 commit on branch actions-ca...nd-add-pyt
| Author | SHA1 | Date |
|---|---|---|
| | 58ea69a96a | |
63  .github/actions/xahau-configure-ccache/action.yml (vendored, new file)
@@ -0,0 +1,63 @@
name: 'Configure ccache'
description: 'Sets up ccache with consistent configuration'

inputs:
  max_size:
    description: 'Maximum cache size'
    required: false
    default: '2G'
  hash_dir:
    description: 'Whether to include directory paths in hash'
    required: false
    default: 'true'
  compiler_check:
    description: 'How to check compiler for changes'
    required: false
    default: 'content'
  is_main_branch:
    description: 'Whether the current branch is the main branch'
    required: false
    default: 'false'
  main_cache_dir:
    description: 'Path to the main branch cache directory'
    required: false
    default: '~/.ccache-main'
  current_cache_dir:
    description: 'Path to the current branch cache directory'
    required: false
    default: '~/.ccache-current'

runs:
  using: 'composite'
  steps:
    - name: Configure ccache
      shell: bash
      run: |
        # Create cache directories
        mkdir -p ${{ inputs.main_cache_dir }} ${{ inputs.current_cache_dir }}

        # Set compiler check globally
        ccache -o compiler_check=${{ inputs.compiler_check }}

        # Use a single config file location
        mkdir -p ~/.ccache
        export CONF_PATH="$HOME/.ccache/ccache.conf"

        # Apply common settings
        echo "max_size = ${{ inputs.max_size }}" > "$CONF_PATH"
        echo "hash_dir = ${{ inputs.hash_dir }}" >> "$CONF_PATH"
        echo "compiler_check = ${{ inputs.compiler_check }}" >> "$CONF_PATH"

        if [ "${{ inputs.is_main_branch }}" == "true" ]; then
          # Main branch: use main branch cache
          ccache --set-config=cache_dir="${{ inputs.main_cache_dir }}"
          echo "CCACHE_DIR=${{ inputs.main_cache_dir }}" >> $GITHUB_ENV
        else
          # Feature branch: use current branch cache with main as secondary
          ccache --set-config=cache_dir="${{ inputs.current_cache_dir }}"
          ccache --set-config=secondary_storage="file:${{ inputs.main_cache_dir }}"
          echo "CCACHE_DIR=${{ inputs.current_cache_dir }}" >> $GITHUB_ENV
        fi

        ccache -p  # Print config for verification
        ccache -z  # Zero statistics before the build
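The branch split above yields a two-tier cache: the main branch writes straight to its own directory, while feature branches write to a per-branch directory and read through to the main-branch cache via ccache's secondary storage. A minimal way to verify the resulting setup on a feature-branch runner (a sketch assuming ccache 4.x and the defaults shown above, not output captured from CI):

# Sketch: confirm the two-tier layout this action configures (paths are the action defaults).
ccache --get-config cache_dir           # expected: ~/.ccache-current on a feature branch
ccache --get-config secondary_storage   # expected: file:~/.ccache-main
ccache -s                               # statistics; hits may be served from either tier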
129  .github/actions/xahau-ga-build/action.yml (vendored)
@@ -47,24 +47,6 @@ inputs:
|
||||
description: 'GCC version to use for Clang toolchain (e.g. 11, 13)'
|
||||
required: false
|
||||
default: ''
|
||||
ccache_max_size:
|
||||
description: 'Maximum ccache size'
|
||||
required: false
|
||||
default: '2G'
|
||||
ccache_hash_dir:
|
||||
description: 'Whether to include directory paths in hash'
|
||||
required: false
|
||||
default: 'true'
|
||||
ccache_compiler_check:
|
||||
description: 'How to check compiler for changes'
|
||||
required: false
|
||||
default: 'content'
|
||||
aws-access-key-id:
|
||||
description: 'AWS Access Key ID for S3 cache storage'
|
||||
required: true
|
||||
aws-secret-access-key:
|
||||
description: 'AWS Secret Access Key for S3 cache storage'
|
||||
required: true
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
@@ -77,44 +59,28 @@ runs:
|
||||
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
|
||||
echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Restore ccache directory
|
||||
- name: Restore ccache directory for default branch
|
||||
if: inputs.ccache_enabled == 'true'
|
||||
id: ccache-restore
|
||||
uses: ./.github/actions/xahau-ga-cache-restore
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ~/.ccache
|
||||
path: ~/.ccache-main
|
||||
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
|
||||
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
|
||||
|
||||
- name: Restore ccache directory for current branch
|
||||
if: inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
|
||||
id: ccache-restore-current-branch
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ~/.ccache-current
|
||||
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
|
||||
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
|
||||
${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
|
||||
aws-access-key-id: ${{ inputs.aws-access-key-id }}
|
||||
aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
|
||||
|
||||
- name: Configure ccache
|
||||
if: inputs.ccache_enabled == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
# Use ccache's default cache_dir (~/.ccache) - don't override it
|
||||
# This avoids tilde expansion issues when setting it explicitly
|
||||
|
||||
# Create cache directory using ccache's default
|
||||
mkdir -p ~/.ccache
|
||||
|
||||
# Configure ccache settings (but NOT cache_dir - use default)
|
||||
# This overwrites any cached config to ensure fresh configuration
|
||||
ccache --set-config=max_size=${{ inputs.ccache_max_size }}
|
||||
ccache --set-config=hash_dir=${{ inputs.ccache_hash_dir }}
|
||||
ccache --set-config=compiler_check=${{ inputs.ccache_compiler_check }}
|
||||
|
||||
# Note: Not setting CCACHE_DIR - let ccache use its default (~/.ccache)
|
||||
|
||||
# Print config for verification
|
||||
echo "=== ccache configuration ==="
|
||||
ccache -p
|
||||
|
||||
# Zero statistics before the build
|
||||
ccache -z
|
||||
|
||||
- name: Configure project
|
||||
shell: bash
|
||||
@@ -130,27 +96,14 @@ runs:
|
||||
if [ -n "${{ inputs.cxx }}" ]; then
|
||||
export CXX="${{ inputs.cxx }}"
|
||||
fi
|
||||
|
||||
# Create wrapper toolchain that overlays ccache on top of Conan's toolchain
|
||||
# This enables ccache for the main app build without affecting Conan dependency builds
|
||||
|
||||
|
||||
# Configure ccache launcher args
|
||||
CCACHE_ARGS=""
|
||||
if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
|
||||
cat > wrapper_toolchain.cmake <<'EOF'
|
||||
# Include Conan's generated toolchain first (sets compiler, flags, etc.)
|
||||
# Note: CMAKE_CURRENT_LIST_DIR is the directory containing this wrapper (.build/)
|
||||
include(${CMAKE_CURRENT_LIST_DIR}/build/generators/conan_toolchain.cmake)
|
||||
|
||||
# Overlay ccache configuration for main application build
|
||||
# This does NOT affect Conan dependency builds (already completed)
|
||||
set(CMAKE_C_COMPILER_LAUNCHER ccache CACHE STRING "C compiler launcher" FORCE)
|
||||
set(CMAKE_CXX_COMPILER_LAUNCHER ccache CACHE STRING "C++ compiler launcher" FORCE)
|
||||
EOF
|
||||
TOOLCHAIN_FILE="wrapper_toolchain.cmake"
|
||||
echo "✅ Created wrapper toolchain with ccache enabled"
|
||||
else
|
||||
TOOLCHAIN_FILE="build/generators/conan_toolchain.cmake"
|
||||
echo "ℹ️ Using Conan toolchain directly (ccache disabled)"
|
||||
CCACHE_ARGS="-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
|
||||
fi
|
||||
|
||||
|
||||
# Configure C++ standard library if specified
|
||||
# libstdcxx used for clang-14/16 to work around missing lexicographical_compare_three_way in libc++
|
||||
# libcxx can be used with clang-17+ which has full C++20 support
|
||||
@@ -190,44 +143,32 @@ runs:
|
||||
# So we get: .build/build/generators/ with our non-standard folder name
|
||||
cmake .. \
|
||||
-G "${{ inputs.generator }}" \
|
||||
$CCACHE_ARGS \
|
||||
${CMAKE_CXX_FLAGS:+-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"} \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=${TOOLCHAIN_FILE} \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
|
||||
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }}
|
||||
|
||||
- name: Show ccache config before build
|
||||
if: inputs.ccache_enabled == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
echo "=========================================="
|
||||
echo "ccache configuration before build"
|
||||
echo "=========================================="
|
||||
ccache -p
|
||||
echo ""
|
||||
|
||||
- name: Build project
|
||||
shell: bash
|
||||
run: |
|
||||
cd ${{ inputs.build_dir }}
|
||||
|
||||
# Check for verbose build flag in commit message
|
||||
VERBOSE_FLAG=""
|
||||
if echo "${XAHAU_GA_COMMIT_MSG}" | grep -q '\[ci-ga-cmake-verbose\]'; then
|
||||
echo "🔊 [ci-ga-cmake-verbose] detected - enabling verbose output"
|
||||
VERBOSE_FLAG="-- -v"
|
||||
fi
|
||||
|
||||
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc) ${VERBOSE_FLAG}
|
||||
cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc)
|
||||
|
||||
- name: Show ccache statistics
|
||||
if: inputs.ccache_enabled == 'true'
|
||||
shell: bash
|
||||
run: ccache -s
|
||||
|
||||
- name: Save ccache directory
|
||||
if: success() && inputs.ccache_enabled == 'true'
|
||||
uses: ./.github/actions/xahau-ga-cache-save
|
||||
- name: Save ccache directory for default branch
|
||||
if: always() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name == inputs.main_branch
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: ~/.ccache
|
||||
key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
|
||||
aws-access-key-id: ${{ inputs.aws-access-key-id }}
|
||||
aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
|
||||
path: ~/.ccache-main
|
||||
key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
|
||||
|
||||
- name: Save ccache directory for current branch
|
||||
if: always() && inputs.ccache_enabled == 'true' && steps.safe-branch.outputs.name != inputs.main_branch
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: ~/.ccache-current
|
||||
key: ${{ steps.ccache-restore-current-branch.outputs.cache-primary-key }}
|
||||
|
||||
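Because the wrapper toolchain above only overlays the compiler launcher on top of Conan's generated toolchain, its effect is visible in the CMake cache of the configured tree. A sketch of how to confirm that (the .build directory name matches the build_dir default used by these workflows; the commented lines show the expected values, not captured logs):

# Sketch: check that ccache was injected as the compiler launcher without touching
# the compiler settings provided by conan_toolchain.cmake.
cmake -LA -N .build | grep COMPILER_LAUNCHER
# CMAKE_C_COMPILER_LAUNCHER:STRING=ccache
# CMAKE_CXX_COMPILER_LAUNCHER:STRING=ccache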
291  .github/actions/xahau-ga-cache-restore/action.yml (vendored)
@@ -1,291 +0,0 @@
|
||||
name: 'Xahau Cache Restore (S3)'
|
||||
bump: 1
|
||||
description: 'Drop-in replacement for actions/cache/restore using S3 storage'
|
||||
|
||||
inputs:
|
||||
path:
|
||||
description: 'A list of files, directories, and wildcard patterns to cache (currently only single path supported)'
|
||||
required: true
|
||||
key:
|
||||
description: 'An explicit key for restoring the cache'
|
||||
required: true
|
||||
restore-keys:
|
||||
description: 'An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key'
|
||||
required: false
|
||||
default: ''
|
||||
s3-bucket:
|
||||
description: 'S3 bucket name for cache storage'
|
||||
required: false
|
||||
default: 'xahaud-github-actions-cache-niq'
|
||||
s3-region:
|
||||
description: 'S3 region'
|
||||
required: false
|
||||
default: 'us-east-1'
|
||||
fail-on-cache-miss:
|
||||
description: 'Fail the workflow if cache entry is not found'
|
||||
required: false
|
||||
default: 'false'
|
||||
lookup-only:
|
||||
description: 'Check if a cache entry exists for the given input(s) without downloading it'
|
||||
required: false
|
||||
default: 'false'
|
||||
# Note: Composite actions can't access secrets.* directly - must be passed from workflow
|
||||
aws-access-key-id:
|
||||
description: 'AWS Access Key ID for S3 access'
|
||||
required: true
|
||||
aws-secret-access-key:
|
||||
description: 'AWS Secret Access Key for S3 access'
|
||||
required: true
|
||||
|
||||
outputs:
|
||||
cache-hit:
|
||||
description: 'A boolean value to indicate an exact match was found for the primary key'
|
||||
value: ${{ steps.restore-cache.outputs.cache-hit }}
|
||||
cache-primary-key:
|
||||
description: 'The key that was used to restore the cache (may be from restore-keys)'
|
||||
value: ${{ steps.restore-cache.outputs.cache-primary-key }}
|
||||
cache-matched-key:
|
||||
description: 'The key that was used to restore the cache (exact or prefix match)'
|
||||
value: ${{ steps.restore-cache.outputs.cache-matched-key }}
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Restore cache from S3
|
||||
id: restore-cache
|
||||
shell: bash
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
|
||||
S3_BUCKET: ${{ inputs.s3-bucket }}
|
||||
S3_REGION: ${{ inputs.s3-region }}
|
||||
CACHE_KEY: ${{ inputs.key }}
|
||||
RESTORE_KEYS: ${{ inputs.restore-keys }}
|
||||
TARGET_PATH: ${{ inputs.path }}
|
||||
FAIL_ON_MISS: ${{ inputs.fail-on-cache-miss }}
|
||||
LOOKUP_ONLY: ${{ inputs.lookup-only }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "=========================================="
|
||||
echo "Xahau Cache Restore (S3)"
|
||||
echo "=========================================="
|
||||
echo "Target path: ${TARGET_PATH}"
|
||||
echo "Cache key: ${CACHE_KEY}"
|
||||
echo "S3 bucket: s3://${S3_BUCKET}"
|
||||
echo ""
|
||||
|
||||
# Normalize target path (expand tilde)
|
||||
if [[ "${TARGET_PATH}" == ~* ]]; then
|
||||
TARGET_PATH="${HOME}${TARGET_PATH:1}"
|
||||
fi
|
||||
|
||||
# Canonicalize path (Linux only - macOS realpath doesn't support -m)
|
||||
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
|
||||
TARGET_PATH=$(realpath -m "${TARGET_PATH}")
|
||||
fi
|
||||
|
||||
echo "Normalized target path: ${TARGET_PATH}"
|
||||
echo ""
|
||||
|
||||
# Debug: Show commit message
|
||||
echo "=========================================="
|
||||
echo "DEBUG: Cache clear tag detection"
|
||||
echo "=========================================="
|
||||
echo "Raw commit message:"
|
||||
echo "${XAHAU_GA_COMMIT_MSG}"
|
||||
echo ""
|
||||
echo "Searching for: [ci-ga-clear-cache] or [ci-ga-clear-cache:*]"
|
||||
echo ""
|
||||
|
||||
# Check for [ci-ga-clear-cache] tag in commit message (with optional search terms)
|
||||
# Examples:
|
||||
# [ci-ga-clear-cache] - Clear this job's cache
|
||||
# [ci-ga-clear-cache:ccache] - Clear only if key contains "ccache"
|
||||
# [ci-ga-clear-cache:gcc Debug] - Clear only if key contains both "gcc" AND "Debug"
|
||||
|
||||
# Extract search terms if present (e.g., "ccache" from "[ci-ga-clear-cache:ccache]")
|
||||
SEARCH_TERMS=$(echo "${XAHAU_GA_COMMIT_MSG}" | grep -o '\[ci-ga-clear-cache:[^]]*\]' | sed 's/\[ci-ga-clear-cache://;s/\]//' || echo "")
|
||||
|
||||
SHOULD_CLEAR=false
|
||||
|
||||
if [ -n "${SEARCH_TERMS}" ]; then
|
||||
# Search terms provided - check if THIS cache key matches ALL terms (AND logic)
|
||||
echo "🔍 [ci-ga-clear-cache:${SEARCH_TERMS}] detected"
|
||||
echo "Checking if cache key matches search terms..."
|
||||
echo " Cache key: ${CACHE_KEY}"
|
||||
echo " Search terms: ${SEARCH_TERMS}"
|
||||
echo ""
|
||||
|
||||
MATCHES=true
|
||||
for term in ${SEARCH_TERMS}; do
|
||||
if ! echo "${CACHE_KEY}" | grep -q "${term}"; then
|
||||
MATCHES=false
|
||||
echo " ✗ Key does not contain '${term}'"
|
||||
break
|
||||
else
|
||||
echo " ✓ Key contains '${term}'"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "${MATCHES}" = "true" ]; then
|
||||
echo ""
|
||||
echo "✅ Cache key matches all search terms - will clear cache"
|
||||
SHOULD_CLEAR=true
|
||||
else
|
||||
echo ""
|
||||
echo "⏭️ Cache key doesn't match search terms - skipping cache clear"
|
||||
fi
|
||||
elif echo "${XAHAU_GA_COMMIT_MSG}" | grep -q '\[ci-ga-clear-cache\]'; then
|
||||
# No search terms - always clear this job's cache
|
||||
echo "🗑️ [ci-ga-clear-cache] detected in commit message"
|
||||
echo "Clearing cache for key: ${CACHE_KEY}"
|
||||
SHOULD_CLEAR=true
|
||||
fi
|
||||
|
||||
if [ "${SHOULD_CLEAR}" = "true" ]; then
|
||||
echo ""
|
||||
|
||||
# Delete base layer
|
||||
S3_BASE_KEY="s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst"
|
||||
if aws s3 ls "${S3_BASE_KEY}" --region "${S3_REGION}" >/dev/null 2>&1; then
|
||||
echo "Deleting base layer: ${S3_BASE_KEY}"
|
||||
aws s3 rm "${S3_BASE_KEY}" --region "${S3_REGION}" 2>/dev/null || true
|
||||
echo "✓ Base layer deleted"
|
||||
else
|
||||
echo "ℹ️ No base layer found to delete"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "✅ Cache cleared successfully"
|
||||
echo "Build will proceed from scratch (bootstrap mode)"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
# Function to try restoring a cache key
|
||||
try_restore_key() {
|
||||
local key=$1
|
||||
local s3_key="s3://${S3_BUCKET}/${key}-base.tar.zst"
|
||||
|
||||
echo "Checking for key: ${key}"
|
||||
|
||||
if aws s3 ls "${s3_key}" --region "${S3_REGION}" >/dev/null 2>&1; then
|
||||
echo "✓ Found cache: ${s3_key}"
|
||||
return 0
|
||||
else
|
||||
echo "✗ Not found: ${key}"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Try exact match first
|
||||
MATCHED_KEY=""
|
||||
EXACT_MATCH="false"
|
||||
|
||||
if try_restore_key "${CACHE_KEY}"; then
|
||||
MATCHED_KEY="${CACHE_KEY}"
|
||||
EXACT_MATCH="true"
|
||||
echo ""
|
||||
echo "🎯 Exact cache hit for key: ${CACHE_KEY}"
|
||||
else
|
||||
# Try restore-keys (prefix matching)
|
||||
if [ -n "${RESTORE_KEYS}" ]; then
|
||||
echo ""
|
||||
echo "Primary key not found, trying restore-keys..."
|
||||
|
||||
while IFS= read -r restore_key; do
|
||||
[ -z "${restore_key}" ] && continue
|
||||
restore_key=$(echo "${restore_key}" | xargs)
|
||||
|
||||
if try_restore_key "${restore_key}"; then
|
||||
MATCHED_KEY="${restore_key}"
|
||||
EXACT_MATCH="false"
|
||||
echo ""
|
||||
echo "✓ Cache restored from fallback key: ${restore_key}"
|
||||
break
|
||||
fi
|
||||
done <<< "${RESTORE_KEYS}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Check if we found anything
|
||||
if [ -z "${MATCHED_KEY}" ]; then
|
||||
echo ""
|
||||
echo "❌ No cache found for key: ${CACHE_KEY}"
|
||||
|
||||
if [ "${FAIL_ON_MISS}" = "true" ]; then
|
||||
echo "fail-on-cache-miss is enabled, failing workflow"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Set outputs for cache miss
|
||||
echo "cache-hit=false" >> $GITHUB_OUTPUT
|
||||
echo "cache-primary-key=" >> $GITHUB_OUTPUT
|
||||
echo "cache-matched-key=" >> $GITHUB_OUTPUT
|
||||
|
||||
# Create empty cache directory
|
||||
mkdir -p "${TARGET_PATH}"
|
||||
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "Cache restore completed (bootstrap mode)"
|
||||
echo "Created empty cache directory: ${TARGET_PATH}"
|
||||
echo "=========================================="
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# If lookup-only, we're done
|
||||
if [ "${LOOKUP_ONLY}" = "true" ]; then
|
||||
echo "cache-hit=${EXACT_MATCH}" >> $GITHUB_OUTPUT
|
||||
echo "cache-primary-key=${CACHE_KEY}" >> $GITHUB_OUTPUT
|
||||
echo "cache-matched-key=${MATCHED_KEY}" >> $GITHUB_OUTPUT
|
||||
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "Cache lookup completed (lookup-only mode)"
|
||||
echo "Cache exists: ${MATCHED_KEY}"
|
||||
echo "=========================================="
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Download and extract cache
|
||||
S3_KEY="s3://${S3_BUCKET}/${MATCHED_KEY}-base.tar.zst"
|
||||
TEMP_TARBALL="/tmp/xahau-cache-restore-$$.tar.zst"
|
||||
|
||||
echo ""
|
||||
echo "Downloading cache..."
|
||||
aws s3 cp "${S3_KEY}" "${TEMP_TARBALL}" --region "${S3_REGION}" --no-progress
|
||||
|
||||
TARBALL_SIZE=$(du -h "${TEMP_TARBALL}" | cut -f1)
|
||||
echo "✓ Downloaded: ${TARBALL_SIZE}"
|
||||
|
||||
# Create parent directory if needed
|
||||
mkdir -p "$(dirname "${TARGET_PATH}")"
|
||||
|
||||
# Remove existing target if it exists
|
||||
if [ -e "${TARGET_PATH}" ]; then
|
||||
echo "Removing existing target: ${TARGET_PATH}"
|
||||
rm -rf "${TARGET_PATH}"
|
||||
fi
|
||||
|
||||
# Create target directory and extract
|
||||
mkdir -p "${TARGET_PATH}"
|
||||
echo ""
|
||||
echo "Extracting cache..."
|
||||
zstd -d -c "${TEMP_TARBALL}" | tar -xf - -C "${TARGET_PATH}"
|
||||
echo "✓ Cache extracted to: ${TARGET_PATH}"
|
||||
|
||||
# Cleanup
|
||||
rm -f "${TEMP_TARBALL}"
|
||||
|
||||
# Set outputs
|
||||
echo "cache-hit=${EXACT_MATCH}" >> $GITHUB_OUTPUT
|
||||
echo "cache-primary-key=${CACHE_KEY}" >> $GITHUB_OUTPUT
|
||||
echo "cache-matched-key=${MATCHED_KEY}" >> $GITHUB_OUTPUT
|
||||
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "Cache restore completed successfully"
|
||||
echo "Cache hit: ${EXACT_MATCH}"
|
||||
echo "Matched key: ${MATCHED_KEY}"
|
||||
echo "=========================================="
|
||||
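The [ci-ga-clear-cache:...] handling above applies AND logic across space-separated search terms, matched against the job's cache key. A standalone sketch of that matching, using a made-up commit message and cache key (neither value is taken from this repository):

# Hypothetical inputs illustrating the AND-matching of [ci-ga-clear-cache:...] terms.
XAHAU_GA_COMMIT_MSG='fix flaky test [ci-ga-clear-cache:gcc Debug]'
CACHE_KEY='Linux-ccache-v2-gcc-13-libstdcxx-Debug-dev'
SEARCH_TERMS=$(echo "${XAHAU_GA_COMMIT_MSG}" | grep -o '\[ci-ga-clear-cache:[^]]*\]' | sed 's/\[ci-ga-clear-cache://;s/\]//')
MATCHES=true
for term in ${SEARCH_TERMS}; do
  echo "${CACHE_KEY}" | grep -q "${term}" || MATCHES=false
done
echo "${MATCHES}"   # true: every term appears in the key, so this job's cache would be cleared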
110  .github/actions/xahau-ga-cache-save/action.yml (vendored)
@@ -1,110 +0,0 @@
|
||||
name: 'Xahau Cache Save (S3)'
|
||||
description: 'Drop-in replacement for actions/cache/save using S3 storage'
|
||||
|
||||
inputs:
|
||||
path:
|
||||
description: 'A list of files, directories, and wildcard patterns to cache (currently only single path supported)'
|
||||
required: true
|
||||
key:
|
||||
description: 'An explicit key for saving the cache'
|
||||
required: true
|
||||
s3-bucket:
|
||||
description: 'S3 bucket name for cache storage'
|
||||
required: false
|
||||
default: 'xahaud-github-actions-cache-niq'
|
||||
s3-region:
|
||||
description: 'S3 region'
|
||||
required: false
|
||||
default: 'us-east-1'
|
||||
# Note: Composite actions can't access secrets.* directly - must be passed from workflow
|
||||
aws-access-key-id:
|
||||
description: 'AWS Access Key ID for S3 access'
|
||||
required: true
|
||||
aws-secret-access-key:
|
||||
description: 'AWS Secret Access Key for S3 access'
|
||||
required: true
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Save cache to S3
|
||||
shell: bash
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
|
||||
S3_BUCKET: ${{ inputs.s3-bucket }}
|
||||
S3_REGION: ${{ inputs.s3-region }}
|
||||
CACHE_KEY: ${{ inputs.key }}
|
||||
TARGET_PATH: ${{ inputs.path }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
echo "=========================================="
|
||||
echo "Xahau Cache Save (S3)"
|
||||
echo "=========================================="
|
||||
echo "Target path: ${TARGET_PATH}"
|
||||
echo "Cache key: ${CACHE_KEY}"
|
||||
echo "S3 bucket: s3://${S3_BUCKET}"
|
||||
echo ""
|
||||
|
||||
# Normalize target path (expand tilde and resolve to absolute path)
|
||||
if [[ "${TARGET_PATH}" == ~* ]]; then
|
||||
TARGET_PATH="${HOME}${TARGET_PATH:1}"
|
||||
fi
|
||||
echo "Normalized target path: ${TARGET_PATH}"
|
||||
echo ""
|
||||
|
||||
# Check if target directory exists
|
||||
if [ ! -d "${TARGET_PATH}" ]; then
|
||||
echo "⚠️ Target directory does not exist: ${TARGET_PATH}"
|
||||
echo "Skipping cache save."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Use static base name (one base per key, immutable)
|
||||
S3_BASE_KEY="s3://${S3_BUCKET}/${CACHE_KEY}-base.tar.zst"
|
||||
|
||||
# Check if base already exists (immutability - first write wins)
|
||||
if aws s3 ls "${S3_BASE_KEY}" --region "${S3_REGION}" >/dev/null 2>&1; then
|
||||
echo "⚠️ Cache already exists: ${S3_BASE_KEY}"
|
||||
echo "Skipping upload (immutability - first write wins, like GitHub Actions)"
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "Cache save completed (already exists)"
|
||||
echo "=========================================="
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Create tarball
|
||||
BASE_TARBALL="/tmp/xahau-cache-base-$$.tar.zst"
|
||||
|
||||
echo "Creating cache tarball..."
|
||||
tar -cf - -C "${TARGET_PATH}" . | zstd -3 -T0 -q -o "${BASE_TARBALL}"
|
||||
|
||||
BASE_SIZE=$(du -h "${BASE_TARBALL}" | cut -f1)
|
||||
echo "✓ Cache tarball created: ${BASE_SIZE}"
|
||||
echo ""
|
||||
|
||||
# Upload to S3
|
||||
echo "Uploading cache to S3..."
|
||||
echo " Key: ${CACHE_KEY}-base.tar.zst"
|
||||
|
||||
aws s3api put-object \
|
||||
--bucket "${S3_BUCKET}" \
|
||||
--key "${CACHE_KEY}-base.tar.zst" \
|
||||
--body "${BASE_TARBALL}" \
|
||||
--tagging 'type=base' \
|
||||
--region "${S3_REGION}" \
|
||||
>/dev/null 2>&1
|
||||
|
||||
echo "✓ Uploaded: ${S3_BASE_KEY}"
|
||||
|
||||
# Cleanup
|
||||
rm -f "${BASE_TARBALL}"
|
||||
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "Cache save completed successfully"
|
||||
echo "Cache size: ${BASE_SIZE}"
|
||||
echo "Cache key: ${CACHE_KEY}"
|
||||
echo "=========================================="
|
||||
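Both S3 cache actions normalize the path input by hand because a value such as ~/.ccache-main reaches the script as a literal tilde. A small sketch of that normalization in isolation (the /home/runner result is what a hosted Linux runner would produce):

# Sketch: manual tilde expansion as done in the save/restore scripts above.
TARGET_PATH='~/.ccache-main'
if [[ "${TARGET_PATH}" == ~* ]]; then
  TARGET_PATH="${HOME}${TARGET_PATH:1}"
fi
echo "${TARGET_PATH}"   # e.g. /home/runner/.ccache-main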
127  .github/actions/xahau-ga-dependencies/action.yml (vendored)
@@ -25,28 +25,6 @@ inputs:
|
||||
description: 'Main branch name for restore keys'
|
||||
required: false
|
||||
default: 'dev'
|
||||
os:
|
||||
description: 'Operating system (Linux, Macos)'
|
||||
required: false
|
||||
default: 'Linux'
|
||||
arch:
|
||||
description: 'Architecture (x86_64, armv8)'
|
||||
required: false
|
||||
default: 'x86_64'
|
||||
compiler:
|
||||
description: 'Compiler type (gcc, clang, apple-clang)'
|
||||
required: true
|
||||
compiler_version:
|
||||
description: 'Compiler version (11, 13, 14, etc.)'
|
||||
required: true
|
||||
cc:
|
||||
description: 'C compiler executable (gcc-13, clang-14, etc.), empty for macOS'
|
||||
required: false
|
||||
default: ''
|
||||
cxx:
|
||||
description: 'C++ compiler executable (g++-14, clang++-14, etc.), empty for macOS'
|
||||
required: false
|
||||
default: ''
|
||||
stdlib:
|
||||
description: 'C++ standard library for Conan configuration (note: also in compiler-id)'
|
||||
required: true
|
||||
@@ -54,12 +32,6 @@ inputs:
|
||||
options:
|
||||
- libstdcxx
|
||||
- libcxx
|
||||
aws-access-key-id:
|
||||
description: 'AWS Access Key ID for S3 cache storage'
|
||||
required: true
|
||||
aws-secret-access-key:
|
||||
description: 'AWS Secret Access Key for S3 cache storage'
|
||||
required: true
|
||||
|
||||
outputs:
|
||||
cache-hit:
|
||||
@@ -69,72 +41,47 @@ outputs:
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Generate safe branch name
|
||||
if: inputs.cache_enabled == 'true'
|
||||
id: safe-branch
|
||||
shell: bash
|
||||
run: |
|
||||
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
|
||||
echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check conanfile changes
|
||||
if: inputs.cache_enabled == 'true'
|
||||
id: check-conanfile-changes
|
||||
shell: bash
|
||||
run: |
|
||||
# Check if we're on the main branch
|
||||
if [ "${{ github.ref_name }}" == "${{ inputs.main_branch }}" ]; then
|
||||
echo "should-save-conan-cache=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
# Fetch main branch for comparison
|
||||
git fetch origin ${{ inputs.main_branch }}
|
||||
|
||||
# Check if conanfile.txt or conanfile.py has changed compared to main branch
|
||||
if git diff --quiet origin/${{ inputs.main_branch }}..HEAD -- '**/conanfile.txt' '**/conanfile.py'; then
|
||||
echo "should-save-conan-cache=false" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "should-save-conan-cache=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
fi
|
||||
|
||||
- name: Restore Conan cache
|
||||
if: inputs.cache_enabled == 'true'
|
||||
id: cache-restore-conan
|
||||
uses: ./.github/actions/xahau-ga-cache-restore
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ~/.conan2
|
||||
path: |
|
||||
~/.conan
|
||||
~/.conan2
|
||||
# Note: compiler-id format is compiler-version-stdlib[-gccversion]
|
||||
key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-
|
||||
${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
|
||||
aws-access-key-id: ${{ inputs.aws-access-key-id }}
|
||||
aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
|
||||
|
||||
- name: Configure Conan
|
||||
shell: bash
|
||||
run: |
|
||||
# Create the default profile directory if it doesn't exist
|
||||
mkdir -p ~/.conan2/profiles
|
||||
|
||||
# Determine the correct libcxx based on stdlib parameter
|
||||
if [ "${{ inputs.stdlib }}" = "libcxx" ]; then
|
||||
LIBCXX="libc++"
|
||||
else
|
||||
LIBCXX="libstdc++11"
|
||||
fi
|
||||
|
||||
# Create profile with our specific settings
|
||||
# This overwrites any cached profile to ensure fresh configuration
|
||||
cat > ~/.conan2/profiles/default <<EOF
|
||||
[settings]
|
||||
arch=${{ inputs.arch }}
|
||||
build_type=${{ inputs.configuration }}
|
||||
compiler=${{ inputs.compiler }}
|
||||
compiler.cppstd=20
|
||||
compiler.libcxx=${LIBCXX}
|
||||
compiler.version=${{ inputs.compiler_version }}
|
||||
os=${{ inputs.os }}
|
||||
EOF
|
||||
|
||||
# Add buildenv and conf sections for Linux (not needed for macOS)
|
||||
if [ "${{ inputs.os }}" = "Linux" ] && [ -n "${{ inputs.cc }}" ]; then
|
||||
cat >> ~/.conan2/profiles/default <<EOF
|
||||
|
||||
[buildenv]
|
||||
CC=/usr/bin/${{ inputs.cc }}
|
||||
CXX=/usr/bin/${{ inputs.cxx }}
|
||||
|
||||
[conf]
|
||||
tools.build:compiler_executables={"c": "/usr/bin/${{ inputs.cc }}", "cpp": "/usr/bin/${{ inputs.cxx }}"}
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Add macOS-specific conf if needed
|
||||
if [ "${{ inputs.os }}" = "Macos" ]; then
|
||||
cat >> ~/.conan2/profiles/default <<EOF
|
||||
|
||||
[conf]
|
||||
# Workaround for gRPC with newer Apple Clang
|
||||
tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Display profile for verification
|
||||
conan profile show
|
||||
|
||||
- name: Export custom recipes
|
||||
shell: bash
|
||||
@@ -160,10 +107,10 @@ runs:
|
||||
..
|
||||
|
||||
- name: Save Conan cache
|
||||
if: success() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
|
||||
uses: ./.github/actions/xahau-ga-cache-save
|
||||
if: always() && inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true' && steps.check-conanfile-changes.outputs.should-save-conan-cache == 'true'
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: ~/.conan2
|
||||
key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
|
||||
aws-access-key-id: ${{ inputs.aws-access-key-id }}
|
||||
aws-secret-access-key: ${{ inputs.aws-secret-access-key }}
|
||||
path: |
|
||||
~/.conan
|
||||
~/.conan2
|
||||
key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}
|
||||
|
||||
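For a concrete matrix entry, the profile written by the "Configure Conan" step above would look roughly like the following (gcc 13 / Debug / libstdcxx on Linux is an illustrative choice, not a value fixed by the action):

# Sketch: inspect the generated default profile for an assumed gcc-13 / Debug / libstdcxx entry.
cat ~/.conan2/profiles/default
# [settings]
# arch=x86_64
# build_type=Debug
# compiler=gcc
# compiler.cppstd=20
# compiler.libcxx=libstdc++11
# compiler.version=13
# os=Linux
#
# [buildenv]
# CC=/usr/bin/gcc-13
# CXX=/usr/bin/g++-13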
@@ -1,73 +0,0 @@
|
||||
name: 'Get Commit Message'
|
||||
description: 'Gets commit message for both push and pull_request events and sets XAHAU_GA_COMMIT_MSG env var'
|
||||
|
||||
inputs:
|
||||
event-name:
|
||||
description: 'The event name (push or pull_request)'
|
||||
required: true
|
||||
head-commit-message:
|
||||
description: 'The head commit message (for push events)'
|
||||
required: false
|
||||
default: ''
|
||||
pr-head-sha:
|
||||
description: 'The PR head SHA (for pull_request events)'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Get commit message and set environment variable
|
||||
shell: python
|
||||
run: |
|
||||
import os
|
||||
import subprocess
|
||||
import secrets
|
||||
|
||||
event_name = "${{ inputs.event-name }}"
|
||||
pr_head_sha = "${{ inputs.pr-head-sha }}"
|
||||
|
||||
print("==========================================")
|
||||
print("Setting XAHAU_GA_COMMIT_MSG environment variable")
|
||||
print("==========================================")
|
||||
print(f"Event: {event_name}")
|
||||
|
||||
if event_name == 'push':
|
||||
# For push events, use the input directly
|
||||
message = """${{ inputs.head-commit-message }}"""
|
||||
print("Source: workflow input (github.event.head_commit.message)")
|
||||
elif event_name == 'pull_request':
|
||||
# For PR events, fetch the specific SHA
|
||||
print(f"Source: git show {pr_head_sha} (fetching PR head commit)")
|
||||
|
||||
# Fetch the PR head commit
|
||||
subprocess.run(
|
||||
['git', 'fetch', 'origin', pr_head_sha],
|
||||
check=True
|
||||
)
|
||||
|
||||
# Get commit message from the fetched SHA
|
||||
result = subprocess.run(
|
||||
['git', 'show', '-s', '--format=%B', pr_head_sha],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True
|
||||
)
|
||||
message = result.stdout.strip()
|
||||
else:
|
||||
message = ""
|
||||
print(f"Warning: Unknown event type: {event_name}")
|
||||
|
||||
print(f"Commit message (first 100 chars): {message[:100]}")
|
||||
|
||||
# Write to GITHUB_ENV using heredoc with random delimiter (prevents injection attacks)
|
||||
# See: https://securitylab.github.com/resources/github-actions-untrusted-input/
|
||||
delimiter = f"EOF_{secrets.token_hex(16)}"
|
||||
|
||||
with open(os.environ['GITHUB_ENV'], 'a') as f:
|
||||
f.write(f'XAHAU_GA_COMMIT_MSG<<{delimiter}\n')
|
||||
f.write(message)
|
||||
f.write(f'\n{delimiter}\n')
|
||||
|
||||
print(f"✓ XAHAU_GA_COMMIT_MSG set (available to all subsequent steps)")
|
||||
print("==========================================")
|
||||
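The random-delimiter heredoc used above is the standard way to write untrusted multi-line values to GITHUB_ENV. The same pattern in shell form, as a sketch (COMMIT_MESSAGE is a placeholder variable, not part of this action):

# Sketch: multi-line GITHUB_ENV syntax with an unguessable delimiter, so a crafted commit
# message cannot terminate the block early and inject extra environment variables.
DELIM="EOF_$(openssl rand -hex 16)"
{
  echo "XAHAU_GA_COMMIT_MSG<<${DELIM}"
  echo "${COMMIT_MESSAGE}"   # untrusted content
  echo "${DELIM}"
} >> "${GITHUB_ENV}"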
54  .github/workflows/xahau-ga-macos.yml (vendored)
@@ -30,14 +30,6 @@ jobs:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Get commit message
|
||||
id: get-commit-message
|
||||
uses: ./.github/actions/xahau-ga-get-commit-message
|
||||
with:
|
||||
event-name: ${{ github.event_name }}
|
||||
head-commit-message: ${{ github.event.head_commit.message }}
|
||||
pr-head-sha: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: Install Conan
|
||||
run: |
|
||||
brew install conan
|
||||
@@ -86,6 +78,14 @@ jobs:
|
||||
- name: Install ccache
|
||||
run: brew install ccache
|
||||
|
||||
- name: Configure ccache
|
||||
uses: ./.github/actions/xahau-configure-ccache
|
||||
with:
|
||||
max_size: 2G
|
||||
hash_dir: true
|
||||
compiler_check: content
|
||||
is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}
|
||||
|
||||
- name: Check environment
|
||||
run: |
|
||||
echo "PATH:"
|
||||
@@ -98,12 +98,32 @@ jobs:
|
||||
echo "---- Full Environment ----"
|
||||
env
|
||||
|
||||
- name: Detect compiler version
|
||||
id: detect-compiler
|
||||
- name: Configure Conan
|
||||
run: |
|
||||
# Create the default profile directory if it doesn't exist
|
||||
mkdir -p ~/.conan2/profiles
|
||||
|
||||
# Detect compiler version
|
||||
COMPILER_VERSION=$(clang --version | grep -oE 'version [0-9]+' | grep -oE '[0-9]+')
|
||||
echo "compiler_version=${COMPILER_VERSION}" >> $GITHUB_OUTPUT
|
||||
echo "Detected Apple Clang version: ${COMPILER_VERSION}"
|
||||
|
||||
# Create profile with our specific settings
|
||||
cat > ~/.conan2/profiles/default <<EOF
|
||||
[settings]
|
||||
arch=armv8
|
||||
build_type=Release
|
||||
compiler=apple-clang
|
||||
compiler.cppstd=20
|
||||
compiler.libcxx=libc++
|
||||
compiler.version=${COMPILER_VERSION}
|
||||
os=Macos
|
||||
|
||||
[conf]
|
||||
# Workaround for gRPC with newer Apple Clang
|
||||
tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
|
||||
EOF
|
||||
|
||||
# Display profile for verification
|
||||
conan profile show
|
||||
|
||||
- name: Install dependencies
|
||||
uses: ./.github/actions/xahau-ga-dependencies
|
||||
@@ -113,13 +133,6 @@ jobs:
|
||||
compiler-id: clang
|
||||
cache_version: ${{ env.CACHE_VERSION }}
|
||||
main_branch: ${{ env.MAIN_BRANCH_NAME }}
|
||||
os: Macos
|
||||
arch: armv8
|
||||
compiler: apple-clang
|
||||
compiler_version: ${{ steps.detect-compiler.outputs.compiler_version }}
|
||||
stdlib: libcxx
|
||||
aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
|
||||
|
||||
- name: Build
|
||||
uses: ./.github/actions/xahau-ga-build
|
||||
@@ -130,9 +143,6 @@ jobs:
|
||||
compiler-id: clang
|
||||
cache_version: ${{ env.CACHE_VERSION }}
|
||||
main_branch: ${{ env.MAIN_BRANCH_NAME }}
|
||||
stdlib: libcxx
|
||||
aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
|
||||
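The macOS job above derives the Apple Clang major version with a grep pipeline rather than hard-coding it. Running the same pipeline locally looks like this (the version number shown is illustrative only):

# Sketch: the detection pipeline used by the "Configure Conan" step above.
clang --version | head -1
# Apple clang version 15.0.0 ...
clang --version | grep -oE 'version [0-9]+' | grep -oE '[0-9]+'
# 15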
62  .github/workflows/xahau-ga-nix.yml (vendored)
@@ -2,7 +2,7 @@ name: Nix - GA Runner
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["dev", "candidate", "release", "nd-experiment-overlayfs-2025-10-29"]
|
||||
branches: ["dev", "candidate", "release"]
|
||||
pull_request:
|
||||
branches: ["dev", "candidate", "release"]
|
||||
schedule:
|
||||
@@ -156,20 +156,12 @@ jobs:
|
||||
env:
|
||||
build_dir: .build
|
||||
# Bump this number to invalidate all caches globally.
|
||||
CACHE_VERSION: 3
|
||||
CACHE_VERSION: 2
|
||||
MAIN_BRANCH_NAME: dev
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Get commit message
|
||||
id: get-commit-message
|
||||
uses: ./.github/actions/xahau-ga-get-commit-message
|
||||
with:
|
||||
event-name: ${{ github.event_name }}
|
||||
head-commit-message: ${{ github.event.head_commit.message }}
|
||||
pr-head-sha: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: Install build dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
@@ -239,6 +231,48 @@ jobs:
|
||||
# Install Conan 2
|
||||
pip install --upgrade "conan>=2.0,<3"
|
||||
|
||||
- name: Configure ccache
|
||||
uses: ./.github/actions/xahau-configure-ccache
|
||||
with:
|
||||
max_size: 2G
|
||||
hash_dir: true
|
||||
compiler_check: content
|
||||
is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}
|
||||
|
||||
- name: Configure Conan
|
||||
run: |
|
||||
# Create the default profile directory if it doesn't exist
|
||||
mkdir -p ~/.conan2/profiles
|
||||
|
||||
# Determine the correct libcxx based on stdlib parameter
|
||||
if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
|
||||
LIBCXX="libc++"
|
||||
else
|
||||
LIBCXX="libstdc++11"
|
||||
fi
|
||||
|
||||
# Create profile with our specific settings
|
||||
cat > ~/.conan2/profiles/default <<EOF
|
||||
[settings]
|
||||
arch=x86_64
|
||||
build_type=${{ matrix.configuration }}
|
||||
compiler=${{ matrix.compiler }}
|
||||
compiler.cppstd=20
|
||||
compiler.libcxx=${LIBCXX}
|
||||
compiler.version=${{ matrix.compiler_version }}
|
||||
os=Linux
|
||||
|
||||
[buildenv]
|
||||
CC=/usr/bin/${{ matrix.cc }}
|
||||
CXX=/usr/bin/${{ matrix.cxx }}
|
||||
|
||||
[conf]
|
||||
tools.build:compiler_executables={"c": "/usr/bin/${{ matrix.cc }}", "cpp": "/usr/bin/${{ matrix.cxx }}"}
|
||||
EOF
|
||||
|
||||
# Display profile for verification
|
||||
conan profile show
|
||||
|
||||
- name: Check environment
|
||||
run: |
|
||||
echo "PATH:"
|
||||
@@ -259,13 +293,7 @@ jobs:
|
||||
compiler-id: ${{ matrix.compiler_id }}
|
||||
cache_version: ${{ env.CACHE_VERSION }}
|
||||
main_branch: ${{ env.MAIN_BRANCH_NAME }}
|
||||
compiler: ${{ matrix.compiler }}
|
||||
compiler_version: ${{ matrix.compiler_version }}
|
||||
cc: ${{ matrix.cc }}
|
||||
cxx: ${{ matrix.cxx }}
|
||||
stdlib: ${{ matrix.stdlib }}
|
||||
aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
|
||||
|
||||
- name: Build
|
||||
uses: ./.github/actions/xahau-ga-build
|
||||
@@ -280,8 +308,6 @@ jobs:
|
||||
main_branch: ${{ env.MAIN_BRANCH_NAME }}
|
||||
stdlib: ${{ matrix.stdlib }}
|
||||
clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}
|
||||
aws-access-key-id: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.XAHAUD_GITHUB_ACTIONS_CACHE_NIQ_AWS_ACCESS_KEY }}
|
||||
|
||||
- name: Set artifact name
|
||||
id: set-artifact-name
|
||||
|
||||
@@ -458,7 +458,6 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/tx/impl/CreateOffer.cpp
|
||||
src/ripple/app/tx/impl/CreateTicket.cpp
|
||||
src/ripple/app/tx/impl/Cron.cpp
|
||||
src/ripple/app/tx/impl/CronSet.cpp
|
||||
src/ripple/app/tx/impl/DeleteAccount.cpp
|
||||
src/ripple/app/tx/impl/DepositPreauth.cpp
|
||||
src/ripple/app/tx/impl/Escrow.cpp
|
||||
@@ -476,6 +475,7 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/tx/impl/Payment.cpp
|
||||
src/ripple/app/tx/impl/Remit.cpp
|
||||
src/ripple/app/tx/impl/SetAccount.cpp
|
||||
src/ripple/app/tx/impl/SetCron.cpp
|
||||
src/ripple/app/tx/impl/SetHook.cpp
|
||||
src/ripple/app/tx/impl/SetRemarks.cpp
|
||||
src/ripple/app/tx/impl/SetRegularKey.cpp
|
||||
|
||||
@@ -37,7 +37,6 @@
|
||||
#define KEYLET_NFT_OFFER 23
|
||||
#define KEYLET_HOOK_DEFINITION 24
|
||||
#define KEYLET_HOOK_STATE_DIR 25
|
||||
#define KEYLET_CRON 26
|
||||
|
||||
#define COMPARE_EQUAL 1U
|
||||
#define COMPARE_LESS 2U
|
||||
|
||||
@@ -278,7 +278,8 @@ enum keylet_code : uint32_t {
|
||||
NFT_OFFER = 23,
|
||||
HOOK_DEFINITION = 24,
|
||||
HOOK_STATE_DIR = 25,
|
||||
CRON = 26
|
||||
LAST_KLTYPE_V0 = HOOK_DEFINITION,
|
||||
LAST_KLTYPE_V1 = HOOK_STATE_DIR,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -2903,6 +2903,17 @@ DEFINE_HOOK_FUNCTION(
|
||||
if (write_len < 34)
|
||||
return TOO_SMALL;
|
||||
|
||||
bool const v1 = applyCtx.view().rules().enabled(featureHooksUpdate1);
|
||||
|
||||
if (keylet_type == 0)
|
||||
return INVALID_ARGUMENT;
|
||||
|
||||
auto const last =
|
||||
v1 ? keylet_code::LAST_KLTYPE_V1 : keylet_code::LAST_KLTYPE_V0;
|
||||
|
||||
if (keylet_type > last)
|
||||
return INVALID_ARGUMENT;
|
||||
|
||||
try
|
||||
{
|
||||
switch (keylet_type)
|
||||
@@ -3004,8 +3015,7 @@ DEFINE_HOOK_FUNCTION(
|
||||
return serialize_keylet(kl, memory, write_ptr, write_len);
|
||||
}
|
||||
|
||||
// keylets that take 20 byte account id, and (4 byte uint for 32
|
||||
// byte hash)
|
||||
// keylets that take 20 byte account id, and 4 byte uint
|
||||
case keylet_code::OFFER:
|
||||
case keylet_code::CHECK:
|
||||
case keylet_code::ESCROW:
|
||||
@@ -3048,33 +3058,6 @@ DEFINE_HOOK_FUNCTION(
|
||||
return serialize_keylet(kl, memory, write_ptr, write_len);
|
||||
}
|
||||
|
||||
// keylets that take 20 byte account id, and 4 byte uint
|
||||
case keylet_code::CRON: {
|
||||
if (!applyCtx.view().rules().enabled(featureCron))
|
||||
return INVALID_ARGUMENT;
|
||||
|
||||
if (a == 0 || b == 0)
|
||||
return INVALID_ARGUMENT;
|
||||
if (e != 0 || f != 0 || d != 0)
|
||||
return INVALID_ARGUMENT;
|
||||
|
||||
uint32_t read_ptr = a, read_len = b;
|
||||
|
||||
if (NOT_IN_BOUNDS(read_ptr, read_len, memory_length))
|
||||
return OUT_OF_BOUNDS;
|
||||
|
||||
if (read_len != 20)
|
||||
return INVALID_ARGUMENT;
|
||||
|
||||
ripple::AccountID id = AccountID::fromVoid(memory + read_ptr);
|
||||
|
||||
uint32_t seq = c;
|
||||
|
||||
ripple::Keylet kl = ripple::keylet::cron(seq, id);
|
||||
|
||||
return serialize_keylet(kl, memory, write_ptr, write_len);
|
||||
}
|
||||
|
||||
// keylets that take a 32 byte uint and an 8byte uint64
|
||||
case keylet_code::PAGE: {
|
||||
if (a == 0 || b == 0)
|
||||
@@ -3122,9 +3105,6 @@ DEFINE_HOOK_FUNCTION(
|
||||
}
|
||||
|
||||
case keylet_code::HOOK_STATE_DIR: {
|
||||
if (!applyCtx.view().rules().enabled(featureHooksUpdate1))
|
||||
return INVALID_ARGUMENT;
|
||||
|
||||
if (a == 0 || b == 0 || c == 0 || d == 0)
|
||||
return INVALID_ARGUMENT;
|
||||
|
||||
@@ -3299,7 +3279,7 @@ DEFINE_HOOK_FUNCTION(
|
||||
return INTERNAL_ERROR;
|
||||
}
|
||||
|
||||
return INVALID_ARGUMENT;
|
||||
return NO_SUCH_KEYLET;
|
||||
|
||||
HOOK_TEARDOWN();
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#include <ripple/app/tx/impl/CronSet.h>
|
||||
#include <ripple/app/tx/impl/SetCron.h>
|
||||
#include <ripple/basics/Log.h>
|
||||
#include <ripple/ledger/View.h>
|
||||
#include <ripple/protocol/Feature.h>
|
||||
@@ -28,13 +28,13 @@
|
||||
namespace ripple {
|
||||
|
||||
TxConsequences
|
||||
CronSet::makeTxConsequences(PreflightContext const& ctx)
|
||||
SetCron::makeTxConsequences(PreflightContext const& ctx)
|
||||
{
|
||||
return TxConsequences{ctx.tx, TxConsequences::normal};
|
||||
}
|
||||
|
||||
NotTEC
|
||||
CronSet::preflight(PreflightContext const& ctx)
|
||||
SetCron::preflight(PreflightContext const& ctx)
|
||||
{
|
||||
if (!ctx.rules.enabled(featureCron))
|
||||
return temDISABLED;
|
||||
@@ -47,7 +47,7 @@ CronSet::preflight(PreflightContext const& ctx)
|
||||
|
||||
if (tx.getFlags() & tfCronSetMask)
|
||||
{
|
||||
JLOG(j.warn()) << "CronSet: Invalid flags set.";
|
||||
JLOG(j.warn()) << "SetCron: Invalid flags set.";
|
||||
return temINVALID_FLAG;
|
||||
}
|
||||
|
||||
@@ -69,7 +69,7 @@ CronSet::preflight(PreflightContext const& ctx)
|
||||
// delete operation
|
||||
if (hasDelay || hasRepeat || hasStartTime)
|
||||
{
|
||||
JLOG(j.debug()) << "CronSet: tfCronUnset flag cannot be used with "
|
||||
JLOG(j.debug()) << "SetCron: tfCronUnset flag cannot be used with "
|
||||
"DelaySeconds, RepeatCount or StartTime.";
|
||||
return temMALFORMED;
|
||||
}
|
||||
@@ -81,7 +81,7 @@ CronSet::preflight(PreflightContext const& ctx)
|
||||
if (!hasStartTime)
|
||||
{
|
||||
JLOG(j.debug())
|
||||
<< "CronSet: StartTime is required. Use StartTime=0 for "
|
||||
<< "SetCron: StartTime is required. Use StartTime=0 for "
|
||||
"immediate execution, or specify a future timestamp.";
|
||||
return temMALFORMED;
|
||||
}
|
||||
@@ -89,7 +89,7 @@ CronSet::preflight(PreflightContext const& ctx)
|
||||
if ((!hasDelay && hasRepeat) || (hasDelay && !hasRepeat))
|
||||
{
|
||||
JLOG(j.debug())
|
||||
<< "CronSet: DelaySeconds and RepeatCount must both be present "
|
||||
<< "SetCron: DelaySeconds and RepeatCount must both be present "
|
||||
"for recurring crons, or both absent for one-off crons.";
|
||||
return temMALFORMED;
|
||||
}
|
||||
@@ -101,7 +101,7 @@ CronSet::preflight(PreflightContext const& ctx)
|
||||
if (delay > 31536000UL /* 365 days in seconds */)
|
||||
{
|
||||
JLOG(j.debug())
|
||||
<< "CronSet: DelaySeconds was too high. (max 365 "
|
||||
<< "SetCron: DelaySeconds was too high. (max 365 "
|
||||
"days in seconds).";
|
||||
return temMALFORMED;
|
||||
}
|
||||
@@ -114,7 +114,7 @@ CronSet::preflight(PreflightContext const& ctx)
|
||||
if (recur == 0)
|
||||
{
|
||||
JLOG(j.debug())
|
||||
<< "CronSet: RepeatCount must be greater than 0."
|
||||
<< "SetCron: RepeatCount must be greater than 0."
|
||||
"For one-time execution, omit DelaySeconds and "
|
||||
"RepeatCount.";
|
||||
return temMALFORMED;
|
||||
@@ -122,8 +122,8 @@ CronSet::preflight(PreflightContext const& ctx)
|
||||
if (recur > 256)
|
||||
{
|
||||
JLOG(j.debug())
|
||||
<< "CronSet: RepeatCount too high. Limit is 256. Issue "
|
||||
"new CronSet to increase.";
|
||||
<< "SetCron: RepeatCount too high. Limit is 256. Issue "
|
||||
"new SetCron to increase.";
|
||||
return temMALFORMED;
|
||||
}
|
||||
}
|
||||
@@ -133,7 +133,7 @@ CronSet::preflight(PreflightContext const& ctx)
|
||||
}
|
||||
|
||||
TER
|
||||
CronSet::preclaim(PreclaimContext const& ctx)
|
||||
SetCron::preclaim(PreclaimContext const& ctx)
|
||||
{
|
||||
if (ctx.tx.isFieldPresent(sfStartTime) &&
|
||||
ctx.tx.getFieldU32(sfStartTime) != 0)
|
||||
@@ -146,7 +146,7 @@ CronSet::preclaim(PreclaimContext const& ctx)
|
||||
|
||||
if (startTime < parentCloseTime)
|
||||
{
|
||||
JLOG(ctx.j.debug()) << "CronSet: StartTime must be in the future "
|
||||
JLOG(ctx.j.debug()) << "SetCron: StartTime must be in the future "
|
||||
"(or 0 for immediate execution)";
|
||||
return tecEXPIRED;
|
||||
}
|
||||
@@ -154,7 +154,7 @@ CronSet::preclaim(PreclaimContext const& ctx)
|
||||
if (startTime > ctx.view.parentCloseTime().time_since_epoch().count() +
|
||||
365 * 24 * 60 * 60)
|
||||
{
|
||||
JLOG(ctx.j.debug()) << "CronSet: StartTime is too far in the "
|
||||
JLOG(ctx.j.debug()) << "SetCron: StartTime is too far in the "
|
||||
"future (max 365 days).";
|
||||
return tecEXPIRED;
|
||||
}
|
||||
@@ -163,7 +163,7 @@ CronSet::preclaim(PreclaimContext const& ctx)
|
||||
}
|
||||
|
||||
TER
|
||||
CronSet::doApply()
|
||||
SetCron::doApply()
|
||||
{
|
||||
auto& view = ctx_.view();
|
||||
auto const& tx = ctx_.tx;
|
||||
@@ -205,21 +205,21 @@ CronSet::doApply()
|
||||
auto sleCron = view.peek(klOld);
|
||||
if (!sleCron)
|
||||
{
|
||||
JLOG(j_.warn()) << "CronSet: Cron object didn't exist.";
|
||||
JLOG(j_.warn()) << "SetCron: Cron object didn't exist.";
|
||||
return tefBAD_LEDGER;
|
||||
}
|
||||
|
||||
if (safe_cast<LedgerEntryType>(
|
||||
sleCron->getFieldU16(sfLedgerEntryType)) != ltCRON)
|
||||
{
|
||||
JLOG(j_.warn()) << "CronSet: sfCron pointed to non-cron object!!";
|
||||
JLOG(j_.warn()) << "SetCron: sfCron pointed to non-cron object!!";
|
||||
return tefBAD_LEDGER;
|
||||
}
|
||||
|
||||
if (!view.dirRemove(
|
||||
keylet::ownerDir(id), (*sleCron)[sfOwnerNode], klOld, false))
|
||||
{
|
||||
JLOG(j_.warn()) << "CronSet: Ownerdir bad. " << id;
|
||||
JLOG(j_.warn()) << "SetCron: Ownerdir bad. " << id;
|
||||
return tefBAD_LEDGER;
|
||||
}
|
||||
|
||||
@@ -278,7 +278,7 @@ CronSet::doApply()
|
||||
}
|
||||
|
||||
XRPAmount
|
||||
CronSet::calculateBaseFee(ReadView const& view, STTx const& tx)
|
||||
SetCron::calculateBaseFee(ReadView const& view, STTx const& tx)
|
||||
{
|
||||
auto const baseFee = Transactor::calculateBaseFee(view, tx);
|
||||
|
||||
@@ -290,7 +290,7 @@ CronSet::calculateBaseFee(ReadView const& view, STTx const& tx)
|
||||
tx.isFieldPresent(sfRepeatCount) ? tx.getFieldU32(sfRepeatCount) : 0;
|
||||
|
||||
// factor a cost based on the total number of txns expected
|
||||
// for RepeatCount of 0 we have this txn (CronSet) and the
|
||||
// for RepeatCount of 0 we have this txn (SetCron) and the
|
||||
// single Cron txn (2). For a RepeatCount of 1 we have this txn,
|
||||
// the first time the cron executes, and the second time (3).
|
||||
uint32_t const additionalExpectedExecutions = 1 + repeatCount;
|
||||
@@ -17,8 +17,8 @@
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#ifndef RIPPLE_TX_CRONSET_H_INCLUDED
|
||||
#define RIPPLE_TX_CRONSET_H_INCLUDED
|
||||
#ifndef RIPPLE_TX_SETCRON_H_INCLUDED
|
||||
#define RIPPLE_TX_SETCRON_H_INCLUDED
|
||||
|
||||
#include <ripple/app/tx/impl/Transactor.h>
|
||||
#include <ripple/basics/Log.h>
|
||||
@@ -26,12 +26,12 @@
|
||||
|
||||
namespace ripple {
|
||||
|
||||
class CronSet : public Transactor
|
||||
class SetCron : public Transactor
|
||||
{
|
||||
public:
|
||||
static constexpr ConsequencesFactoryType ConsequencesFactory{Custom};
|
||||
|
||||
explicit CronSet(ApplyContext& ctx) : Transactor(ctx)
|
||||
explicit SetCron(ApplyContext& ctx) : Transactor(ctx)
|
||||
{
|
||||
}
|
||||
|
||||
@@ -29,7 +29,6 @@
|
||||
#include <ripple/app/tx/impl/CreateOffer.h>
|
||||
#include <ripple/app/tx/impl/CreateTicket.h>
|
||||
#include <ripple/app/tx/impl/Cron.h>
|
||||
#include <ripple/app/tx/impl/CronSet.h>
|
||||
#include <ripple/app/tx/impl/DeleteAccount.h>
|
||||
#include <ripple/app/tx/impl/DepositPreauth.h>
|
||||
#include <ripple/app/tx/impl/Escrow.h>
|
||||
@@ -45,6 +44,7 @@
|
||||
#include <ripple/app/tx/impl/Payment.h>
|
||||
#include <ripple/app/tx/impl/Remit.h>
|
||||
#include <ripple/app/tx/impl/SetAccount.h>
|
||||
#include <ripple/app/tx/impl/SetCron.h>
|
||||
#include <ripple/app/tx/impl/SetHook.h>
|
||||
#include <ripple/app/tx/impl/SetRegularKey.h>
|
||||
#include <ripple/app/tx/impl/SetRemarks.h>
|
||||
@@ -184,7 +184,7 @@ invoke_preflight(PreflightContext const& ctx)
|
||||
case ttURITOKEN_CANCEL_SELL_OFFER:
|
||||
return invoke_preflight_helper<URIToken>(ctx);
|
||||
case ttCRON_SET:
|
||||
return invoke_preflight_helper<CronSet>(ctx);
|
||||
return invoke_preflight_helper<SetCron>(ctx);
|
||||
case ttCRON:
|
||||
return invoke_preflight_helper<Cron>(ctx);
|
||||
default:
|
||||
@@ -313,7 +313,7 @@ invoke_preclaim(PreclaimContext const& ctx)
|
||||
case ttURITOKEN_CANCEL_SELL_OFFER:
|
||||
return invoke_preclaim<URIToken>(ctx);
|
||||
case ttCRON_SET:
|
||||
return invoke_preclaim<CronSet>(ctx);
|
||||
return invoke_preclaim<SetCron>(ctx);
|
||||
case ttCRON:
|
||||
return invoke_preclaim<Cron>(ctx);
|
||||
default:
|
||||
@@ -404,7 +404,7 @@ invoke_calculateBaseFee(ReadView const& view, STTx const& tx)
|
||||
case ttURITOKEN_CANCEL_SELL_OFFER:
|
||||
return URIToken::calculateBaseFee(view, tx);
|
||||
case ttCRON_SET:
|
||||
            return CronSet::calculateBaseFee(view, tx);
            return SetCron::calculateBaseFee(view, tx);
        case ttCRON:
            return Cron::calculateBaseFee(view, tx);
        default:

@@ -601,7 +601,7 @@ invoke_apply(ApplyContext& ctx)
            return p();
        }
        case ttCRON_SET: {
            CronSet p(ctx);
            SetCron p(ctx);
            return p();
        }
        case ttCRON: {

@@ -72,7 +72,7 @@ enum class LedgerNameSpace : std::uint16_t {
    URI_TOKEN = 'U',
    IMPORT_VLSEQ = 'I',
    UNL_REPORT = 'R',
    CRON = 'L',
    CRON = 'A',

    // No longer used or supported. Left here to reserve the space
    // to avoid accidental reuse.

@@ -376,8 +376,6 @@ LedgerFormats::LedgerFormats()
            {sfDelaySeconds, soeREQUIRED},
            {sfRepeatCount, soeREQUIRED},
            {sfOwnerNode, soeREQUIRED},
            {sfPreviousTxnID, soeREQUIRED},
            {sfPreviousTxnLgrSeq, soeREQUIRED}
        },
        commonFields);

@@ -256,7 +256,6 @@ JSS(coins);
JSS(children);
JSS(ctid); // in/out: Tx RPC
JSS(cres);
JSS(cron);
JSS(currency_a); // out: BookChanges
JSS(currency_b); // out: BookChanges
JSS(currentShard); // out: NodeToShardStatus

@@ -506,36 +506,6 @@ doLedgerEntry(RPC::JsonContext& context)
            jvResult[jss::error] = "malformedRequest";
        }
    }
    else if (context.params.isMember(jss::cron))
    {
        expectedType = ltCRON;
        if (!context.params[jss::cron].isObject())
        {
            if (!uNodeIndex.parseHex(context.params[jss::cron].asString()))
            {
                uNodeIndex = beast::zero;
                jvResult[jss::error] = "malformedRequest";
            }
        }
        else if (
            !context.params[jss::cron].isMember(jss::owner) ||
            !context.params[jss::cron].isMember(jss::time))
        {
            jvResult[jss::error] = "malformedRequest";
        }
        else
        {
            auto const id = parseBase58<AccountID>(
                context.params[jss::cron][jss::owner].asString());
            if (!id)
                jvResult[jss::error] = "malformedAddress";
            else
                uNodeIndex =
                    keylet::cron(
                        context.params[jss::cron][jss::time].asUInt(), *id)
                        .key;
        }
    }
    else
    {
        if (context.params.isMember("params") &&

@@ -11147,7 +11147,6 @@ public:
#define KEYLET_PAYCHAN 21
#define KEYLET_EMITTED_TXN 22
#define KEYLET_NFT_OFFER 23
#define KEYLET_CRON 26
#define ASSERT(x)\
    if (!(x))\
        rollback((uint32_t)#x, sizeof(#x), __LINE__);
@@ -11210,9 +11209,6 @@ public:
        // Test min size
        ASSERT(util_keylet((uint32_t)buf, 33, KEYLET_SKIP, 0,0,0,0,0,0) == TOO_SMALL);

        // Invalid keylet type
        ASSERT(util_keylet((uint32_t)buf, 34, 0, 0,0,0,0,0,0) == INVALID_ARGUMENT);
        ASSERT(util_keylet((uint32_t)buf, 34, 0x99999999, 0,0,0,0,0,0) == INVALID_ARGUMENT);

        // Test one of each type
        ASSERT(34 == (e=util_keylet(buf, 34, KEYLET_HOOK,
@@ -11655,17 +11651,6 @@ public:
            0,0
        )));

        ASSERT(34 == (e=util_keylet(buf, 34, KEYLET_CRON, SBUF(a), 1, 0, 0, 0)));
        {
            uint8_t ans[] =
            {
                0x00U,0x41U,0xF7U,0xB6U,0x45U,0x43U,0x61U,0x87U,0xCCU,0x61U,
                0x00U,0x00U,0x00U,0x01U,0x0AU,0x45U,0x80U,0x75U,0x7CU,0xDAU,
                0xD9U,0x16U,0x7EU,0xEEU,0xC1U,0x3CU,0x6CU,0x15U,0xD5U,0x17U,
                0xE2U,0x72U,0x9EU,0xC8
            };
            ASSERT_KL_EQ(ans);
        }
        accept(0,0,0);
    }
    )[test.hook]"];

File diff suppressed because it is too large
Load Diff
599
src/test/app/build_test_hooks.py
Normal file
@@ -0,0 +1,599 @@
#!/usr/bin/env python3
"""
Generate SetHook_wasm.h from SetHook_test.cpp

Extracts WASM test code blocks from the test file, compiles them using wasmcc or wat2wasm,
and generates a C++ header file with the compiled bytecode.

Features intelligent caching based on source content and binary versions.
"""

import argparse
import hashlib
import logging
import os
import re
import subprocess
import sys
import tempfile
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from typing import Dict, List, Optional, Tuple


class BinaryChecker:
    """Check for required binaries and provide installation instructions."""

    REQUIRED_BINARIES = {
        "wasmcc": "curl https://raw.githubusercontent.com/wasienv/wasienv/master/install.sh | sh",
        "hook-cleaner": "git clone https://github.com/RichardAH/hook-cleaner-c.git && cd hook-cleaner-c && make && mkdir -p ~/.local/bin && cp hook-cleaner ~/.local/bin/ (ensure ~/.local/bin is in PATH)",
        "wat2wasm": "brew install wabt",
        "clang-format": "curl https://mise.run | sh && mise use -g clang-format@10",
    }

    # Note: Python implementation doesn't need GNU sed/grep, xxd, or bc.
    # Regex and byte formatting are done natively in Python.

    def __init__(self, logger: logging.Logger):
        self.logger = logger

    def check_binary(self, name: str) -> Optional[str]:
        """Check if binary exists and return its path."""
        result = subprocess.run(["which", name], capture_output=True, text=True)
        if result.returncode == 0:
            path = result.stdout.strip()
            self.logger.info(f"✓ {name}: {path}")
            return path
        return None

    def check_all(self) -> bool:
        """Check all required binaries. Returns True if all found."""
        self.logger.info("Checking required tools...")
        all_found = True

        for binary, install_msg in self.REQUIRED_BINARIES.items():
            path = self.check_binary(binary)
            if not path:
                self.logger.error(f"✗ {binary}: NOT FOUND")
                self.logger.error(f" Install: {install_msg}")
                all_found = False

        if all_found:
            self.logger.info("All required tools found!")

        return all_found


class CompilationCache:
    """Cache compiled WASM bytecode based on source and binary versions."""

    def __init__(self, logger: logging.Logger):
        self.logger = logger
        self.cache_dir = Path.home() / ".cache" / "build_test_hooks"
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.binary_versions = self._get_binary_versions()
        self.logger.debug(f"Cache directory: {self.cache_dir}")

    def _get_binary_version(self, binary: str) -> str:
        """Get version hash of a binary."""
        try:
            which_result = subprocess.run(
                ["which", binary], capture_output=True, text=True, check=True
            )
            binary_path = which_result.stdout.strip()

            # Hash the binary file itself
            hasher = hashlib.sha256()
            with open(binary_path, "rb") as f:
                hasher.update(f.read())
            return hasher.hexdigest()[:16]
        except Exception as e:
            self.logger.warning(f"Could not hash {binary}: {e}")
            return "unknown"

    def _get_binary_versions(self) -> Dict[str, str]:
        """Get version hashes of all compilation binaries."""
        binaries = ["wasmcc", "hook-cleaner", "wat2wasm"]
        versions = {}

        for binary in binaries:
            versions[binary] = self._get_binary_version(binary)
            self.logger.debug(f"{binary} version hash: {versions[binary]}")

        return versions

    def _compute_cache_key(self, source: str, is_wat: bool) -> str:
        """Compute cache key from source and binary versions."""
        hasher = hashlib.sha256()
        hasher.update(source.encode("utf-8"))
        hasher.update(b"wat" if is_wat else b"c")

        # Include relevant binary versions
        if is_wat:
            hasher.update(self.binary_versions["wat2wasm"].encode("utf-8"))
        else:
            hasher.update(self.binary_versions["wasmcc"].encode("utf-8"))
            hasher.update(self.binary_versions["hook-cleaner"].encode("utf-8"))

        return hasher.hexdigest()

    def get(self, source: str, is_wat: bool) -> Optional[bytes]:
        """Get cached bytecode if available."""
        cache_key = self._compute_cache_key(source, is_wat)
        cache_file = self.cache_dir / f"{cache_key}.wasm"

        if cache_file.exists():
            self.logger.debug(f"Cache hit: {cache_key[:16]}...")
            return cache_file.read_bytes()

        self.logger.debug(f"Cache miss: {cache_key[:16]}...")
        return None

    def put(self, source: str, is_wat: bool, bytecode: bytes) -> None:
        """Store bytecode in cache."""
        cache_key = self._compute_cache_key(source, is_wat)
        cache_file = self.cache_dir / f"{cache_key}.wasm"

        cache_file.write_bytes(bytecode)
        self.logger.debug(f"Cached: {cache_key[:16]}... ({len(bytecode)} bytes)")


class SourceValidator:
    """Validate C source code for undeclared functions."""

    def __init__(self, logger: logging.Logger):
        self.logger = logger

    def extract_declarations(self, source: str) -> Tuple[List[str], List[str]]:
        """Extract declared and used function names."""
        # Normalize source: collapse whitespace/newlines to handle multi-line declarations
        normalized = re.sub(r"\s+", " ", source)

        declared = set()
        used = set()

        # Find all extern/define declarations (handles multi-line)
        # Matches: extern TYPE function_name ( ...
        decl_pattern = r"(?:extern|define)\s+[a-z0-9_]+\s+([a-z_-]+)\s*\("
        for match in re.finditer(decl_pattern, normalized):
            func_name = match.group(1)
            if func_name != "sizeof":
                declared.add(func_name)

        # Find all function calls
        # Matches: function_name(
        call_pattern = r"([a-z_-]+)\("
        for match in re.finditer(call_pattern, normalized):
            func_name = match.group(1)
            if func_name != "sizeof" and not func_name.startswith(("hook", "cbak")):
                used.add(func_name)

        return sorted(declared), sorted(used)

    def validate(self, source: str, counter: int) -> None:
        """Validate that all used functions are declared."""
        declared, used = self.extract_declarations(source)
        undeclared = set(used) - set(declared)

        if undeclared:
            self.logger.error(
                f"Undeclared functions in block {counter}: {', '.join(sorted(undeclared))}"
            )
            self.logger.debug(f" Declared: {', '.join(declared)}")
            self.logger.debug(f" Used: {', '.join(used)}")
            raise ValueError(f"Undeclared functions: {', '.join(sorted(undeclared))}")


class WasmCompiler:
    """Compile WASM from C or WAT source."""

    def __init__(self, logger: logging.Logger, wasm_dir: Path, cache: CompilationCache):
        self.logger = logger
        self.wasm_dir = wasm_dir
        self.cache = cache
        self.validator = SourceValidator(logger)

    def is_wat_format(self, source: str) -> bool:
        """Check if source is WAT format."""
        return "(module" in source

    def compile_c(self, source: str, counter: int) -> bytes:
        """Compile C source to WASM."""
        self.logger.debug(f"Compiling C for block {counter}")
        self.validator.validate(source, counter)

        # Save source for debugging
        source_file = self.wasm_dir / f"test-{counter}-gen.c"
        source_file.write_text(f'#include "api.h"\n{source}')

        # Compile with wasmcc (binary I/O)
        wasmcc_result = subprocess.run(
            [
                "wasmcc",
                "-x",
                "c",
                "/dev/stdin",
                "-o",
                "/dev/stdout",
                "-O2",
                "-Wl,--allow-undefined",
            ],
            input=source.encode("utf-8"),
            capture_output=True,
            check=True,
        )

        # Clean with hook-cleaner (binary I/O)
        cleaner_result = subprocess.run(
            ["hook-cleaner", "-", "-"],
            input=wasmcc_result.stdout,
            capture_output=True,
            check=True,
        )

        return cleaner_result.stdout

    def compile_wat(self, source: str) -> bytes:
        """Compile WAT source to WASM."""
        self.logger.debug("Compiling WAT")
        source = re.sub(r"/\*end\*/$", "", source)

        result = subprocess.run(
            ["wat2wasm", "-", "-o", "/dev/stdout"],
            input=source.encode("utf-8"),
            capture_output=True,
            check=True,
        )

        return result.stdout

    def compile(self, source: str, counter: int) -> bytes:
        """Compile source, using cache if available."""
        is_wat = self.is_wat_format(source)

        # Check cache first
        cached = self.cache.get(source, is_wat)
        if cached is not None:
            self.logger.info(f"Block {counter}: using cached bytecode")
            return cached

        # Compile
        self.logger.info(f"Block {counter}: compiling {'WAT' if is_wat else 'C'}")

        try:
            if is_wat:
                bytecode = self.compile_wat(source)
            else:
                bytecode = self.compile_c(source, counter)

            # Cache result
            self.cache.put(source, is_wat, bytecode)
            return bytecode

        except subprocess.CalledProcessError as e:
            # Try to decode stderr if it exists
            error_msg = str(e)
            if e.stderr:
                try:
                    error_msg = e.stderr.decode("utf-8")
                except:
                    error_msg = f"Binary error output ({len(e.stderr)} bytes)"
            self.logger.error(f"Compilation failed: {error_msg}")
            raise


class OutputFormatter:
    """Format compiled bytecode as C++ arrays."""

    @staticmethod
    def bytes_to_cpp_array(data: bytes) -> str:
        """Convert binary data to C++ array format."""
        lines = []
        for i in range(0, len(data), 10):
            chunk = data[i : i + 10]
            hex_values = ",".join(f"0x{b:02X}U" for b in chunk)
            lines.append(f" {hex_values},")
        return "\n".join(lines)


class SourceExtractor:
    """Extract WASM test blocks from source file."""

    def __init__(self, logger: logging.Logger, input_file: Path):
        self.logger = logger
        self.input_file = input_file

    def extract(self) -> List[Tuple[str, int]]:
        """Extract all WASM test blocks with their line numbers. Returns [(source, line_number), ...]"""
        self.logger.info(f"Reading {self.input_file}")
        content = self.input_file.read_text()

        pattern = r'R"\[test\.hook\]\((.*?)\)\[test\.hook\]"'
        blocks_with_lines = []

        for match in re.finditer(pattern, content, re.DOTALL):
            source = match.group(1)
            # Count newlines before this match to get line number
            line_number = content[: match.start()].count("\n") + 1
            blocks_with_lines.append((source, line_number))

        self.logger.info(f"Found {len(blocks_with_lines)} WASM test blocks")
        return blocks_with_lines


class OutputWriter:
    """Write compiled blocks to output file."""

    HEADER = """
//This file is generated by build_test_hooks.py
#ifndef SETHOOK_WASM_INCLUDED
#define SETHOOK_WASM_INCLUDED
#include <map>
#include <stdint.h>
#include <string>
#include <vector>
namespace ripple {
namespace test {
std::map<std::string, std::vector<uint8_t>> wasm = {
"""

    FOOTER = """};
}
}
#endif
"""

    def __init__(self, logger: logging.Logger, output_file: Path, cache_dir: Path):
        self.logger = logger
        self.output_file = output_file
        self.cache_dir = cache_dir

    def _get_clang_format_cache_file(self, content_hash: str) -> Path:
        """Get cache file path for formatted output."""
        return self.cache_dir / f"formatted_{content_hash}.h"

    def _format_content(self, unformatted_content: str) -> str:
        """Format content using clang-format via temp file."""
        with tempfile.NamedTemporaryFile(mode="w", suffix=".h", delete=False) as tmp:
            tmp.write(unformatted_content)
            tmp_path = tmp.name

        try:
            subprocess.run(["clang-format", "-i", tmp_path], check=True)
            with open(tmp_path, "r") as f:
                return f.read()
        finally:
            os.unlink(tmp_path)

    def write(
        self, compiled_blocks: Dict[int, Tuple[str, bytes]], force_write: bool = False
    ) -> None:
        """Write all compiled blocks to output file, only if changed."""
        # Build unformatted content
        unformatted = []
        unformatted.append(self.HEADER)
        for counter in sorted(compiled_blocks.keys()):
            source, bytecode = compiled_blocks[counter]
            unformatted.append(f"/* ==== WASM: {counter} ==== */\n")
            unformatted.append('{ R"[test.hook](')
            unformatted.append(source)
            unformatted.append(')[test.hook]",\n{\n')
            unformatted.append(OutputFormatter.bytes_to_cpp_array(bytecode))
            unformatted.append("\n}},\n\n")
        unformatted.append(self.FOOTER)
        unformatted_content = "".join(unformatted)

        # Hash the unformatted content
        content_hash = hashlib.sha256(unformatted_content.encode("utf-8")).hexdigest()
        cache_file = self._get_clang_format_cache_file(content_hash)

        # Get formatted content (from cache or by formatting)
        if cache_file.exists():
            self.logger.info("Using cached clang-format output")
            formatted_content = cache_file.read_text()
        else:
            self.logger.info("Formatting with clang-format")
            formatted_content = self._format_content(unformatted_content)
            cache_file.write_text(formatted_content)
            self.logger.debug(f"Cached formatted output: {content_hash[:16]}...")

        # Check if we need to write (compare with existing file)
        if not force_write and self.output_file.exists():
            existing_content = self.output_file.read_text()
            if existing_content == formatted_content:
                self.logger.info(
                    "Output unchanged, skipping write to avoid triggering rebuild"
                )
                return

        # Write the file
        self.logger.info(f"Writing {self.output_file}")
        self.output_file.write_text(formatted_content)


class TestHookBuilder:
    """Main builder orchestrating the compilation process."""

    def __init__(self, args: argparse.Namespace):
        self.args = args
        self.logger = self._setup_logging()
        self.script_dir = Path(__file__).parent
        self.wasm_dir = self.script_dir / "generated" / "hook" / "c"
        self.input_file = self.script_dir / "SetHook_test.cpp"
        self.output_file = self.script_dir / "SetHook_wasm.h"

        self.checker = BinaryChecker(self.logger)
        self.cache = CompilationCache(self.logger)
        self.compiler = WasmCompiler(self.logger, self.wasm_dir, self.cache)
        self.extractor = SourceExtractor(self.logger, self.input_file)
        self.writer = OutputWriter(self.logger, self.output_file, self.cache.cache_dir)

    def _setup_logging(self) -> logging.Logger:
        """Setup logging with specified level."""
        level = getattr(logging, self.args.log_level.upper())
        logging.basicConfig(level=level, format="%(levelname)s: %(message)s")
        return logging.getLogger(__name__)

    def _get_worker_count(self) -> int:
        """Get number of parallel workers to use."""
        if self.args.jobs > 0:
            return self.args.jobs
        return os.cpu_count() or 1

    def compile_block(
        self, counter: int, source: str, line_number: int
    ) -> Tuple[int, str, bytes]:
        """Compile a single block."""
        bytecode = self.compiler.compile(source, counter)
        return (counter, source, bytecode)

    def _format_block_ranges(self, block_numbers: List[int]) -> str:
        """Format block numbers as compact ranges (e.g., '1-3,5,7-9')."""
        if not block_numbers:
            return ""

        sorted_blocks = sorted(block_numbers)
        ranges = []
        start = sorted_blocks[0]
        end = sorted_blocks[0]

        for num in sorted_blocks[1:]:
            if num == end + 1:
                end = num
            else:
                if start == end:
                    ranges.append(str(start))
                else:
                    ranges.append(f"{start}-{end}")
                start = end = num

        # Add final range
        if start == end:
            ranges.append(str(start))
        else:
            ranges.append(f"{start}-{end}")

        return ",".join(ranges)

    def compile_all_blocks(
        self, blocks: List[Tuple[str, int]]
    ) -> Dict[int, Tuple[str, bytes]]:
        """Compile all blocks in parallel."""
        workers = self._get_worker_count()
        self.logger.info(f"Compiling {len(blocks)} blocks using {workers} workers")

        compiled = {}
        failed_blocks = []

        with ThreadPoolExecutor(max_workers=workers) as executor:
            futures = {
                executor.submit(self.compile_block, i, block, line_num): (i, line_num)
                for i, (block, line_num) in enumerate(blocks)
            }

            for future in as_completed(futures):
                counter, line_num = futures[future]
                try:
                    result_counter, source, bytecode = future.result()
                    compiled[result_counter] = (source, bytecode)
                except Exception as e:
                    self.logger.error(
                        f"Block {counter} (line {line_num} in {self.input_file.name}) failed: {e}"
                    )
                    failed_blocks.append(counter)

        if failed_blocks:
            block_range = self._format_block_ranges(failed_blocks)
            total = len(failed_blocks)
            plural = "s" if total > 1 else ""
            raise RuntimeError(f"Block{plural} {block_range} failed ({total} total)")

        return compiled

    def build(self) -> None:
        """Execute the full build process."""
        self.logger.info("Starting WASM test hook build")

        # Display configuration
        workers = self._get_worker_count()
        self.logger.info("Configuration:")
        self.logger.info(f" Workers: {workers} (CPU count: {os.cpu_count()})")
        self.logger.info(f" Log level: {self.args.log_level.upper()}")
        self.logger.info(f" Force write: {self.args.force_write}")
        self.logger.info(f" Input: {self.input_file}")
        self.logger.info(f" Output: {self.output_file}")
        self.logger.info(f" Cache: {self.cache.cache_dir}")
        self.logger.info(f" WASM dir: {self.wasm_dir}")
        self.logger.info("")

        if not self.checker.check_all():
            self.logger.error("Missing required binaries")
            sys.exit(1)

        self.wasm_dir.mkdir(parents=True, exist_ok=True)

        blocks = self.extractor.extract()
        compiled = self.compile_all_blocks(blocks)
        self.writer.write(compiled, force_write=self.args.force_write)

        self.logger.info(f"Successfully generated {self.output_file}")


def create_parser() -> argparse.ArgumentParser:
    """Create argument parser."""
    parser = argparse.ArgumentParser(
        description="Generate SetHook_wasm.h from SetHook_test.cpp",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
 %(prog)s # Build with INFO logging
 %(prog)s --log-level=debug # Build with DEBUG logging
 %(prog)s -j 4 # Build with 4 workers
 %(prog)s -j 1 # Build sequentially
 %(prog)s --force-write # Always write output (trigger rebuild)
""",
    )

    parser.add_argument(
        "--log-level",
        default="info",
        choices=["debug", "info", "warning", "error"],
        help="Set logging level (default: info)",
    )

    parser.add_argument(
        "-j",
        "--jobs",
        type=int,
        default=0,
        metavar="N",
        help="Parallel workers (default: CPU count)",
    )

    parser.add_argument(
        "--force-write",
        action="store_true",
        help="Always write output file even if unchanged (triggers rebuild)",
    )

    return parser


def main():
    parser = create_parser()
    args = parser.parse_args()

    try:
        builder = TestHookBuilder(args)
        builder.build()
    except RuntimeError as e:
        # RuntimeError has our nicely formatted message
        logging.error(f"Build failed: {e}")
        sys.exit(1)
    except Exception as e:
        logging.error(f"Build failed: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()

@@ -1839,88 +1839,6 @@ public:
        }
    }

    void
    testLedgerEntryCron()
    {
        testcase("ledger_entry Request Cron");
        using namespace test::jtx;

        Env env{*this};

        Account const alice{"alice"};
        env.fund(XRP(10000), alice);
        env.close();

        auto const startTime =
            env.current()->parentCloseTime().time_since_epoch().count() + 100;
        env(cron::set(alice),
            cron::startTime(startTime),
            cron::delay(100),
            cron::repeat(200),
            fee(XRP(1)),
            ter(tesSUCCESS));
        env.close();

        std::string const ledgerHash{to_string(env.closed()->info().hash)};

        uint256 const cronIndex{keylet::cron(startTime, alice).key};
        {
            // Request the cron using its index.
            Json::Value jvParams;
            jvParams[jss::cron] = to_string(cronIndex);
            jvParams[jss::ledger_hash] = ledgerHash;
            Json::Value const jrr = env.rpc(
                "json", "ledger_entry", to_string(jvParams))[jss::result];
            BEAST_EXPECT(jrr[jss::node][sfOwner.jsonName] == alice.human());
            BEAST_EXPECT(jrr[jss::node][sfStartTime.jsonName] == startTime);
            BEAST_EXPECT(jrr[jss::node][sfDelaySeconds.jsonName] == 100);
            BEAST_EXPECT(jrr[jss::node][sfRepeatCount.jsonName] == 200);
        }
        {
            // Request the cron using its owner and time.
            Json::Value jvParams;
            jvParams[jss::cron] = Json::objectValue;
            jvParams[jss::cron][jss::owner] = alice.human();
            jvParams[jss::cron][jss::time] = startTime;
            jvParams[jss::ledger_hash] = ledgerHash;
            Json::Value const jrr = env.rpc(
                "json", "ledger_entry", to_string(jvParams))[jss::result];
            BEAST_EXPECT(jrr[jss::node][sfOwner.jsonName] == alice.human());
            BEAST_EXPECT(jrr[jss::node][sfStartTime.jsonName] == startTime);
            BEAST_EXPECT(jrr[jss::node][sfDelaySeconds.jsonName] == 100);
            BEAST_EXPECT(jrr[jss::node][sfRepeatCount.jsonName] == 200);
        }
        {
            // Malformed cron object. Missing owner member.
            Json::Value jvParams;
            jvParams[jss::cron] = Json::objectValue;
            jvParams[jss::cron][jss::time] = startTime;
            jvParams[jss::ledger_hash] = ledgerHash;
            Json::Value const jrr = env.rpc(
                "json", "ledger_entry", to_string(jvParams))[jss::result];
            checkErrorValue(jrr, "malformedRequest", "");
        }
        {
            // Malformed cron object. Missing time member.
            Json::Value jvParams;
            jvParams[jss::cron] = Json::objectValue;
            jvParams[jss::cron][jss::owner] = alice.human();
            jvParams[jss::ledger_hash] = ledgerHash;
            Json::Value const jrr = env.rpc(
                "json", "ledger_entry", to_string(jvParams))[jss::result];
            checkErrorValue(jrr, "malformedRequest", "");
        }
        {
            // Request an index that is not a cron entry.
            Json::Value jvParams;
            jvParams[jss::cron] = ledgerHash;
            jvParams[jss::ledger_hash] = ledgerHash;
            Json::Value const jrr = env.rpc(
                "json", "ledger_entry", to_string(jvParams))[jss::result];
            checkErrorValue(jrr, "entryNotFound", "");
        }
    }

    void
    testLedgerEntryUnknownOption()
    {
@@ -2447,7 +2365,6 @@ public:
        testLedgerEntryTicket();
        testLedgerEntryURIToken();
        testLedgerEntryImportVLSeq();
        testLedgerEntryCron();
        testLedgerEntryUnknownOption();
        testLookupLedger();
        testNoQueue();
