feat(wip): experiment overlayfs
.github/workflows/build-in-docker.yml.disabled (new file, 95 lines, vendored)
@@ -0,0 +1,95 @@
name: Build using Docker

on:
  push:
    branches: ["dev", "candidate", "release", "jshooks"]
  pull_request:
    branches: ["dev", "candidate", "release", "jshooks"]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP: 1

jobs:
  checkout:
    runs-on: [self-hosted, vanity]
    outputs:
      checkout_path: ${{ steps.vars.outputs.checkout_path }}
    steps:
      - name: Prepare checkout path
        id: vars
        run: |
          SAFE_BRANCH=$(echo "${{ github.ref_name }}" | sed -e 's/[^a-zA-Z0-9._-]/-/g')
          CHECKOUT_PATH="${SAFE_BRANCH}-${{ github.sha }}"
          echo "checkout_path=${CHECKOUT_PATH}" >> "$GITHUB_OUTPUT"

      - uses: actions/checkout@v4
        with:
          path: ${{ steps.vars.outputs.checkout_path }}
          clean: true
          fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history

  build:
    runs-on: [self-hosted, vanity]
    needs: [checkout]
    defaults:
      run:
        working-directory: ${{ needs.checkout.outputs.checkout_path }}
    steps:
      - name: Set Cleanup Script Path
        run: |
          echo "JOB_CLEANUP_SCRIPT=$(mktemp)" >> $GITHUB_ENV

      - name: Build using Docker
        run: /bin/bash release-builder.sh

      - name: Stop Container (Cleanup)
        if: always()
        run: |
          echo "Running cleanup script: $JOB_CLEANUP_SCRIPT"
          /bin/bash -e -x "$JOB_CLEANUP_SCRIPT"
          CLEANUP_EXIT_CODE=$?

          if [[ "$CLEANUP_EXIT_CODE" -eq 0 ]]; then
            echo "Cleanup script succeeded."
            rm -f "$JOB_CLEANUP_SCRIPT"
            echo "Cleanup script removed."
          else
            echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT"
          fi

          if [[ "${DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP}" == "1" ]]; then
            echo "🔍 Checking for leftover containers..."
            BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "")

            if [[ -n "$BUILD_CONTAINERS" ]]; then
              echo "⚠️ WARNING: Some build containers are still running"
              echo "$BUILD_CONTAINERS"
            else
              echo "✅ No build containers found"
            fi
          fi

  tests:
    runs-on: [self-hosted, vanity]
    needs: [build, checkout]
    defaults:
      run:
        working-directory: ${{ needs.checkout.outputs.checkout_path }}
    steps:
      - name: Unit tests
        run: /bin/bash docker-unit-tests.sh

  cleanup:
    runs-on: [self-hosted, vanity]
    needs: [tests, checkout]
    if: always()
    steps:
      - name: Cleanup workspace
        run: |
          CHECKOUT_PATH="${{ needs.checkout.outputs.checkout_path }}"
          echo "Cleaning workspace for ${CHECKOUT_PATH}"
          rm -rf "${{ github.workspace }}/${CHECKOUT_PATH}"
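Note on the cleanup contract above: the build job only creates an empty temp file and exports its path as JOB_CLEANUP_SCRIPT; release-builder.sh is expected to append its own teardown commands there, and the "Stop Container (Cleanup)" step runs whatever it finds. A minimal sketch of how a build script could honor that contract, assuming (hypothetically) that it starts a container whose name carries the xahaud_cached_builder prefix the leftover-container check greps for; the container name and build commands below are illustrative only, not taken from this commit:

#!/bin/bash
# Sketch only: how a build script could register cleanup work in JOB_CLEANUP_SCRIPT.
set -euo pipefail

CONTAINER_NAME="xahaud_cached_builder_$$"   # hypothetical naming scheme

# Start the long-lived build container.
docker run -d --name "$CONTAINER_NAME" ubuntu:22.04 sleep infinity

# Register teardown commands so the workflow's cleanup step can run them
# even if the build itself fails part-way through.
if [ -n "${JOB_CLEANUP_SCRIPT:-}" ]; then
  cat >> "$JOB_CLEANUP_SCRIPT" <<EOF
docker stop "$CONTAINER_NAME" || true
docker rm -f "$CONTAINER_NAME" || true
EOF
fi

# ... actual build commands would run here, e.g. docker exec "$CONTAINER_NAME" make ...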
.github/workflows/clang-format.yml.disabled (new file, 72 lines, vendored)
@@ -0,0 +1,72 @@
name: clang-format

on: [push, pull_request]

jobs:
  check:
    runs-on: ubuntu-22.04
    env:
      CLANG_VERSION: 10
    steps:
      - uses: actions/checkout@v3
      # - name: Install clang-format
      #   run: |
      #     codename=$( lsb_release --codename --short )
      #     sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
      #     deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
      #     deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
      #     EOF
      #     wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
      #     sudo apt-get update -y
      #     sudo apt-get install -y clang-format-${CLANG_VERSION}

      # Temporary fix until this commit is merged
      # https://github.com/XRPLF/rippled/commit/552377c76f55b403a1c876df873a23d780fcc81c
      - name: Download and install clang-format
        run: |
          sudo apt-get update -y
          sudo apt-get install -y libtinfo5
          curl -LO https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
          tar -xf clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
          sudo mv clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04 /opt/clang-10
          sudo ln -s /opt/clang-10/bin/clang-format /usr/local/bin/clang-format-10
      - name: Format src/ripple
        run: find src/ripple -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
      - name: Format src/test
        run: find src/test -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
      - name: Check for differences
        id: assert
        run: |
          set -o pipefail
          git diff --exit-code | tee "clang-format.patch"
      - name: Upload patch
        if: failure() && steps.assert.outcome == 'failure'
        uses: actions/upload-artifact@v4
        continue-on-error: true
        with:
          name: clang-format.patch
          if-no-files-found: ignore
          path: clang-format.patch
      - name: What happened?
        if: failure() && steps.assert.outcome == 'failure'
        env:
          PREAMBLE: |
            If you are reading this, you are looking at a failed Github Actions
            job. That means you pushed one or more files that did not conform
            to the formatting specified in .clang-format. That may be because
            you neglected to run 'git clang-format' or 'clang-format' before
            committing, or that your version of clang-format has an
            incompatibility with the one on this
            machine, which is:
          SUGGESTION: |

            To fix it, you can do one of two things:
            1. Download and apply the patch generated as an artifact of this
               job to your repo, commit, and push.
            2. Run 'git-clang-format --extensions c,cpp,h,cxx,ipp dev'
               in your repo, commit, and push.
        run: |
          echo "${PREAMBLE}"
          clang-format-${CLANG_VERSION} --version
          echo "${SUGGESTION}"
          exit 1
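For reference, one way to use the clang-format.patch artifact this job uploads, sketched with the GitHub CLI; the run id 123456789 is a placeholder, and the commands only assume what the job message above already states (the patch is plain git diff output):

#!/bin/bash
set -euo pipefail

# Download the artifact produced by the failed run (placeholder run id),
# then apply the formatting patch and push the fix.
gh run download 123456789 --name clang-format.patch
git apply clang-format.patch
git commit -am "Apply clang-format"
git push

Alternatively, as the job message suggests, running git-clang-format --extensions c,cpp,h,cxx,ipp dev locally produces the same result without downloading anything.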
.github/workflows/levelization.yml.disabled (new file, 49 lines, vendored)
@@ -0,0 +1,49 @@
name: levelization

on: [push, pull_request]

jobs:
  check:
    runs-on: ubuntu-latest
    env:
      CLANG_VERSION: 10
    steps:
      - uses: actions/checkout@v3
      - name: Check levelization
        run: Builds/levelization/levelization.sh
      - name: Check for differences
        id: assert
        run: |
          set -o pipefail
          git diff --exit-code | tee "levelization.patch"
      - name: Upload patch
        if: failure() && steps.assert.outcome == 'failure'
        uses: actions/upload-artifact@v4
        continue-on-error: true
        with:
          name: levelization.patch
          if-no-files-found: ignore
          path: levelization.patch
      - name: What happened?
        if: failure() && steps.assert.outcome == 'failure'
        env:
          MESSAGE: |
            If you are reading this, you are looking at a failed Github
            Actions job. That means you changed the dependency relationships
            between the modules in rippled. That may be an improvement or a
            regression. This check doesn't judge.

            A rule of thumb, though, is that if your changes caused
            something to be removed from loops.txt, that's probably an
            improvement. If something was added, it's probably a regression.

            To fix it, you can do one of two things:
            1. Download and apply the patch generated as an artifact of this
               job to your repo, commit, and push.
            2. Run './Builds/levelization/levelization.sh' in your repo,
               commit, and push.

            See Builds/levelization/README.md for more info.
        run: |
          echo "${MESSAGE}"
          exit 1
.github/workflows/test-overlayfs-delta.yml (new file, 129 lines, vendored)
@@ -0,0 +1,129 @@
name: Test OverlayFS Delta Extraction

on:
  push:
    branches: ["*"]
  workflow_dispatch:

jobs:
  test-overlayfs:
    runs-on: ubuntu-latest
    steps:
      - name: Setup OverlayFS layers
        run: |
          echo "=== Creating directory structure ==="
          mkdir -p /tmp/test/{base,delta,upper,work,merged}

          echo "=== Creating base layer files ==="
          echo "base file 1" > /tmp/test/base/file1.txt
          echo "base file 2" > /tmp/test/base/file2.txt
          echo "base file 3" > /tmp/test/base/file3.txt
          mkdir -p /tmp/test/base/subdir
          echo "base subdir file" > /tmp/test/base/subdir/file.txt

          echo "=== Base layer contents ==="
          find /tmp/test/base -type f -exec sh -c 'echo "{}:"; cat "{}"' \;

          echo "=== Mounting OverlayFS ==="
          sudo mount -t overlay overlay \
            -o lowerdir=/tmp/test/base,upperdir=/tmp/test/upper,workdir=/tmp/test/work \
            /tmp/test/merged

          echo "=== Mounted successfully ==="
          mount | grep overlay

      - name: Verify merged view shows base files
        run: |
          echo "=== Contents of /merged (should show base files) ==="
          ls -R /tmp/test/merged
          find /tmp/test/merged -type f -exec sh -c 'echo "{}:"; cat "{}"' \;

      - name: Make changes via merged layer
        run: |
          echo "=== Making changes via /merged ==="

          # Overwrite existing file
          echo "MODIFIED file 2" > /tmp/test/merged/file2.txt
          echo "Modified file2.txt"

          # Create new file
          echo "NEW file 4" > /tmp/test/merged/file4.txt
          echo "Created new file4.txt"

          # Create new directory with file
          mkdir -p /tmp/test/merged/newdir
          echo "NEW file in new dir" > /tmp/test/merged/newdir/newfile.txt
          echo "Created newdir/newfile.txt"

          # Add file to existing directory
          echo "NEW file in existing subdir" > /tmp/test/merged/subdir/newfile.txt
          echo "Created subdir/newfile.txt"

          echo "=== Changes complete ==="

      - name: Show the delta (upperdir)
        run: |
          echo "========================================"
          echo "THE DELTA (only changes in /upper):"
          echo "========================================"

          if [ -z "$(ls -A /tmp/test/upper)" ]; then
            echo "Upper directory is empty - no changes detected"
          else
            echo "Upper directory structure:"
            ls -R /tmp/test/upper
            echo ""
            echo "Upper directory files with content:"
            find /tmp/test/upper -type f -exec sh -c 'echo "---"; echo "FILE: {}"; cat "{}"; echo ""' \;

            echo "========================================"
            echo "SIZE OF DELTA:"
            du -sh /tmp/test/upper
            echo "========================================"
          fi

      - name: Compare base vs upper vs merged
        run: |
          echo "========================================"
          echo "COMPARISON:"
          echo "========================================"

          echo "BASE layer (original, untouched):"
          ls -la /tmp/test/base/
          echo ""

          echo "UPPER layer (DELTA - only changes):"
          ls -la /tmp/test/upper/
          echo ""

          echo "MERGED layer (unified view = base + upper):"
          ls -la /tmp/test/merged/
          echo ""

          echo "========================================"
          echo "PROOF: Upper dir contains ONLY the delta!"
          echo "========================================"

      - name: Simulate tarball creation (what we'd upload)
        run: |
          echo "=== Creating tarball of delta ==="
          tar -czf /tmp/delta.tar.gz -C /tmp/test/upper .

          echo "Delta tarball size:"
          ls -lh /tmp/delta.tar.gz

          echo ""
          echo "Delta tarball contents:"
          tar -tzf /tmp/delta.tar.gz

          echo ""
          echo "========================================"
          echo "This is what we'd upload to S3/rsync!"
          echo "Only ~few KB instead of entire cache!"
          echo "========================================"

      - name: Cleanup
        if: always()
        run: |
          echo "=== Unmounting OverlayFS ==="
          sudo umount /tmp/test/merged || true
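The workflow above only produces the delta tarball; the consuming side is not part of this commit. A hypothetical sketch of how a later job or machine could restore a cached base layer and lay the downloaded delta on top of it (the paths and the download URL are placeholders):

#!/bin/bash
# Sketch only: re-assemble base + delta on the consumer side.
set -euo pipefail

BASE=/tmp/cache/base      # restored from the full cache snapshot (assumed path)
UPPER=/tmp/cache/upper    # will hold the downloaded delta
WORK=/tmp/cache/work
MERGED=/tmp/cache/merged

mkdir -p "$BASE" "$UPPER" "$WORK" "$MERGED"

# Fetch and unpack the delta produced by the workflow above.
# (example.invalid is a placeholder; the workflow only simulates the upload.)
curl -fsSL https://example.invalid/delta.tar.gz -o /tmp/delta.tar.gz
tar -xzf /tmp/delta.tar.gz -C "$UPPER"

# Re-mount the overlay: merged = base + delta.
# Note: OverlayFS records deletions as character-device "whiteouts", so the
# tarball must be created and extracted with a tool that preserves them
# (GNU tar run as root does).
sudo mount -t overlay overlay \
  -o lowerdir="$BASE",upperdir="$UPPER",workdir="$WORK" \
  "$MERGED"

ls -R "$MERGED"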
.github/workflows/verify-generated-headers.yml.disabled (new file, 36 lines, vendored)
@@ -0,0 +1,36 @@
name: Verify Generated Hook Headers

on:
  push:
  pull_request:

jobs:
  verify-generated-headers:
    strategy:
      fail-fast: false
      matrix:
        include:
          - target: hook/error.h
            generator: ./hook/generate_error.sh
          - target: hook/extern.h
            generator: ./hook/generate_extern.sh
          - target: hook/sfcodes.h
            generator: bash ./hook/generate_sfcodes.sh
          - target: hook/tts.h
            generator: ./hook/generate_tts.sh
    runs-on: ubuntu-latest
    name: ${{ matrix.target }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Verify ${{ matrix.target }}
        run: |
          set -euo pipefail
          chmod +x hook/generate_*.sh || true

          tmp=$(mktemp)
          trap 'rm -f "$tmp"' EXIT

          ${{ matrix.generator }} > "$tmp"
          diff -u ${{ matrix.target }} "$tmp"
.github/workflows/xahau-ga-macos.yml.disabled (new file, 149 lines, vendored)
@@ -0,0 +1,149 @@
name: MacOS - GA Runner

on:
  push:
    branches: ["dev", "candidate", "release"]
  pull_request:
    branches: ["dev", "candidate", "release"]
  schedule:
    - cron: '0 0 * * *'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test:
    strategy:
      matrix:
        generator:
          - Ninja
        configuration:
          - Debug
    runs-on: macos-15
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.
      CACHE_VERSION: 1
      MAIN_BRANCH_NAME: dev
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Conan
        run: |
          brew install conan
          # Verify Conan 2 is installed
          conan --version

      - name: Install Coreutils
        run: |
          brew install coreutils
          echo "Num proc: $(nproc)"

      - name: Install Ninja
        if: matrix.generator == 'Ninja'
        run: brew install ninja

      - name: Install Python
        run: |
          if which python3 > /dev/null 2>&1; then
            echo "Python 3 executable exists"
            python3 --version
          else
            brew install python@3.12
          fi
          # Create 'python' symlink if it doesn't exist (for tools expecting 'python')
          if ! which python > /dev/null 2>&1; then
            sudo ln -sf $(which python3) /usr/local/bin/python
          fi

      - name: Install CMake
        run: |
          # Install CMake 3.x to match local dev environments
          # With Conan 2 and the policy args passed to CMake, newer versions
          # can have issues with dependencies that require cmake_minimum_required < 3.5
          brew uninstall cmake --ignore-dependencies 2>/dev/null || true

          # Download and install CMake 3.31.7 directly
          curl -L https://github.com/Kitware/CMake/releases/download/v3.31.7/cmake-3.31.7-macos-universal.tar.gz -o cmake.tar.gz
          tar -xzf cmake.tar.gz

          # Move the entire CMake.app to /Applications
          sudo mv cmake-3.31.7-macos-universal/CMake.app /Applications/

          echo "/Applications/CMake.app/Contents/bin" >> $GITHUB_PATH
          /Applications/CMake.app/Contents/bin/cmake --version

      - name: Install ccache
        run: brew install ccache

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content
          is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}

      - name: Check environment
        run: |
          echo "PATH:"
          echo "${PATH}" | tr ':' '\n'
          which python && python --version || echo "Python not found"
          which conan && conan --version || echo "Conan not found"
          which cmake && cmake --version || echo "CMake not found"
          clang --version
          ccache --version
          echo "---- Full Environment ----"
          env

      - name: Configure Conan
        run: |
          # Create the default profile directory if it doesn't exist
          mkdir -p ~/.conan2/profiles

          # Detect compiler version
          COMPILER_VERSION=$(clang --version | grep -oE 'version [0-9]+' | grep -oE '[0-9]+')

          # Create profile with our specific settings
          cat > ~/.conan2/profiles/default <<EOF
          [settings]
          arch=armv8
          build_type=Release
          compiler=apple-clang
          compiler.cppstd=20
          compiler.libcxx=libc++
          compiler.version=${COMPILER_VERSION}
          os=Macos

          [conf]
          # Workaround for gRPC with newer Apple Clang
          tools.build:cxxflags=["-Wno-missing-template-arg-list-after-template-kw"]
          EOF

          # Display profile for verification
          conan profile show

      - name: Install dependencies
        uses: ./.github/actions/xahau-ga-dependencies
        with:
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
        with:
          generator: ${{ matrix.generator }}
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Test
        run: |
          ${{ env.build_dir }}/rippled --unittest --unittest-jobs $(nproc)
.github/workflows/xahau-ga-nix.yml.disabled (new file, 332 lines, vendored)
@@ -0,0 +1,332 @@
name: Nix - GA Runner

on:
  push:
    branches: ["dev", "candidate", "release"]
  pull_request:
    branches: ["dev", "candidate", "release"]
  schedule:
    - cron: '0 0 * * *'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  matrix-setup:
    runs-on: ubuntu-latest
    container: python:3-slim
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - name: Generate build matrix
        id: set-matrix
        shell: python
        run: |
          import json
          import os

          # Full matrix with all 6 compiler configurations
          # Each configuration includes all parameters needed by the build job
          full_matrix = [
              {
                  "compiler_id": "gcc-11-libstdcxx",
                  "compiler": "gcc",
                  "cc": "gcc-11",
                  "cxx": "g++-11",
                  "compiler_version": 11,
                  "stdlib": "libstdcxx",
                  "configuration": "Debug"
              },
              {
                  "compiler_id": "gcc-13-libstdcxx",
                  "compiler": "gcc",
                  "cc": "gcc-13",
                  "cxx": "g++-13",
                  "compiler_version": 13,
                  "stdlib": "libstdcxx",
                  "configuration": "Debug"
              },
              {
                  "compiler_id": "clang-14-libstdcxx-gcc11",
                  "compiler": "clang",
                  "cc": "clang-14",
                  "cxx": "clang++-14",
                  "compiler_version": 14,
                  "stdlib": "libstdcxx",
                  "clang_gcc_toolchain": 11,
                  "configuration": "Debug"
              },
              {
                  "compiler_id": "clang-16-libstdcxx-gcc13",
                  "compiler": "clang",
                  "cc": "clang-16",
                  "cxx": "clang++-16",
                  "compiler_version": 16,
                  "stdlib": "libstdcxx",
                  "clang_gcc_toolchain": 13,
                  "configuration": "Debug"
              },
              {
                  "compiler_id": "clang-17-libcxx",
                  "compiler": "clang",
                  "cc": "clang-17",
                  "cxx": "clang++-17",
                  "compiler_version": 17,
                  "stdlib": "libcxx",
                  "configuration": "Debug"
              },
              {
                  # Clang 18 - testing if it's faster than Clang 17 with libc++
                  # Requires patching Conan v1 settings.yml to add version 18
                  "compiler_id": "clang-18-libcxx",
                  "compiler": "clang",
                  "cc": "clang-18",
                  "cxx": "clang++-18",
                  "compiler_version": 18,
                  "stdlib": "libcxx",
                  "configuration": "Debug"
              }
          ]

          # Minimal matrix for PRs and feature branches
          minimal_matrix = [
              full_matrix[1],  # gcc-13 (middle-ground gcc)
              full_matrix[2]   # clang-14 (mature, stable clang)
          ]

          # Determine which matrix to use based on the target branch
          ref = "${{ github.ref }}"
          base_ref = "${{ github.base_ref }}"  # For PRs, this is the target branch
          event_name = "${{ github.event_name }}"
          commit_message = """${{ github.event.head_commit.message }}"""
          pr_title = """${{ github.event.pull_request.title }}"""

          # Debug logging
          print(f"Event: {event_name}")
          print(f"Ref: {ref}")
          print(f"Base ref: {base_ref}")
          print(f"PR title: {pr_title}")
          print(f"Commit message: {commit_message}")

          # Check for override tags in commit message or PR title
          force_full = "[ci-nix-full-matrix]" in commit_message or "[ci-nix-full-matrix]" in pr_title
          print(f"Force full matrix: {force_full}")

          # Check if this is targeting a main branch
          # For PRs: check base_ref (target branch)
          # For pushes: check ref (current branch)
          main_branches = ["refs/heads/dev", "refs/heads/release", "refs/heads/candidate"]

          if force_full:
              # Override: always use full matrix if tag is present
              use_full = True
          elif event_name == "pull_request":
              # For PRs, base_ref is just the branch name (e.g., "dev", not "refs/heads/dev")
              # Check if the PR targets release or candidate (more critical branches)
              use_full = base_ref in ["release", "candidate"]
          else:
              # For pushes, ref is the full reference (e.g., "refs/heads/dev")
              use_full = ref in main_branches

          # Select the appropriate matrix
          if use_full:
              if force_full:
                  print(f"Using FULL matrix (6 configs) - forced by [ci-nix-full-matrix] tag")
              else:
                  print(f"Using FULL matrix (6 configs) - targeting main branch")
              matrix = full_matrix
          else:
              print(f"Using MINIMAL matrix (2 configs) - feature branch/PR")
              matrix = minimal_matrix

          # Output the matrix as JSON
          output = json.dumps({"include": matrix})
          with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
              f.write(f"matrix={output}\n")

  build:
    needs: matrix-setup
    runs-on: ubuntu-latest
    outputs:
      artifact_name: ${{ steps.set-artifact-name.outputs.artifact_name }}
    strategy:
      fail-fast: false
      matrix: ${{ fromJSON(needs.matrix-setup.outputs.matrix) }}
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.
      CACHE_VERSION: 2
      MAIN_BRANCH_NAME: dev
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache

          # Install the specific GCC version needed for Clang
          if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
            echo "=== Installing GCC ${{ matrix.clang_gcc_toolchain }} for Clang ==="
            sudo apt-get install -y gcc-${{ matrix.clang_gcc_toolchain }} g++-${{ matrix.clang_gcc_toolchain }} libstdc++-${{ matrix.clang_gcc_toolchain }}-dev

            echo "=== GCC versions available after installation ==="
            ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d"
          fi

          # For Clang < 16 with --gcc-toolchain, hide newer GCC versions
          # This is needed because --gcc-toolchain still picks the highest version
          #
          # THE GREAT GCC HIDING TRICK (for Clang < 16):
          # Clang versions before 16 don't have --gcc-install-dir, only --gcc-toolchain
          # which is deprecated and still uses discovery heuristics that ALWAYS pick
          # the highest version number. So we play a sneaky game...
          #
          # We rename newer GCC versions to very low integers (1, 2, 3...) which makes
          # Clang think they're ancient GCC versions. Since 11 > 3 > 2 > 1, Clang will
          # pick GCC 11 over our renamed versions. It's dumb but it works!
          #
          # Example: GCC 12→1, GCC 13→2, GCC 14→3, so Clang picks 11 (highest number)
          if [ -n "${{ matrix.clang_gcc_toolchain }}" ] && [ "${{ matrix.compiler_version }}" -lt "16" ]; then
            echo "=== Hiding GCC versions newer than ${{ matrix.clang_gcc_toolchain }} for Clang < 16 ==="
            target_version=${{ matrix.clang_gcc_toolchain }}
            counter=1  # Start with 1 - these will be seen as "GCC version 1, 2, 3" etc
            for dir in /usr/lib/gcc/x86_64-linux-gnu/*/; do
              if [ -d "$dir" ]; then
                version=$(basename "$dir")
                # Check if version is numeric and greater than target
                if [[ "$version" =~ ^[0-9]+$ ]] && [ "$version" -gt "$target_version" ]; then
                  echo "Hiding GCC $version -> renaming to $counter (will be seen as GCC version $counter)"
                  # Safety check: ensure target doesn't already exist
                  if [ ! -e "/usr/lib/gcc/x86_64-linux-gnu/$counter" ]; then
                    sudo mv "$dir" "/usr/lib/gcc/x86_64-linux-gnu/$counter"
                  else
                    echo "ERROR: Cannot rename GCC $version - /usr/lib/gcc/x86_64-linux-gnu/$counter already exists"
                    exit 1
                  fi
                  counter=$((counter + 1))
                fi
              fi
            done
          fi

          # Verify what Clang will use
          if [ -n "${{ matrix.clang_gcc_toolchain }}" ]; then
            echo "=== Verifying GCC toolchain selection ==="
            echo "Available GCC versions:"
            ls -la /usr/lib/gcc/x86_64-linux-gnu/ | grep -E "^d.*[0-9]+$" || true

            echo ""
            echo "Clang's detected GCC installation:"
            ${{ matrix.cxx }} -v -E -x c++ /dev/null -o /dev/null 2>&1 | grep "Found candidate GCC installation" || true
          fi

          # Install libc++ dev packages if using libc++ (not needed for libstdc++)
          if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
            sudo apt-get install -y libc++-${{ matrix.compiler_version }}-dev libc++abi-${{ matrix.compiler_version }}-dev
          fi

          # Install Conan 2
          pip install --upgrade "conan>=2.0,<3"

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content
          is_main_branch: ${{ github.ref_name == env.MAIN_BRANCH_NAME }}

      - name: Configure Conan
        run: |
          # Create the default profile directory if it doesn't exist
          mkdir -p ~/.conan2/profiles

          # Determine the correct libcxx based on stdlib parameter
          if [ "${{ matrix.stdlib }}" = "libcxx" ]; then
            LIBCXX="libc++"
          else
            LIBCXX="libstdc++11"
          fi

          # Create profile with our specific settings
          cat > ~/.conan2/profiles/default <<EOF
          [settings]
          arch=x86_64
          build_type=${{ matrix.configuration }}
          compiler=${{ matrix.compiler }}
          compiler.cppstd=20
          compiler.libcxx=${LIBCXX}
          compiler.version=${{ matrix.compiler_version }}
          os=Linux

          [buildenv]
          CC=/usr/bin/${{ matrix.cc }}
          CXX=/usr/bin/${{ matrix.cxx }}

          [conf]
          tools.build:compiler_executables={"c": "/usr/bin/${{ matrix.cc }}", "cpp": "/usr/bin/${{ matrix.cxx }}"}
          EOF

          # Display profile for verification
          conan profile show

      - name: Check environment
        run: |
          echo "PATH:"
          echo "${PATH}" | tr ':' '\n'
          which conan && conan --version || echo "Conan not found"
          which cmake && cmake --version || echo "CMake not found"
          which ${{ matrix.cc }} && ${{ matrix.cc }} --version || echo "${{ matrix.cc }} not found"
          which ${{ matrix.cxx }} && ${{ matrix.cxx }} --version || echo "${{ matrix.cxx }} not found"
          which ccache && ccache --version || echo "ccache not found"
          echo "---- Full Environment ----"
          env

      - name: Install dependencies
        uses: ./.github/actions/xahau-ga-dependencies
        with:
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: ${{ matrix.compiler_id }}
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          stdlib: ${{ matrix.stdlib }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
        with:
          generator: Ninja
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          cc: ${{ matrix.cc }}
          cxx: ${{ matrix.cxx }}
          compiler-id: ${{ matrix.compiler_id }}
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}
          stdlib: ${{ matrix.stdlib }}
          clang_gcc_toolchain: ${{ matrix.clang_gcc_toolchain || '' }}

      - name: Set artifact name
        id: set-artifact-name
        run: |
          ARTIFACT_NAME="build-output-nix-${{ github.run_id }}-${{ matrix.compiler }}-${{ matrix.configuration }}"
          echo "artifact_name=${ARTIFACT_NAME}" >> "$GITHUB_OUTPUT"
          echo "Using artifact name: ${ARTIFACT_NAME}"

      - name: Debug build directory
        run: |
          echo "Checking build directory contents: ${{ env.build_dir }}"
          ls -la ${{ env.build_dir }} || echo "Build directory not found or empty"

      - name: Run tests
        run: |
          # Ensure the binary exists before trying to run
          if [ -f "${{ env.build_dir }}/rippled" ]; then
            ${{ env.build_dir }}/rippled --unittest --unittest-jobs $(nproc)
          else
            echo "Error: rippled executable not found in ${{ env.build_dir }}"
            exit 1
          fi
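For reference, a sketch of roughly what the matrix-setup job writes to GITHUB_OUTPUT on a feature branch (the minimal matrix, i.e. full_matrix[1] and full_matrix[2] above), and how the structure consumed by the build job's fromJSON(...) can be sanity-checked locally; the file path is arbitrary and jq is only used here as a local helper:

#!/bin/bash
set -euo pipefail

# Approximate output line for the minimal (2-config) matrix.
cat > /tmp/github_output <<'EOF'
matrix={"include": [{"compiler_id": "gcc-13-libstdcxx", "compiler": "gcc", "cc": "gcc-13", "cxx": "g++-13", "compiler_version": 13, "stdlib": "libstdcxx", "configuration": "Debug"}, {"compiler_id": "clang-14-libstdcxx-gcc11", "compiler": "clang", "cc": "clang-14", "cxx": "clang++-14", "compiler_version": 14, "stdlib": "libstdcxx", "clang_gcc_toolchain": 11, "configuration": "Debug"}]}
EOF

# The build job's `matrix: ${{ fromJSON(...) }}` sees the same structure.
cut -d= -f2- /tmp/github_output | jq '.include | length'        # -> 2
cut -d= -f2- /tmp/github_output | jq -r '.include[].compiler_id'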