Mirror of https://github.com/Xahau/xahaud.git, synced 2025-11-08 04:35:49 +00:00

Compare commits: patch-conf...sync-2.0.1 (265 commits)
Commits in this range (SHA1):

```
0a1a289bd4 827e6fe617 5436d1fffd 85e8ec2acf 58fa7fda82 a5ea86fdfc e04d5911de 8f4a9f44f6
a4bb9b9629 9b47d06c64 615f56570a 5e005cd6ee 34b3442e00 80a7197590 dd5d580b97 e4478fa883
2876888a20 857eefc829 7b581443d1 5400f43359 8cf7d485ab 372f25d09b 401395a204 8ac2887961
b87120ebc6 e38dcd3acd 5ba9e871c5 a95d4ad6ca ad90852fa5 469a3fb3b0 0bc2a3ba0f fda383b6a4
387f789a20 fbe221c987 a124e54f6b 8f30d8dcfc 3e0324081b 8ea213413a 7b3da1b0f7 07de989780
f589fb3f03 691a1a5610 36eb3d113a c6c2983cf2 99225d5aee 46b1b9e63c 961054dc6b 53f8bf5b0b
413a2076f4 f3970631aa 5e232bf785 ae39ac3cfb 31b61b5b94 c1c2a9b3a6 bcfb37bc5c f4c475ab49
7ed38c0ae3 b06d2bfbcc 0be2afdb9c 3e0af5ba3b a27a09a6c4 78d9be7e09 f4bb4b6951 67930a9ceb
e7aebad065 c2b14ee11e d3a4111d57 77cb6d11fb 1c12f1511b 0fb7ef4051 ee92042c19 b9f0c9209f
8497c6fd49 6f25fcd394 b295c7691c af8a130c19 49d2c731f7 d5b6c63a80 b3a670b541 8159b0f0ff
f2dc0b8cc1 ed90432fbe 057180860d 89212fd03e ea0f8760f0 2461ecbba9 429a3599b9 4221dcf568
8fc04d5c1f f645e42e6e 666f5eabd0 a1ae22a0d6 da8973b86f 29592347c0 8684cb8bf9 4077db3389
c30b73db62 8dc718a560 989532702d 658eb73e00 df2e2ecd7c f9cd2e0d21 542fbdcb61 e40a97100b
913866afb2 13acc00c95 85a636d494 eefc6eab53 59e334c099 1d2ad17dc9 97fb65c6d3 4bb7195db4
06b8d11317 40818bb350 a07d6f939b 546bc31038 a8698bdf7f b9b70e3e4d 4fa3890a02 0e4470e662
3448e17de8 5605e9215a a024b52261 30f4654469 c7255d8d8b 5833b022d8 9018596532 b827f0170d
26cb84fa6c 938feb3c23 7d6b2e76d2 ab38e48fff 9b7103ea4d e4b7e8f0f2 1485078d91 6625d2be92
2fb5c92140 c4b5ae3787 d546d761ce e84a36867b 0bf50c5dc7 8b0592ce06 c27e2a9c05 f8d22bbc24
95ab35a185 313ad7134b 859dcbcabf a8e9448b1a cab4d63a07 9a90a7ffdc ddbbe91e5a 2d9ac91cd1
ed243745f8 5eac2d3e44 245cf4a676 864734f607 689740d818 48919f028c 9c26f1f213 69b7728b2d
9178aa9d8a 17fdd09640 40dc69d642 58e3840345 b73dd584e7 0726789d77 0b675465b4 d088ad61a9
ef77b02d7f 5ef3795cea 7385828983 da5f1d189b 88b01514c1 aeece15096 6fdf788b13 89cacb1258
8ccff44e8c 420240a2ab 802ea6c568 ecc779346e 680c6095d4 6858861660 9faef17407 230873f196
ee68cc2cd2 7c360bad33 1fb1a99ea2 e0b63ac70e 26a66bc2ef 4b93e1657f cd45285cab b7acfb9803
97a10d6556 da8df63be3 317bd4bc6e 2fd465bb3f fe43029272 bf33b6f637 c27518b846 7d8f5de93d
858ea1bf25 7162fe0497 56c0e0dd5f 20ca066454 171610d1a9 2f6cf0ab4b 71884ad48a 799a056313
fa71bda29c 412593d7bc 12d8342c34 d17f7151ab 4466175231 621ca9c865 85a752235a d878fd4a6e
532a471a35 e9468d8b4a 9d54da3880 542172f0a1 e086724772 21863b05f3 61ac04aacc 57a1329bff
daf22b3b85 2b225977e2 58b22901cb 8ba37a3138 8cffd3054d 6b26045cbc 08f13b7cfe 766f5d7ee1
287c01ad04 4239124750 1e45d4120c 9e446bcc85 376727d20c d921c87c88 7b94d3d99d 79d83bd424
1a4d54f9d9 26cd629d28 2fb93f874b 833df20fce 5737c2b6e8 a15d0b2ecc 18d76d3082 849a4435e0
247e9d98bf acd455f5df 6636e3b6fd 3c5f118b59 88308126cc 497e52fcc6 a3852763e7 7cd8f0a03a
d24c134612 cdac69a111 1500522427 75aba531d6 caa8b382d8 82e04073be e1b78f9682 901d1d4e8d
aca5241515
```
**.github/actions/xahau-configure-ccache/action.yml** (vendored, new file, 31 lines)

```yaml
name: 'Configure ccache'
description: 'Sets up ccache with consistent configuration'

inputs:
  max_size:
    description: 'Maximum cache size'
    required: false
    default: '2G'
  hash_dir:
    description: 'Whether to include directory paths in hash'
    required: false
    default: 'true'
  compiler_check:
    description: 'How to check compiler for changes'
    required: false
    default: 'content'

runs:
  using: 'composite'
  steps:
    - name: Configure ccache
      shell: bash
      run: |
        mkdir -p ~/.ccache
        export CONF_PATH="${CCACHE_CONFIGPATH:-${CCACHE_DIR:-$HOME/.ccache}/ccache.conf}"
        mkdir -p $(dirname "$CONF_PATH")
        echo "max_size = ${{ inputs.max_size }}" > "$CONF_PATH"
        echo "hash_dir = ${{ inputs.hash_dir }}" >> "$CONF_PATH"
        echo "compiler_check = ${{ inputs.compiler_check }}" >> "$CONF_PATH"
        ccache -p # Print config for verification
        ccache -z # Zero statistics before the build
```
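For local debugging it can help to reproduce the same ccache configuration outside of CI. The sketch below is not part of the action; it simply replays the same three settings using the action's default values.

```bash
# Local sketch of the ccache setup this action performs (default values assumed).
CONF_PATH="${CCACHE_CONFIGPATH:-${CCACHE_DIR:-$HOME/.ccache}/ccache.conf}"
mkdir -p "$(dirname "$CONF_PATH")"
{
  echo "max_size = 2G"
  echo "hash_dir = true"
  echo "compiler_check = content"
} > "$CONF_PATH"
ccache -p   # print the effective configuration
ccache -z   # zero statistics before the build
```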
**.github/actions/xahau-ga-build/action.yml** (vendored, new file, 108 lines)

```yaml
name: build
description: 'Builds the project with ccache integration'

inputs:
  generator:
    description: 'CMake generator to use'
    required: true
  configuration:
    description: 'Build configuration (Debug, Release, etc.)'
    required: true
  build_dir:
    description: 'Directory to build in'
    required: false
    default: '.build'
  cc:
    description: 'C compiler to use'
    required: false
    default: ''
  cxx:
    description: 'C++ compiler to use'
    required: false
    default: ''
  compiler-id:
    description: 'Unique identifier for compiler/version combination used for cache keys'
    required: false
    default: ''
  cache_version:
    description: 'Cache version for invalidation'
    required: false
    default: '1'
  ccache_enabled:
    description: 'Whether to use ccache'
    required: false
    default: 'true'
  main_branch:
    description: 'Main branch name for restore keys'
    required: false
    default: 'dev'

runs:
  using: 'composite'
  steps:
    - name: Generate safe branch name
      if: inputs.ccache_enabled == 'true'
      id: safe-branch
      shell: bash
      run: |
        SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
        echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT

    - name: Restore ccache directory
      if: inputs.ccache_enabled == 'true'
      id: ccache-restore
      uses: actions/cache/restore@v4
      with:
        path: ~/.ccache
        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
        restore-keys: |
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-

    - name: Configure project
      shell: bash
      run: |
        mkdir -p ${{ inputs.build_dir }}
        cd ${{ inputs.build_dir }}

        # Set compiler environment variables if provided
        if [ -n "${{ inputs.cc }}" ]; then
          export CC="${{ inputs.cc }}"
        fi

        if [ -n "${{ inputs.cxx }}" ]; then
          export CXX="${{ inputs.cxx }}"
        fi

        # Configure ccache launcher args
        CCACHE_ARGS=""
        if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
          CCACHE_ARGS="-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
        fi

        # Run CMake configure
        cmake .. \
          -G "${{ inputs.generator }}" \
          $CCACHE_ARGS \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_BUILD_TYPE=${{ inputs.configuration }}

    - name: Build project
      shell: bash
      run: |
        cd ${{ inputs.build_dir }}
        cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc)

    - name: Show ccache statistics
      if: inputs.ccache_enabled == 'true'
      shell: bash
      run: ccache -s

    - name: Save ccache directory
      if: inputs.ccache_enabled == 'true'
      uses: actions/cache/save@v4
      with:
        path: ~/.ccache
        key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
```
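The ccache cache key is layered: the action prefers a branch-specific cache, then falls back to the main branch and to progressively broader keys. The sketch below only illustrates how the pieces compose; the variable names and example values are not part of the action.

```bash
# Illustrative only: how the ccache cache key and its fallbacks compose (example values).
RUNNER_OS="Linux"
CACHE_VERSION="1"
COMPILER_ID="gcc-11"
CONFIGURATION="Debug"
SAFE_BRANCH="feature-my-change"   # github.ref_name with unsafe characters replaced by '-'

echo "primary:  ${RUNNER_OS}-ccache-v${CACHE_VERSION}-${COMPILER_ID}-${CONFIGURATION}-${SAFE_BRANCH}"
echo "fallback: ${RUNNER_OS}-ccache-v${CACHE_VERSION}-${COMPILER_ID}-${CONFIGURATION}-dev"
echo "fallback: ${RUNNER_OS}-ccache-v${CACHE_VERSION}-${COMPILER_ID}-${CONFIGURATION}-"
echo "fallback: ${RUNNER_OS}-ccache-v${CACHE_VERSION}-${COMPILER_ID}-"
echo "fallback: ${RUNNER_OS}-ccache-v${CACHE_VERSION}-"
```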
**.github/actions/xahau-ga-dependencies/action.yml** (vendored, new file, 86 lines)

```yaml
name: dependencies
description: 'Installs build dependencies with caching'

inputs:
  configuration:
    description: 'Build configuration (Debug, Release, etc.)'
    required: true
  build_dir:
    description: 'Directory to build dependencies in'
    required: false
    default: '.build'
  compiler-id:
    description: 'Unique identifier for compiler/version combination used for cache keys'
    required: false
    default: ''
  cache_version:
    description: 'Cache version for invalidation'
    required: false
    default: '1'
  cache_enabled:
    description: 'Whether to use caching'
    required: false
    default: 'true'
  main_branch:
    description: 'Main branch name for restore keys'
    required: false
    default: 'dev'

outputs:
  cache-hit:
    description: 'Whether there was a cache hit'
    value: ${{ steps.cache-restore-conan.outputs.cache-hit }}

runs:
  using: 'composite'
  steps:
    - name: Generate safe branch name
      if: inputs.cache_enabled == 'true'
      id: safe-branch
      shell: bash
      run: |
        SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
        echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT

    - name: Restore Conan cache
      if: inputs.cache_enabled == 'true'
      id: cache-restore-conan
      uses: actions/cache/restore@v4
      with:
        path: |
          ~/.conan
          ~/.conan2
        key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
        restore-keys: |
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-

    - name: Export custom recipes
      shell: bash
      run: |
        conan export external/snappy snappy/1.1.9@
        conan export external/soci soci/4.0.3@

    - name: Install dependencies
      shell: bash
      run: |
        # Create build directory
        mkdir -p ${{ inputs.build_dir }}
        cd ${{ inputs.build_dir }}

        # Install dependencies using conan
        conan install \
          --output-folder . \
          --build missing \
          --settings build_type=${{ inputs.configuration }} \
          ..

    - name: Save Conan cache
      if: inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
      uses: actions/cache/save@v4
      with:
        path: |
          ~/.conan
          ~/.conan2
        key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}
```
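Outside of CI, the same dependency step reduces to the commands below, run from the repository root (a Debug configuration is assumed here as an example).

```bash
# Local sketch of the dependency step.
conan export external/snappy snappy/1.1.9@
conan export external/soci soci/4.0.3@

mkdir -p .build && cd .build
conan install \
  --output-folder . \
  --build missing \
  --settings build_type=Debug \
  ..
```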
**.github/pull_request_template.md** (vendored, diff: 7 lines)

```markdown
@@ -33,6 +33,7 @@ Please check [x] relevant options, delete irrelevant ones.
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Refactor (non-breaking change that only restructures code)
- [ ] Performance (increase or change in throughput and/or latency)
- [ ] Tests (you added tests for code that already exists, or your new feature included in this PR)
- [ ] Documentation update
- [ ] Chore (no impact to binary, e.g. `.gitignore`, formatting, dropping support for older tooling)
@@ -58,6 +59,12 @@ Please check [x] relevant options, delete irrelevant ones.
## Before / After
If relevant, use this section for an English description of the change at a technical level.
If this change affects an API, examples should be included here.

For performance-impacting changes, please provide these details:
1. Is this a new feature, bug fix, or improvement to existing functionality?
2. What behavior/functionality does the change impact?
3. In what processing can the impact be measured? Be as specific as possible - e.g. RPC client call, payment transaction that involves LOB, AMM, caching, DB operations, etc.
4. Does this change affect concurrent processing - e.g. does it involve acquiring locks, multi-threaded processing, or async processing?
-->

<!--
```
**.github/workflows/build-in-docker.yml** (vendored, diff: 109 lines; old and new lines shown in order)

```yaml
@@ -2,25 +2,104 @@ name: Build using Docker

on:
  push:
    branches: [ "dev", "candidate", "release" ]
    branches: ["dev", "candidate", "release", "jshooks"]
  pull_request:
    branches: [ "dev", "candidate", "release" ]
    branches: ["dev", "candidate", "release", "jshooks"]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  builder:
    runs-on: [self-hosted, vanity]
    steps:
      - uses: actions/checkout@v3
        with:
          clean: false
      - name: Check for suspicious patterns
        run: /bin/bash suspicious_patterns.sh
      - name: Build using Docker
        run: /bin/bash release-builder.sh
      - name: Unit tests
        run: /bin/bash docker-unit-tests.sh
        env:
          DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP: 1

jobs:
  checkout:
    runs-on: [self-hosted, vanity]
    outputs:
      checkout_path: ${{ steps.vars.outputs.checkout_path }}
    steps:
      - name: Prepare checkout path
        id: vars
        run: |
          SAFE_BRANCH=$(echo "${{ github.ref_name }}" | sed -e 's/[^a-zA-Z0-9._-]/-/g')
          CHECKOUT_PATH="${SAFE_BRANCH}-${{ github.sha }}"
          echo "checkout_path=${CHECKOUT_PATH}" >> "$GITHUB_OUTPUT"

      - uses: actions/checkout@v4
        with:
          path: ${{ steps.vars.outputs.checkout_path }}
          clean: true
          fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history

  checkpatterns:
    runs-on: [self-hosted, vanity]
    needs: checkout
    defaults:
      run:
        working-directory: ${{ needs.checkout.outputs.checkout_path }}
    steps:
      - name: Check for suspicious patterns
        run: /bin/bash suspicious_patterns.sh

  build:
    runs-on: [self-hosted, vanity]
    needs: [checkpatterns, checkout]
    defaults:
      run:
        working-directory: ${{ needs.checkout.outputs.checkout_path }}
    steps:
      - name: Set Cleanup Script Path
        run: |
          echo "JOB_CLEANUP_SCRIPT=$(mktemp)" >> $GITHUB_ENV

      - name: Build using Docker
        run: /bin/bash release-builder.sh

      - name: Stop Container (Cleanup)
        if: always()
        run: |
          echo "Running cleanup script: $JOB_CLEANUP_SCRIPT"
          /bin/bash -e -x "$JOB_CLEANUP_SCRIPT"
          CLEANUP_EXIT_CODE=$?

          if [[ "$CLEANUP_EXIT_CODE" -eq 0 ]]; then
            echo "Cleanup script succeeded."
            rm -f "$JOB_CLEANUP_SCRIPT"
            echo "Cleanup script removed."
          else
            echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT"
          fi

          if [[ "${DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP}" == "1" ]]; then
            echo "🔍 Checking for leftover containers..."
            BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "")

            if [[ -n "$BUILD_CONTAINERS" ]]; then
              echo "⚠️ WARNING: Some build containers are still running"
              echo "$BUILD_CONTAINERS"
            else
              echo "✅ No build containers found"
            fi
          fi

  tests:
    runs-on: [self-hosted, vanity]
    needs: [build, checkout]
    defaults:
      run:
        working-directory: ${{ needs.checkout.outputs.checkout_path }}
    steps:
      - name: Unit tests
        run: /bin/bash docker-unit-tests.sh

  cleanup:
    runs-on: [self-hosted, vanity]
    needs: [tests, checkout]
    if: always()
    steps:
      - name: Cleanup workspace
        run: |
          CHECKOUT_PATH="${{ needs.checkout.outputs.checkout_path }}"
          echo "Cleaning workspace for ${CHECKOUT_PATH}"
          rm -rf "${{ github.workspace }}/${CHECKOUT_PATH}"
```
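Each run checks out into a branch- and commit-specific directory so that concurrent builds on the same self-hosted runner do not collide. A quick sketch of how that path is derived, with example values rather than a real run:

```bash
# Illustrative only: how the per-run workspace path is derived.
GITHUB_REF_NAME="feature/my change"     # example branch name
GITHUB_SHA="0123456789abcdef"           # example commit SHA
SAFE_BRANCH=$(echo "$GITHUB_REF_NAME" | sed -e 's/[^a-zA-Z0-9._-]/-/g')
CHECKOUT_PATH="${SAFE_BRANCH}-${GITHUB_SHA}"
echo "$CHECKOUT_PATH"                   # feature-my-change-0123456789abcdef
```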
**.github/workflows/checkpatterns.yml** (vendored, new file, 20 lines)

```yaml
name: checkpatterns

on: [push, pull_request]

jobs:
  checkpatterns:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check for suspicious patterns
        run: |
          if [ -f "suspicious_patterns.sh" ]; then
            bash suspicious_patterns.sh
          else
            echo "Warning: suspicious_patterns.sh not found, skipping check"
            # Still exit with success for compatibility with dependent jobs
            exit 0
          fi
```
**.github/workflows/clang-format.yml** (vendored, diff: 33 lines; old and new lines shown in order)

```yaml
@@ -4,21 +4,32 @@ on: [push, pull_request]

jobs:
  check:
    runs-on: ubuntu-20.04
    runs-on: ubuntu-22.04
    env:
      CLANG_VERSION: 10
    steps:
      - uses: actions/checkout@v3
      - name: Install clang-format
      # - name: Install clang-format
      #   run: |
      #     codename=$( lsb_release --codename --short )
      #     sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
      #     deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
      #     deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
      #     EOF
      #     wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
      #     sudo apt-get update -y
      #     sudo apt-get install -y clang-format-${CLANG_VERSION}

      # Temporary fix until this commit is merged
      # https://github.com/XRPLF/rippled/commit/552377c76f55b403a1c876df873a23d780fcc81c
      - name: Download and install clang-format
        run: |
          codename=$( lsb_release --codename --short )
          sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
          deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
          deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
          EOF
          wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
          sudo apt-get update
          sudo apt-get install clang-format-${CLANG_VERSION}
          sudo apt-get update -y
          sudo apt-get install -y libtinfo5
          curl -LO https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
          tar -xf clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
          sudo mv clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04 /opt/clang-10
          sudo ln -s /opt/clang-10/bin/clang-format /usr/local/bin/clang-format-10
      - name: Format src/ripple
        run: find src/ripple -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
      - name: Format src/test
@@ -30,7 +41,7 @@ jobs:
          git diff --exit-code | tee "clang-format.patch"
      - name: Upload patch
        if: failure() && steps.assert.outcome == 'failure'
        uses: actions/upload-artifact@v2
        uses: actions/upload-artifact@v4
        continue-on-error: true
        with:
          name: clang-format.patch
```
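The same formatting check can be reproduced locally before pushing. The sketch below assumes a clang-format 10 binary is available on the PATH (the version pinned by `CLANG_VERSION` above).

```bash
# Local sketch of the CI formatting check (assumes clang-format-10 is installed).
find src/ripple src/test -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 \
  | xargs -0 clang-format-10 -i
git diff --exit-code | tee clang-format.patch   # any output means files need reformatting
```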
**.github/workflows/doxygen.yml** (vendored, deleted, 25 lines)

```yaml
name: Build and publish Doxygen documentation
on:
  push:
    branches:
      - dev

jobs:
  job:
    runs-on: ubuntu-latest
    container:
      image: docker://rippleci/rippled-ci-builder:2944b78d22db
    steps:
      - name: checkout
        uses: actions/checkout@v2
      - name: build
        run: |
          mkdir build
          cd build
          cmake -DBoost_NO_BOOST_CMAKE=ON ..
          cmake --build . --target docs --parallel $(nproc)
      - name: publish
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: build/docs/html
```
**.github/workflows/levelization.yml** (vendored, diff: 2 lines; old and new lines shown in order)

```yaml
@@ -18,7 +18,7 @@ jobs:
          git diff --exit-code | tee "levelization.patch"
      - name: Upload patch
        if: failure() && steps.assert.outcome == 'failure'
        uses: actions/upload-artifact@v2
        uses: actions/upload-artifact@v4
        continue-on-error: true
        with:
          name: levelization.patch
```
**.github/workflows/xahau-ga-macos.yml** (vendored, new file, 116 lines)

```yaml
name: MacOS - GA Runner

on:
  push:
    branches: ["dev", "candidate", "release"]
  pull_request:
    branches: ["dev", "candidate", "release"]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test:
    strategy:
      matrix:
        generator:
          - Ninja
        configuration:
          - Debug
    runs-on: macos-15
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.
      CACHE_VERSION: 1
      MAIN_BRANCH_NAME: dev
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Conan
        run: |
          brew install conan@1
          # Add Conan 1 to the PATH for this job
          echo "$(brew --prefix conan@1)/bin" >> $GITHUB_PATH

      - name: Install Coreutils
        run: |
          brew install coreutils
          echo "Num proc: $(nproc)"

      - name: Install Ninja
        if: matrix.generator == 'Ninja'
        run: brew install ninja

      - name: Install Python
        run: |
          if which python3 > /dev/null 2>&1; then
            echo "Python 3 executable exists"
            python3 --version
          else
            brew install python@3.12
          fi
          # Create 'python' symlink if it doesn't exist (for tools expecting 'python')
          if ! which python > /dev/null 2>&1; then
            sudo ln -sf $(which python3) /usr/local/bin/python
          fi

      - name: Install CMake
        run: |
          if which cmake > /dev/null 2>&1; then
            echo "cmake executable exists"
            cmake --version
          else
            brew install cmake
          fi

      - name: Install ccache
        run: brew install ccache

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content

      - name: Check environment
        run: |
          echo "PATH:"
          echo "${PATH}" | tr ':' '\n'
          which python && python --version || echo "Python not found"
          which conan && conan --version || echo "Conan not found"
          which cmake && cmake --version || echo "CMake not found"
          clang --version
          ccache --version
          echo "---- Full Environment ----"
          env

      - name: Configure Conan
        run: |
          conan profile new default --detect || true # Ignore error if profile exists
          conan profile update settings.compiler.cppstd=20 default

      - name: Install dependencies
        uses: ./.github/actions/xahau-ga-dependencies
        with:
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
        with:
          generator: ${{ matrix.generator }}
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Test
        run: |
          ${{ env.build_dir }}/rippled --unittest --unittest-jobs $(nproc)
```
**.github/workflows/xahau-ga-nix.yml** (vendored, new file, 123 lines)

```yaml
name: Nix - GA Runner

on:
  push:
    branches: ["dev", "candidate", "release"]
  pull_request:
    branches: ["dev", "candidate", "release"]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build-job:
    runs-on: ubuntu-latest
    outputs:
      artifact_name: ${{ steps.set-artifact-name.outputs.artifact_name }}
    strategy:
      fail-fast: false
      matrix:
        compiler: [gcc]
        configuration: [Debug]
        include:
          - compiler: gcc
            cc: gcc-11
            cxx: g++-11
            compiler_id: gcc-11
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.
      CACHE_VERSION: 1
      MAIN_BRANCH_NAME: dev
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
          # Install specific Conan version needed
          pip install --upgrade "conan<2"

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content

      - name: Configure Conan
        run: |
          conan profile new default --detect || true # Ignore error if profile exists
          conan profile update settings.compiler.cppstd=20 default
          conan profile update settings.compiler=${{ matrix.compiler }} default
          conan profile update settings.compiler.libcxx=libstdc++11 default
          conan profile update env.CC=/usr/bin/${{ matrix.cc }} default
          conan profile update env.CXX=/usr/bin/${{ matrix.cxx }} default
          conan profile update conf.tools.build:compiler_executables='{"c": "/usr/bin/${{ matrix.cc }}", "cpp": "/usr/bin/${{ matrix.cxx }}"}' default

          # Set correct compiler version based on matrix.compiler
          if [ "${{ matrix.compiler }}" = "gcc" ]; then
            conan profile update settings.compiler.version=11 default
          elif [ "${{ matrix.compiler }}" = "clang" ]; then
            conan profile update settings.compiler.version=14 default
          fi
          # Display profile for verification
          conan profile show default

      - name: Check environment
        run: |
          echo "PATH:"
          echo "${PATH}" | tr ':' '\n'
          which conan && conan --version || echo "Conan not found"
          which cmake && cmake --version || echo "CMake not found"
          which ${{ matrix.cc }} && ${{ matrix.cc }} --version || echo "${{ matrix.cc }} not found"
          which ${{ matrix.cxx }} && ${{ matrix.cxx }} --version || echo "${{ matrix.cxx }} not found"
          which ccache && ccache --version || echo "ccache not found"
          echo "---- Full Environment ----"
          env

      - name: Install dependencies
        uses: ./.github/actions/xahau-ga-dependencies
        with:
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: ${{ matrix.compiler_id }}
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
        with:
          generator: Ninja
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          cc: ${{ matrix.cc }}
          cxx: ${{ matrix.cxx }}
          compiler-id: ${{ matrix.compiler_id }}
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Set artifact name
        id: set-artifact-name
        run: |
          ARTIFACT_NAME="build-output-nix-${{ github.run_id }}-${{ matrix.compiler }}-${{ matrix.configuration }}"
          echo "artifact_name=${ARTIFACT_NAME}" >> "$GITHUB_OUTPUT"
          echo "Using artifact name: ${ARTIFACT_NAME}"

      - name: Debug build directory
        run: |
          echo "Checking build directory contents: ${{ env.build_dir }}"
          ls -la ${{ env.build_dir }} || echo "Build directory not found or empty"

      - name: Run tests
        run: |
          # Ensure the binary exists before trying to run
          if [ -f "${{ env.build_dir }}/rippled" ]; then
            ${{ env.build_dir }}/rippled --unittest --unittest-jobs $(nproc)
          else
            echo "Error: rippled executable not found in ${{ env.build_dir }}"
            exit 1
          fi
```
**.gitignore** (vendored, diff: 6 lines)

```
@@ -114,3 +114,9 @@ pkg_out
pkg
CMakeUserPresets.json
bld.rippled/

generated
.vscode

# Suggested in-tree build directory
/.build/
```
**.vscode/settings.json** (vendored, diff: 2 lines; old and new lines shown in order)

```json
@@ -3,7 +3,7 @@
    "C_Cpp.clang_format_path": ".clang-format",
    "C_Cpp.clang_format_fallbackStyle": "{ ColumnLimit: 0 }",
    "[cpp]":{
        "editor.wordBasedSuggestions": false,
        "editor.wordBasedSuggestions": "off",
        "editor.suggest.insertMode": "replace",
        "editor.semanticHighlighting.enabled": true,
        "editor.tabSize": 4,
```
**BUILD.md** (new file, 532 lines)

| :warning: **WARNING** :warning:
|---|
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |

> These instructions also assume a basic familiarity with Conan and CMake.
> If you are unfamiliar with Conan,
> you can read our [crash course](./docs/build/conan.md)
> or the official [Getting Started][3] walkthrough.

## Branches

For a stable release, choose the `master` branch or one of the [tagged
releases](https://github.com/ripple/rippled/releases).

```
git checkout master
```

For the latest release candidate, choose the `release` branch.

```
git checkout release
```

For the latest set of untested features, or to contribute, choose the `develop`
branch.

```
git checkout develop
```

## Minimum Requirements

See [System Requirements](https://xrpl.org/system-requirements.html).

Building rippled generally requires git, Python, Conan, CMake, and a C++ compiler. Some guidance on setting up such a [C++ development environment can be found here](./docs/build/environment.md).

- [Python 3.7](https://www.python.org/downloads/)
- [Conan 1.55](https://conan.io/downloads.html)
- [CMake 3.16](https://cmake.org/download/)

`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:

| Compiler    | Version |
|-------------|---------|
| GCC         | 11      |
| Clang       | 13      |
| Apple Clang | 13.1.6  |
| MSVC        | 19.23   |

### Linux

The Ubuntu operating system has received the highest level of
quality assurance, testing, and support.

Here are [sample instructions for setting up a C++ development environment on Linux](./docs/build/environment.md#linux).

### Mac

Many rippled engineers use macOS for development.

Here are [sample instructions for setting up a C++ development environment on macOS](./docs/build/environment.md#macos).

### Windows

Windows is not recommended for production use at this time.

- Additionally, 32-bit Windows development is not supported.
- Visual Studio 2022 is not yet supported.
  - rippled generally requires [Boost][] 1.77, which Conan cannot build with VS 2022.
  - Until rippled is updated for compatibility with later versions of Boost, Windows developers may need to use Visual Studio 2019.

[Boost]: https://www.boost.org/

## Steps

### Set Up Conan

After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python, Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.

These instructions assume a basic familiarity with Conan and CMake.

If you are unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official [Getting Started][3] walkthrough.

You'll need at least one Conan profile:

```
conan profile new default --detect
```

Update the compiler settings:

```
conan profile update settings.compiler.cppstd=20 default
```

**Linux** developers will commonly have a default Conan [profile][] that compiles
with GCC and links with libstdc++.
If you are linking with libstdc++ (see profile setting `compiler.libcxx`),
then you will need to choose the `libstdc++11` ABI:

```
conan profile update settings.compiler.libcxx=libstdc++11 default
```

**Windows** developers may need to use the x64 native build tools.
An easy way to do that is to run the shortcut "x64 Native Tools Command
Prompt" for the version of Visual Studio that you have installed.

Windows developers must also build `rippled` and its dependencies for the x64
architecture:

```
conan profile update settings.arch=x86_64 default
```

### Multiple compilers

When `/usr/bin/g++` exists on a platform, it is the default cpp compiler. This
default works for some users.

However, if this compiler cannot build rippled or its dependencies, then you can
install another compiler and set Conan and CMake to use it.
Update the `conf.tools.build:compiler_executables` setting in order to set the correct variables (`CMAKE_<LANG>_COMPILER`) in the
generated CMake toolchain file.
For example, on Ubuntu 20, you may have gcc at `/usr/bin/gcc` and g++ at `/usr/bin/g++`; if that is the case, you can select those compilers with:

```
conan profile update 'conf.tools.build:compiler_executables={"c": "/usr/bin/gcc", "cpp": "/usr/bin/g++"}' default
```

Replace `/usr/bin/gcc` and `/usr/bin/g++` with paths to the desired compilers.

It should choose the compiler for dependencies as well,
but not all of them have a Conan recipe that respects this setting (yet).
For the rest, you can set these environment variables.
Replace `<path>` with paths to the desired compilers:

- `conan profile update env.CC=<path> default`
- `conan profile update env.CXX=<path> default`
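Putting the profile settings above together, a non-default compiler setup might look like the following. This is a sketch only; the paths and the gcc-11 choice are examples (the same commands appear in the `xahau-ga-nix.yml` workflow), not requirements.

```bash
# Example: point Conan (and the generated CMake toolchain) at gcc-11/g++-11.
conan profile new default --detect || true
conan profile update settings.compiler.cppstd=20 default
conan profile update settings.compiler.libcxx=libstdc++11 default
conan profile update env.CC=/usr/bin/gcc-11 default
conan profile update env.CXX=/usr/bin/g++-11 default
conan profile update 'conf.tools.build:compiler_executables={"c": "/usr/bin/gcc-11", "cpp": "/usr/bin/g++-11"}' default
conan profile show default   # verify the resulting profile
```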
Export our [Conan recipe for Snappy](./external/snappy).
It does not explicitly link the C++ standard library,
which allows you to statically link it with GCC, if you want.

```
conan export external/snappy snappy/1.1.9@
```

Export our [Conan recipe for SOCI](./external/soci).
It patches their CMake to correctly import its dependencies.

```
conan export external/soci soci/4.0.3@
```

### Build and Test

1. Create a build directory and move into it.

   ```
   mkdir .build
   cd .build
   ```

   You can use any directory name. Conan treats your working directory as an
   install folder and generates files with implementation details.
   You don't need to worry about these files, but make sure to change
   your working directory to your build directory before calling Conan.

   **Note:** You can specify a directory for the installation files by adding
   the `install-folder` or `-if` option to every `conan install` command
   in the next step.

2. Generate CMake files for every configuration you want to build.

   ```
   conan install .. --output-folder . --build missing --settings build_type=Release
   conan install .. --output-folder . --build missing --settings build_type=Debug
   ```

   For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
   you only need to run this command once.
   For a multi-configuration generator, e.g. `Visual Studio`, you may want to
   run it more than once.

   Each of these commands should also have a different `build_type` setting.
   A second command with the same `build_type` setting will overwrite the files
   generated by the first. You can pass the build type on the command line with
   `--settings build_type=$BUILD_TYPE` or in the profile itself,
   under the section `[settings]` with the key `build_type`.

   If you are using a Microsoft Visual C++ compiler,
   then you will need to ensure consistency between the `build_type` setting
   and the `compiler.runtime` setting.

   When `build_type` is `Release`, `compiler.runtime` should be `MT`.

   When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.

   ```
   conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
   conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
   ```

3. Configure CMake and pass the toolchain file generated by Conan, located at
   `$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.

   Single-config generators:

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
   ```

   Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
   and make sure it matches the `build_type` setting you chose in the previous
   step.

   Multi-config generators:

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
   ```

   **Note:** You can pass build options for `rippled` in this step.

4. Build `rippled`.

   For a single-configuration generator, it will build whatever configuration
   you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator,
   you must pass the option `--config` to select the build configuration.

   Single-config generators:

   ```
   cmake --build .
   ```

   Multi-config generators:

   ```
   cmake --build . --config Release
   cmake --build . --config Debug
   ```

5. Test rippled.

   Single-config generators:

   ```
   ./rippled --unittest
   ```

   Multi-config generators:

   ```
   ./Release/rippled --unittest
   ./Debug/rippled --unittest
   ```

   The location of `rippled` in your build directory depends on your CMake
   generator. Pass `--help` to see the rest of the command line options.
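For reference, the single-configuration flow above condenses to a few commands. This sketch assumes a Linux or macOS shell, a single-config generator, and a Release build; it is only a summary of steps 1 through 5.

```bash
# Condensed single-configuration build, as described in steps 1-5 above.
mkdir -p .build && cd .build
conan install .. --output-folder . --build missing --settings build_type=Release
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
      -DCMAKE_BUILD_TYPE=Release ..
cmake --build .
./rippled --unittest
```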
## Coverage report

The coverage report is intended for developers using compilers GCC
or Clang (including Apple Clang). It is generated by the build target `coverage`,
which is only enabled when the `coverage` option is set, e.g. with
`--options coverage=True` in `conan` or `-Dcoverage=ON` variable in `cmake`

Prerequisites for the coverage report:

- [gcovr tool][gcovr] (can be installed e.g. with [pip][python-pip])
- `gcov` for GCC (installed with the compiler by default) or
  `llvm-cov` for Clang (installed with the compiler by default)
- `Debug` build type

A coverage report is created when the following steps are completed, in order:

1. `rippled` binary built with instrumentation data, enabled by the `coverage`
   option mentioned above
2. completed run of unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
   to assemble both instrumentation data and the coverage capture data into a coverage report

The above steps are automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report, by setting
the `coverage_test` variable in `cmake`

The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
by setting the `coverage_format` variable in `cmake`. It is also possible
to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.

By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.

Example use with some cmake variables set:

```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```

After the `coverage` target is completed, the generated coverage report will be
stored inside the build directory, as either of:

- file named `coverage.`_extension_, with a suitable extension for the report format, or
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.

## Options

| Option | Default Value | Description |
| --- | --- | --- |
| `assert` | OFF | Enable assertions. |
| `reporting` | OFF | Build the reporting mode feature. |
| `coverage` | OFF | Prepare the coverage report. |
| `tests` | ON | Build tests. |
| `unity` | ON | Configure a unity build. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |

[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
translation units. Non-unity builds may be faster for incremental builds,
and can be helpful for detecting `#include` omissions.

## Troubleshooting

### Conan

If you have trouble building dependencies after changing Conan settings,
try removing the Conan cache.

```
rm -rf ~/.conan/data
```

### no std::result_of

If your compiler version is recent enough to have removed `std::result_of` as
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
definition to your build.

```
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
```

### recompile with -fPIC

If you get a linker error suggesting that you recompile Boost with
position-independent code, such as:

```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
requires unsupported dynamic reloc 11; recompile with -fPIC
```

Conan most likely downloaded a bad binary distribution of the dependency.
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
for Linux. The solution is to build the dependency locally by passing
`--build boost` when calling `conan install`.

```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/dc8aedd23a0f0a773a5fcdcfe1ae3e89c4205978/lib/libboost_container.a(alloc_lib.o): requires unsupported dynamic reloc 11; recompile with -fPIC
```
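As a sketch of that workaround, the `--build boost` flag from the paragraph above can be added to the install command used in the build steps; the Release setting here is just an example, and combining `--build missing` with `--build boost` is an assumption about how you are already invoking Conan.

```bash
# Force Boost to be rebuilt from source instead of using the bad prebuilt package.
cd .build
conan install .. \
  --output-folder . \
  --build missing \
  --build boost \
  --settings build_type=Release
```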
## Add a Dependency

If you want to experiment with a new package, follow these steps:

1. Search for the package on [Conan Center](https://conan.io/center/).
2. Modify [`conanfile.py`](./conanfile.py):
    - Add a version of the package to the `requires` property.
    - Change any default options for the package by adding them to the
      `default_options` property (with syntax `'$package:$option': $value`).
3. Modify [`CMakeLists.txt`](./CMakeLists.txt):
    - Add a call to `find_package($package REQUIRED)`.
    - Link a library from the package to the target `ripple_libs`
      (search for the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`).
4. Start coding! Don't forget to include whatever headers you need from the package.

## A crash course in CMake and Conan

To better understand how to use Conan,
we should first understand _why_ we use Conan,
and to understand that,
we need to understand how we use CMake.

### CMake

Technically, you don't need CMake to build this project.
You could manually compile every translation unit into an object file,
using the right compiler options,
and then manually link all those objects together,
using the right linker options.
However, that is very tedious and error-prone,
which is why we lean on tools like CMake.

We have written CMake configuration files
([`CMakeLists.txt`](./CMakeLists.txt) and friends)
for this project so that CMake can be used to correctly compile and link
all of the translation units in it.
Or rather, CMake will generate files for a separate build system
(e.g. Make, Ninja, Visual Studio, Xcode, etc.)
that compile and link all of the translation units.
Even then, CMake has parameters, some of which are platform-specific.
In CMake's parlance, parameters are specially-named **variables** like
[`CMAKE_BUILD_TYPE`][build_type] or
[`CMAKE_MSVC_RUNTIME_LIBRARY`][runtime].
Parameters include:

- what build system to generate files for
- where to find the compiler and linker
- where to find dependencies, e.g. libraries and headers
- how to link dependencies, e.g. any special compiler or linker flags that
  need to be used with them, including preprocessor definitions
- how to compile translation units, e.g. with optimizations, debug symbols,
  position-independent code, etc.
- on Windows, which runtime library to link with

For some of these parameters, like the build system and compiler,
CMake goes through a complicated search process to choose default values.
For others, like the dependencies,
_we_ had written in the CMake configuration files of this project
our own complicated process to choose defaults.
For most developers, things "just worked"... until they didn't, and then
you were left trying to debug one of these complicated processes, instead of
choosing and manually passing the parameter values yourself.

You can pass every parameter to CMake on the command line,
but writing out these parameters every time we want to configure CMake is
a pain.
Most humans prefer to put them into a configuration file, once, that
CMake can read every time it is configured.
For CMake, that file is a [toolchain file][toolchain].

### Conan

These next few paragraphs on Conan are going to read much like the ones above
for CMake.

Technically, you don't need Conan to build this project.
You could manually download, configure, build, and install all of the
dependencies yourself, and then pass all of the parameters necessary for
CMake to link to those dependencies.
To guarantee ABI compatibility, you must be sure to use the same set of
compiler and linker options for all dependencies _and_ this project.
However, that is very tedious and error-prone, which is why we lean on tools
like Conan.

We have written a Conan configuration file ([`conanfile.py`](./conanfile.py))
so that Conan can be used to correctly download, configure, build, and install
all of the dependencies for this project,
using a single set of compiler and linker options for all of them.
It generates files that contain almost all of the parameters that CMake
expects.
Those files include:

- A single toolchain file.
- For every dependency, a CMake [package configuration file][pcf],
  [package version file][pvf], and for every build type, a package
  targets file.
  Together, these files implement version checking and define `IMPORTED`
  targets for the dependencies.

The toolchain file itself amends the search path
([`CMAKE_PREFIX_PATH`][prefix_path]) so that [`find_package()`][find_package]
will [discover][search] the generated package configuration files.

**Nearly all we must do to properly configure CMake is pass the toolchain
file.**
What CMake parameters are left out?
You'll still need to pick a build system generator,
and if you choose a single-configuration generator,
you'll need to pass the `CMAKE_BUILD_TYPE`,
which should match the `build_type` setting you gave to Conan.

Even then, Conan has parameters, some of which are platform-specific.
In Conan's parlance, parameters are either settings or options.
**Settings** are shared by all packages, e.g. the build type.
**Options** are specific to a given package, e.g. whether to build and link
OpenSSL as a shared library.

For settings, Conan goes through a complicated search process to choose
defaults.
For options, each package recipe defines its own defaults.

You can pass every parameter to Conan on the command line,
but it is more convenient to put them in a [profile][profile].
**All we must do to properly configure Conan is edit and pass the profile.**

[1]: https://github.com/conan-io/conan-center-index/issues/13168
[5]: https://en.wikipedia.org/wiki/Unity_build
[6]: https://github.com/boostorg/beast/issues/2648
[7]: https://github.com/boostorg/beast/issues/2661
[gcovr]: https://gcovr.com/en/stable/getting-started.html
[python-pip]: https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/
[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
[runtime]: https://cmake.org/cmake/help/latest/variable/CMAKE_MSVC_RUNTIME_LIBRARY.html
[toolchain]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html
[pcf]: https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#package-configuration-file
[pvf]: https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#package-version-file
[find_package]: https://cmake.org/cmake/help/latest/command/find_package.html
[search]: https://cmake.org/cmake/help/latest/command/find_package.html#search-procedure
[prefix_path]: https://cmake.org/cmake/help/latest/variable/CMAKE_PREFIX_PATH.html
[profile]: https://docs.conan.io/en/latest/reference/profiles.html
440
Builds/CMake/CodeCoverage.cmake
Normal file
440
Builds/CMake/CodeCoverage.cmake
Normal file
@@ -0,0 +1,440 @@
|
||||
# Copyright (c) 2012 - 2017, Lars Bilke
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
#    list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
#    may be used to endorse or promote products derived from this software without
#    specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# CHANGES:
#
# 2012-01-31, Lars Bilke
# - Enable Code Coverage
#
# 2013-09-17, Joakim Söderberg
# - Added support for Clang.
# - Some additional usage instructions.
#
# 2016-02-03, Lars Bilke
# - Refactored functions to use named parameters
#
# 2017-06-02, Lars Bilke
# - Merged with modified version from github.com/ufz/ogs
#
# 2019-05-06, Anatolii Kurotych
# - Remove unnecessary --coverage flag
#
# 2019-12-13, FeRD (Frank Dana)
# - Deprecate COVERAGE_LCOVR_EXCLUDES and COVERAGE_GCOVR_EXCLUDES lists in favor
#   of tool-agnostic COVERAGE_EXCLUDES variable, or EXCLUDE setup arguments.
# - CMake 3.4+: All excludes can be specified relative to BASE_DIRECTORY
# - All setup functions: accept BASE_DIRECTORY, EXCLUDE list
# - Set lcov basedir with -b argument
# - Add automatic --demangle-cpp in lcovr, if 'c++filt' is available (can be
#   overridden with NO_DEMANGLE option in setup_target_for_coverage_lcovr().)
# - Delete output dir, .info file on 'make clean'
# - Remove Python detection, since version mismatches will break gcovr
# - Minor cleanup (lowercase function names, update examples...)
#
# 2019-12-19, FeRD (Frank Dana)
# - Rename Lcov outputs, make filtered file canonical, fix cleanup for targets
#
# 2020-01-19, Bob Apthorpe
# - Added gfortran support
#
# 2020-02-17, FeRD (Frank Dana)
# - Make all add_custom_target()s VERBATIM to auto-escape wildcard characters
#   in EXCLUDEs, and remove manual escaping from gcovr targets
#
# 2021-01-19, Robin Mueller
# - Add CODE_COVERAGE_VERBOSE option which will allow to print out commands which are run
# - Added the option for users to set the GCOVR_ADDITIONAL_ARGS variable to supply additional
#   flags to the gcovr command
#
# 2020-05-04, Mihchael Davis
# - Add -fprofile-abs-path to make gcno files contain absolute paths
# - Fix BASE_DIRECTORY not working when defined
# - Change BYPRODUCT from folder to index.html to stop ninja from complaining about double defines
#
# 2021-05-10, Martin Stump
# - Check if the generator is multi-config before warning about non-Debug builds
#
# 2022-02-22, Marko Wehle
# - Change gcovr output from -o <filename> for --xml <filename> and --html <filename> output respectively.
#   This will allow for Multiple Output Formats at the same time by making use of GCOVR_ADDITIONAL_ARGS, e.g. GCOVR_ADDITIONAL_ARGS "--txt".
#
# 2022-09-28, Sebastian Mueller
# - fix append_coverage_compiler_flags_to_target to correctly add flags
# - replace "-fprofile-arcs -ftest-coverage" with "--coverage" (equivalent)
#
# 2024-01-04, Bronek Kozicki
# - remove setup_target_for_coverage_lcov (slow) and setup_target_for_coverage_fastcov (no support for Clang)
# - fix Clang support by adding find_program( ... llvm-cov )
# - add Apple Clang support by adding execute_process( COMMAND xcrun -f llvm-cov ... )
# - add CODE_COVERAGE_GCOV_TOOL to explicitly select gcov tool and disable find_program
# - replace both functions setup_target_for_coverage_gcovr_* with a single setup_target_for_coverage_gcovr
# - add support for all gcovr output formats
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
#
# 2. Add the following line to your CMakeLists.txt (best inside an if-condition
#    using a CMake option() to enable it just optionally):
#      include(CodeCoverage)
#
# 3. Append necessary compiler flags for all supported source files:
#      append_coverage_compiler_flags()
#    Or for specific target:
#      append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
#
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
#
# 4. If you need to exclude additional directories from the report, specify them
#    using full paths in the COVERAGE_EXCLUDES variable before calling
#    setup_target_for_coverage_*().
#    Example:
#      set(COVERAGE_EXCLUDES
#          '${PROJECT_SOURCE_DIR}/src/dir1/*'
#          '/path/to/my/src/dir2/*')
#    Or, use the EXCLUDE argument to setup_target_for_coverage_*().
#    Example:
#      setup_target_for_coverage_gcovr(
#          NAME coverage
#          EXECUTABLE testrunner
#          EXCLUDE "${PROJECT_SOURCE_DIR}/src/dir1/*" "/path/to/my/src/dir2/*")
#
# 4.a NOTE: With CMake 3.4+, COVERAGE_EXCLUDES or EXCLUDE can also be set
#     relative to the BASE_DIRECTORY (default: PROJECT_SOURCE_DIR)
#     Example:
#       set(COVERAGE_EXCLUDES "dir1/*")
#       setup_target_for_coverage_gcovr(
#           NAME coverage
#           EXECUTABLE testrunner
#           FORMAT html-details
#           BASE_DIRECTORY "${PROJECT_SOURCE_DIR}/src"
#           EXCLUDE "dir2/*")
#
# 4.b If you need to pass specific options to gcovr, specify them in
#     GCOVR_ADDITIONAL_ARGS variable.
#     Example:
#       set (GCOVR_ADDITIONAL_ARGS --exclude-throw-branches --exclude-noncode-lines -s)
#       setup_target_for_coverage_gcovr(
#           NAME coverage
#           EXECUTABLE testrunner
#           EXCLUDE "src/dir1" "src/dir2")
#
# 5. Use the functions described below to create a custom make target which
#    runs your test executable and produces a code coverage report.
#
# 6. Build a Debug build:
#      cmake -DCMAKE_BUILD_TYPE=Debug ..
#      make
#      make my_coverage_target
#

include(CMakeParseArguments)

option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)

# Check prereqs
find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)

if(DEFINED CODE_COVERAGE_GCOV_TOOL)
  set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
  set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
  if(APPLE)
    execute_process( COMMAND xcrun -f llvm-cov
      OUTPUT_VARIABLE LLVMCOV_PATH
      OUTPUT_STRIP_TRAILING_WHITESPACE
    )
  else()
    find_program( LLVMCOV_PATH llvm-cov )
  endif()
  if(LLVMCOV_PATH)
    set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
  endif()
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
  find_program( GCOV_PATH gcov )
  set(GCOV_TOOL "${GCOV_PATH}")
endif()

# Check supported compiler (Clang, GNU and Flang)
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
foreach(LANG ${LANGUAGES})
  if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
    if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
      message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
    endif()
  elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
         AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang")
    message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
  endif()
endforeach()

set(COVERAGE_COMPILER_FLAGS "-g --coverage"
    CACHE INTERNAL "")
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
  include(CheckCXXCompilerFlag)
  check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
  if(HAVE_cxx_fprofile_abs_path)
    set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
  endif()
  include(CheckCCompilerFlag)
  check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
  if(HAVE_c_fprofile_abs_path)
    set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
  endif()
endif()

set(CMAKE_Fortran_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the Fortran compiler during coverage builds."
    FORCE )
set(CMAKE_CXX_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the C++ compiler during coverage builds."
    FORCE )
set(CMAKE_C_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the C compiler during coverage builds."
    FORCE )
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
    ""
    CACHE STRING "Flags used for linking binaries during coverage builds."
    FORCE )
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
    ""
    CACHE STRING "Flags used by the shared libraries linker during coverage builds."
    FORCE )
mark_as_advanced(
    CMAKE_Fortran_FLAGS_COVERAGE
    CMAKE_CXX_FLAGS_COVERAGE
    CMAKE_C_FLAGS_COVERAGE
    CMAKE_EXE_LINKER_FLAGS_COVERAGE
    CMAKE_SHARED_LINKER_FLAGS_COVERAGE )

get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
  message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)

if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
  link_libraries(gcov)
endif()

# Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
# the coverage generation will not complete.
#
# setup_target_for_coverage_gcovr(
#     NAME ctest_coverage                    # New target name
#     EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
#     DEPENDENCIES executable_target         # Dependencies to build first
#     BASE_DIRECTORY "../"                   # Base directory for report
#                                            #  (defaults to PROJECT_SOURCE_DIR)
#     FORMAT "cobertura"                     # Output format, one of:
#                                            #  xml cobertura sonarqube json-summary
#                                            #  json-details coveralls csv txt
#                                            #  html-single html-nested html-details
#                                            #  (xml is an alias to cobertura;
#                                            #  if no format is set, defaults to xml)
#     EXCLUDE "src/dir1/*" "src/dir2/*"      # Patterns to exclude (can be relative
#                                            #  to BASE_DIRECTORY, with CMake 3.4+)
# )
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the
# GCVOR command.
function(setup_target_for_coverage_gcovr)
  set(options NONE)
  set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
  set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
  cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})

  if(NOT GCOV_TOOL)
    message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...")
  endif()

  if(NOT GCOVR_PATH)
    message(FATAL_ERROR "Could not find gcovr tool! Aborting...")
  endif()

  # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
  if(DEFINED Coverage_BASE_DIRECTORY)
    get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
  else()
    set(BASEDIR ${PROJECT_SOURCE_DIR})
  endif()

  if(NOT DEFINED Coverage_FORMAT)
    set(Coverage_FORMAT xml)
  endif()

  if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
    message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
  else()
    if((Coverage_FORMAT STREQUAL "html-details")
       OR (Coverage_FORMAT STREQUAL "html-nested"))
      set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
      set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
    elseif(Coverage_FORMAT STREQUAL "html-single")
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
    elseif((Coverage_FORMAT STREQUAL "json-summary")
           OR (Coverage_FORMAT STREQUAL "json-details")
           OR (Coverage_FORMAT STREQUAL "coveralls"))
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
    elseif(Coverage_FORMAT STREQUAL "txt")
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt)
    elseif(Coverage_FORMAT STREQUAL "csv")
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv)
    else()
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml)
    endif()
  endif()

  if((Coverage_FORMAT STREQUAL "cobertura")
     OR (Coverage_FORMAT STREQUAL "xml"))
    list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty )
    set(Coverage_FORMAT cobertura) # overwrite xml
  elseif(Coverage_FORMAT STREQUAL "sonarqube")
    list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}" )
  elseif(Coverage_FORMAT STREQUAL "json-summary")
    list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty)
  elseif(Coverage_FORMAT STREQUAL "json-details")
    list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty)
  elseif(Coverage_FORMAT STREQUAL "coveralls")
    list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty)
  elseif(Coverage_FORMAT STREQUAL "csv")
    list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}" )
  elseif(Coverage_FORMAT STREQUAL "txt")
    list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}" )
  elseif(Coverage_FORMAT STREQUAL "html-single")
    list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained)
  elseif(Coverage_FORMAT STREQUAL "html-nested")
    list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}" )
  elseif(Coverage_FORMAT STREQUAL "html-details")
    list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}" )
  else()
    message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...")
  endif()

  # Collect excludes (CMake 3.4+: Also compute absolute paths)
  set(GCOVR_EXCLUDES "")
  foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
    if(CMAKE_VERSION VERSION_GREATER 3.4)
      get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
    endif()
    list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
  endforeach()
  list(REMOVE_DUPLICATES GCOVR_EXCLUDES)

  # Combine excludes to several -e arguments
  set(GCOVR_EXCLUDE_ARGS "")
  foreach(EXCLUDE ${GCOVR_EXCLUDES})
    list(APPEND GCOVR_EXCLUDE_ARGS "-e")
    list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
  endforeach()

  # Set up commands which will be run to generate coverage data
  # Run tests
  set(GCOVR_EXEC_TESTS_CMD
      ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
  )

  # Create folder
  if(DEFINED GCOVR_CREATE_FOLDER)
    set(GCOVR_FOLDER_CMD
        ${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
  else()
    set(GCOVR_FOLDER_CMD echo) # dummy
  endif()

  # Running gcovr
  set(GCOVR_CMD
      ${GCOVR_PATH}
      --gcov-executable ${GCOV_TOOL}
      --gcov-ignore-parse-errors=negative_hits.warn_once_per_file
      -r ${BASEDIR}
      ${GCOVR_ADDITIONAL_ARGS}
      ${GCOVR_EXCLUDE_ARGS}
      --object-directory=${PROJECT_BINARY_DIR}
  )

  if(CODE_COVERAGE_VERBOSE)
    message(STATUS "Executed command report")

    message(STATUS "Command to run tests: ")
    string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
    message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")

    if(NOT GCOVR_FOLDER_CMD STREQUAL "echo")
      message(STATUS "Command to create a folder: ")
      string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
      message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
    endif()

    message(STATUS "Command to generate gcovr coverage data: ")
    string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}")
    message(STATUS "${GCOVR_CMD_SPACED}")
  endif()

  add_custom_target(${Coverage_NAME}
    COMMAND ${GCOVR_EXEC_TESTS_CMD}
    COMMAND ${GCOVR_FOLDER_CMD}
    COMMAND ${GCOVR_CMD}

    BYPRODUCTS ${GCOVR_OUTPUT_FILE}
    WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
    DEPENDS ${Coverage_DEPENDENCIES}
    VERBATIM # Protect arguments to commands
    COMMENT "Running gcovr to produce code coverage report."
  )

  # Show info where to find the report
  add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
    COMMAND ;
    COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
  )
endfunction() # setup_target_for_coverage_gcovr

function(append_coverage_compiler_flags)
  set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
  message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
endfunction() # append_coverage_compiler_flags

# Setup coverage for specific library
function(append_coverage_compiler_flags_to_target name)
  separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
  target_compile_options(${name} PRIVATE ${_flag_list})
  if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
    target_link_libraries(${name} PRIVATE gcov)
  endif()
endfunction()

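Elsewhere in this changeset the module above is wired into the build through the `coverage`, `coverage_format`, and `coverage_test_parallelism` options and a `coverage` target created with `setup_target_for_coverage_gcovr` (see the hunks that follow). A rough sketch of driving it, assuming gcovr is installed and nothing beyond the defaults introduced here:

```
# Configure with coverage instrumentation (a Debug build is forced when coverage is ON).
cmake -Dcoverage=ON -Dcoverage_format=html-details ..
# Build, run the unit tests, and produce the gcovr report via the 'coverage' target.
cmake --build . --target coverage
```
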
@@ -130,7 +130,16 @@ else ()
|
||||
>)
|
||||
endif ()
|
||||
|
||||
if (use_gold AND is_gcc)
|
||||
if (use_mold)
|
||||
# use mold linker if available
|
||||
execute_process (
|
||||
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version
|
||||
ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
|
||||
if ("${LD_VERSION}" MATCHES "mold")
|
||||
target_link_libraries (common INTERFACE -fuse-ld=mold)
|
||||
endif ()
|
||||
unset (LD_VERSION)
|
||||
elseif (use_gold AND is_gcc)
|
||||
# use gold linker if available
|
||||
execute_process (
|
||||
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version
|
||||
@@ -162,9 +171,7 @@ if (use_gold AND is_gcc)
|
||||
$<$<NOT:$<BOOL:${static}>>:-Wl,--disable-new-dtags>)
|
||||
endif ()
|
||||
unset (LD_VERSION)
|
||||
endif ()
|
||||
|
||||
if (use_lld)
|
||||
elseif (use_lld)
|
||||
# use lld linker if available
|
||||
execute_process (
|
||||
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version
|
||||
@@ -175,6 +182,7 @@ if (use_lld)
|
||||
unset (LD_VERSION)
|
||||
endif()
|
||||
|
||||
|
||||
if (assert)
|
||||
foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE)
|
||||
STRING (REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}")
|
||||
|
||||
@@ -23,6 +23,11 @@ else()
|
||||
message(STATUS "ACL not found, continuing without ACL support")
|
||||
endif()
|
||||
|
||||
add_library(libxrpl INTERFACE)
|
||||
target_link_libraries(libxrpl INTERFACE xrpl_core)
|
||||
add_library(xrpl::libxrpl ALIAS libxrpl)
|
||||
|
||||
|
||||
#[===============================[
|
||||
beast/legacy FILES:
|
||||
TODO: review these sources for removal or replacement
|
||||
@@ -79,6 +84,7 @@ target_sources (xrpl_core PRIVATE
|
||||
subdir: protocol
|
||||
#]===============================]
|
||||
src/ripple/protocol/impl/AccountID.cpp
|
||||
src/ripple/protocol/impl/AMMCore.cpp
|
||||
src/ripple/protocol/impl/Book.cpp
|
||||
src/ripple/protocol/impl/BuildInfo.cpp
|
||||
src/ripple/protocol/impl/ErrorCodes.cpp
|
||||
@@ -86,10 +92,13 @@ target_sources (xrpl_core PRIVATE
|
||||
src/ripple/protocol/impl/Indexes.cpp
|
||||
src/ripple/protocol/impl/InnerObjectFormats.cpp
|
||||
src/ripple/protocol/impl/Issue.cpp
|
||||
src/ripple/protocol/impl/STIssue.cpp
|
||||
src/ripple/protocol/impl/Keylet.cpp
|
||||
src/ripple/protocol/impl/LedgerFormats.cpp
|
||||
src/ripple/protocol/impl/LedgerHeader.cpp
|
||||
src/ripple/protocol/impl/PublicKey.cpp
|
||||
src/ripple/protocol/impl/Quality.cpp
|
||||
src/ripple/protocol/impl/QualityFunction.cpp
|
||||
src/ripple/protocol/impl/Rate2.cpp
|
||||
src/ripple/protocol/impl/Rules.cpp
|
||||
src/ripple/protocol/impl/SField.cpp
|
||||
@@ -104,7 +113,9 @@ target_sources (xrpl_core PRIVATE
|
||||
src/ripple/protocol/impl/STObject.cpp
|
||||
src/ripple/protocol/impl/STParsedJSON.cpp
|
||||
src/ripple/protocol/impl/STPathSet.cpp
|
||||
src/ripple/protocol/impl/STXChainBridge.cpp
|
||||
src/ripple/protocol/impl/STTx.cpp
|
||||
src/ripple/protocol/impl/XChainAttestations.cpp
|
||||
src/ripple/protocol/impl/STValidation.cpp
|
||||
src/ripple/protocol/impl/STVar.cpp
|
||||
src/ripple/protocol/impl/STVector256.cpp
|
||||
@@ -118,6 +129,9 @@ target_sources (xrpl_core PRIVATE
|
||||
src/ripple/protocol/impl/UintTypes.cpp
|
||||
src/ripple/protocol/impl/digest.cpp
|
||||
src/ripple/protocol/impl/tokens.cpp
|
||||
src/ripple/protocol/impl/NFTSyntheticSerializer.cpp
|
||||
src/ripple/protocol/impl/NFTokenID.cpp
|
||||
src/ripple/protocol/impl/NFTokenOfferID.cpp
|
||||
#[===============================[
|
||||
main sources:
|
||||
subdir: crypto
|
||||
@@ -144,10 +158,10 @@ target_link_libraries (xrpl_core
|
||||
PUBLIC
|
||||
OpenSSL::Crypto
|
||||
Ripple::boost
|
||||
NIH::WasmEdge
|
||||
wasmedge::wasmedge
|
||||
Ripple::syslibs
|
||||
NIH::secp256k1
|
||||
NIH::ed25519-donna
|
||||
secp256k1::secp256k1
|
||||
ed25519::ed25519
|
||||
date::date
|
||||
Ripple::opts)
|
||||
#[=================================[
|
||||
@@ -155,31 +169,54 @@ target_link_libraries (xrpl_core
|
||||
#]=================================]
|
||||
install (
|
||||
FILES
|
||||
src/ripple/basics/algorithm.h
|
||||
src/ripple/basics/Archive.h
|
||||
src/ripple/basics/base64.h
|
||||
src/ripple/basics/base_uint.h
|
||||
src/ripple/basics/BasicConfig.h
|
||||
src/ripple/basics/Blob.h
|
||||
src/ripple/basics/Buffer.h
|
||||
src/ripple/basics/ByteUtilities.h
|
||||
src/ripple/basics/chrono.h
|
||||
src/ripple/basics/comparators.h
|
||||
src/ripple/basics/CompressionAlgorithms.h
|
||||
src/ripple/basics/contract.h
|
||||
src/ripple/basics/CountedObject.h
|
||||
src/ripple/basics/DecayingSample.h
|
||||
src/ripple/basics/Expected.h
|
||||
src/ripple/basics/FeeUnits.h
|
||||
src/ripple/basics/FileUtilities.h
|
||||
src/ripple/basics/hardened_hash.h
|
||||
src/ripple/basics/IOUAmount.h
|
||||
src/ripple/basics/join.h
|
||||
src/ripple/basics/KeyCache.h
|
||||
src/ripple/basics/LocalValue.h
|
||||
src/ripple/basics/Log.h
|
||||
src/ripple/basics/make_SSLContext.h
|
||||
src/ripple/basics/MathUtilities.h
|
||||
src/ripple/basics/mulDiv.h
|
||||
src/ripple/basics/Number.h
|
||||
src/ripple/basics/partitioned_unordered_map.h
|
||||
src/ripple/basics/PerfLog.h
|
||||
src/ripple/basics/random.h
|
||||
src/ripple/basics/RangeSet.h
|
||||
src/ripple/basics/README.md
|
||||
src/ripple/basics/ResolverAsio.h
|
||||
src/ripple/basics/Resolver.h
|
||||
src/ripple/basics/safe_cast.h
|
||||
src/ripple/basics/scope.h
|
||||
src/ripple/basics/SHAMapHash.h
|
||||
src/ripple/basics/Slice.h
|
||||
src/ripple/basics/spinlock.h
|
||||
src/ripple/basics/strHex.h
|
||||
src/ripple/basics/StringUtilities.h
|
||||
src/ripple/basics/TaggedCache.h
|
||||
src/ripple/basics/tagged_integer.h
|
||||
src/ripple/basics/ThreadSafetyAnalysis.h
|
||||
src/ripple/basics/ToString.h
|
||||
src/ripple/basics/UnorderedContainers.h
|
||||
src/ripple/basics/UptimeClock.h
|
||||
src/ripple/basics/XRPAmount.h
|
||||
src/ripple/basics/algorithm.h
|
||||
src/ripple/basics/base_uint.h
|
||||
src/ripple/basics/chrono.h
|
||||
src/ripple/basics/contract.h
|
||||
src/ripple/basics/FeeUnits.h
|
||||
src/ripple/basics/hardened_hash.h
|
||||
src/ripple/basics/strHex.h
|
||||
DESTINATION include/ripple/basics)
|
||||
install (
|
||||
FILES
|
||||
@@ -190,6 +227,7 @@ install (
|
||||
install (
|
||||
FILES
|
||||
src/ripple/json/JsonPropertyStream.h
|
||||
src/ripple/json/MultivarJson.h
|
||||
src/ripple/json/Object.h
|
||||
src/ripple/json/Output.h
|
||||
src/ripple/json/Writer.h
|
||||
@@ -206,28 +244,37 @@ install (
|
||||
install (
|
||||
FILES
|
||||
src/ripple/protocol/AccountID.h
|
||||
src/ripple/protocol/AMMCore.h
|
||||
src/ripple/protocol/AmountConversions.h
|
||||
src/ripple/protocol/Book.h
|
||||
src/ripple/protocol/BuildInfo.h
|
||||
src/ripple/protocol/ErrorCodes.h
|
||||
src/ripple/protocol/Feature.h
|
||||
src/ripple/protocol/Fees.h
|
||||
src/ripple/protocol/HashPrefix.h
|
||||
src/ripple/protocol/Indexes.h
|
||||
src/ripple/protocol/InnerObjectFormats.h
|
||||
src/ripple/protocol/Issue.h
|
||||
src/ripple/protocol/json_get_or_throw.h
|
||||
src/ripple/protocol/KeyType.h
|
||||
src/ripple/protocol/Keylet.h
|
||||
src/ripple/protocol/KnownFormats.h
|
||||
src/ripple/protocol/LedgerFormats.h
|
||||
src/ripple/protocol/LedgerHeader.h
|
||||
src/ripple/protocol/NFTSyntheticSerializer.h
|
||||
src/ripple/protocol/NFTokenID.h
|
||||
src/ripple/protocol/NFTokenOfferID.h
|
||||
src/ripple/protocol/Protocol.h
|
||||
src/ripple/protocol/PublicKey.h
|
||||
src/ripple/protocol/Quality.h
|
||||
src/ripple/protocol/QualityFunction.h
|
||||
src/ripple/protocol/Rate.h
|
||||
src/ripple/protocol/Rules.h
|
||||
src/ripple/protocol/SField.h
|
||||
src/ripple/protocol/SOTemplate.h
|
||||
src/ripple/protocol/STAccount.h
|
||||
src/ripple/protocol/STAmount.h
|
||||
src/ripple/protocol/STIssue.h
|
||||
src/ripple/protocol/STArray.h
|
||||
src/ripple/protocol/STBase.h
|
||||
src/ripple/protocol/STBitString.h
|
||||
@@ -239,6 +286,8 @@ install (
|
||||
src/ripple/protocol/STParsedJSON.h
|
||||
src/ripple/protocol/STPathSet.h
|
||||
src/ripple/protocol/STTx.h
|
||||
src/ripple/protocol/XChainAttestations.h
|
||||
src/ripple/protocol/STXChainBridge.h
|
||||
src/ripple/protocol/STValidation.h
|
||||
src/ripple/protocol/STVector256.h
|
||||
src/ripple/protocol/SecretKey.h
|
||||
@@ -254,6 +303,9 @@ install (
|
||||
src/ripple/protocol/UintTypes.h
|
||||
src/ripple/protocol/digest.h
|
||||
src/ripple/protocol/jss.h
|
||||
src/ripple/protocol/serialize.h
|
||||
src/ripple/protocol/nft.h
|
||||
src/ripple/protocol/nftPageMask.h
|
||||
src/ripple/protocol/tokens.h
|
||||
DESTINATION include/ripple/protocol)
|
||||
install (
|
||||
@@ -273,6 +325,7 @@ install (
|
||||
DESTINATION include/ripple/beast/clock)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/core/CurrentThreadName.h
|
||||
src/ripple/beast/core/LexicalCast.h
|
||||
src/ripple/beast/core/List.h
|
||||
src/ripple/beast/core/SemanticVersion.h
|
||||
@@ -286,6 +339,14 @@ install (
|
||||
install (
|
||||
FILES src/ripple/beast/hash/impl/xxhash.h
|
||||
DESTINATION include/ripple/beast/hash/impl)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/net/IPAddress.h
|
||||
src/ripple/beast/net/IPAddressConversion.h
|
||||
src/ripple/beast/net/IPAddressV4.h
|
||||
src/ripple/beast/net/IPAddressV6.h
|
||||
src/ripple/beast/net/IPEndpoint.h
|
||||
DESTINATION include/ripple/beast/net)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/rfc2616.h
|
||||
@@ -293,6 +354,26 @@ install (
|
||||
src/ripple/beast/unit_test.h
|
||||
src/ripple/beast/xor_shift_engine.h
|
||||
DESTINATION include/ripple/beast)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/unit_test/amount.hpp
|
||||
src/ripple/beast/unit_test/dstream.hpp
|
||||
src/ripple/beast/unit_test/global_suites.hpp
|
||||
src/ripple/beast/unit_test/main.cpp
|
||||
src/ripple/beast/unit_test/match.hpp
|
||||
src/ripple/beast/unit_test/recorder.hpp
|
||||
src/ripple/beast/unit_test/reporter.hpp
|
||||
src/ripple/beast/unit_test/results.hpp
|
||||
src/ripple/beast/unit_test/runner.hpp
|
||||
src/ripple/beast/unit_test/suite.hpp
|
||||
src/ripple/beast/unit_test/suite_info.hpp
|
||||
src/ripple/beast/unit_test/suite_list.hpp
|
||||
src/ripple/beast/unit_test/thread.hpp
|
||||
DESTINATION include/ripple/beast/unit_test)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/unit_test/detail/const_container.hpp
|
||||
DESTINATION include/ripple/beast/unit_test/detail)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/utility/Journal.h
|
||||
@@ -386,15 +467,19 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/reporting/ReportingETL.cpp
|
||||
src/ripple/app/reporting/ETLSource.cpp
|
||||
src/ripple/app/reporting/P2pProxy.cpp
|
||||
src/ripple/app/misc/impl/AMMHelpers.cpp
|
||||
src/ripple/app/misc/impl/AMMUtils.cpp
|
||||
src/ripple/app/misc/CanonicalTXSet.cpp
|
||||
src/ripple/app/misc/FeeVoteImpl.cpp
|
||||
src/ripple/app/misc/HashRouter.cpp
|
||||
src/ripple/app/misc/NegativeUNLVote.cpp
|
||||
src/ripple/app/misc/NetworkOPs.cpp
|
||||
src/ripple/app/misc/SHAMapStoreImp.cpp
|
||||
src/ripple/app/misc/StateAccounting.cpp
|
||||
src/ripple/app/misc/detail/impl/WorkSSL.cpp
|
||||
src/ripple/app/misc/impl/AccountTxPaging.cpp
|
||||
src/ripple/app/misc/impl/AmendmentTable.cpp
|
||||
src/ripple/app/misc/impl/DeliverMax.cpp
|
||||
src/ripple/app/misc/impl/LoadFeeTrack.cpp
|
||||
src/ripple/app/misc/impl/Manifest.cpp
|
||||
src/ripple/app/misc/impl/Transaction.cpp
|
||||
@@ -411,6 +496,8 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/paths/RippleCalc.cpp
|
||||
src/ripple/app/paths/RippleLineCache.cpp
|
||||
src/ripple/app/paths/TrustLine.cpp
|
||||
src/ripple/app/paths/impl/AMMLiquidity.cpp
|
||||
src/ripple/app/paths/impl/AMMOffer.cpp
|
||||
src/ripple/app/paths/impl/BookStep.cpp
|
||||
src/ripple/app/paths/impl/DirectStep.cpp
|
||||
src/ripple/app/paths/impl/PaySteps.cpp
|
||||
@@ -427,19 +514,31 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/rdb/impl/UnitaryShard.cpp
|
||||
src/ripple/app/rdb/impl/Vacuum.cpp
|
||||
src/ripple/app/rdb/impl/Wallet.cpp
|
||||
src/ripple/app/tx/impl/AMMBid.cpp
|
||||
src/ripple/app/tx/impl/AMMCreate.cpp
|
||||
src/ripple/app/tx/impl/AMMDelete.cpp
|
||||
src/ripple/app/tx/impl/AMMDeposit.cpp
|
||||
src/ripple/app/tx/impl/AMMVote.cpp
|
||||
src/ripple/app/tx/impl/AMMWithdraw.cpp
|
||||
src/ripple/app/tx/impl/ApplyContext.cpp
|
||||
src/ripple/app/tx/impl/BookTip.cpp
|
||||
src/ripple/app/tx/impl/CancelCheck.cpp
|
||||
src/ripple/app/tx/impl/CancelOffer.cpp
|
||||
src/ripple/app/tx/impl/CashCheck.cpp
|
||||
src/ripple/app/tx/impl/Change.cpp
|
||||
src/ripple/app/tx/impl/ClaimReward.cpp
|
||||
src/ripple/app/tx/impl/Clawback.cpp
|
||||
src/ripple/app/tx/impl/CreateCheck.cpp
|
||||
src/ripple/app/tx/impl/CreateOffer.cpp
|
||||
src/ripple/app/tx/impl/CreateTicket.cpp
|
||||
src/ripple/app/tx/impl/DeleteAccount.cpp
|
||||
src/ripple/app/tx/impl/DepositPreauth.cpp
|
||||
src/ripple/app/tx/impl/DID.cpp
|
||||
src/ripple/app/tx/impl/Escrow.cpp
|
||||
src/ripple/app/tx/impl/GenesisMint.cpp
|
||||
src/ripple/app/tx/impl/Import.cpp
|
||||
src/ripple/app/tx/impl/InvariantCheck.cpp
|
||||
src/ripple/app/tx/impl/Invoke.cpp
|
||||
src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp
|
||||
src/ripple/app/tx/impl/NFTokenBurn.cpp
|
||||
src/ripple/app/tx/impl/NFTokenCancelOffer.cpp
|
||||
@@ -448,15 +547,14 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/tx/impl/OfferStream.cpp
|
||||
src/ripple/app/tx/impl/PayChan.cpp
|
||||
src/ripple/app/tx/impl/Payment.cpp
|
||||
src/ripple/app/tx/impl/Remit.cpp
|
||||
src/ripple/app/tx/impl/SetAccount.cpp
|
||||
src/ripple/app/tx/impl/SetRegularKey.cpp
|
||||
src/ripple/app/tx/impl/SetHook.cpp
|
||||
src/ripple/app/tx/impl/ClaimReward.cpp
|
||||
src/ripple/app/tx/impl/GenesisMint.cpp
|
||||
src/ripple/app/tx/impl/Import.cpp
|
||||
src/ripple/app/tx/impl/Invoke.cpp
|
||||
src/ripple/app/tx/impl/SetRemarks.cpp
|
||||
src/ripple/app/tx/impl/SetRegularKey.cpp
|
||||
src/ripple/app/tx/impl/SetSignerList.cpp
|
||||
src/ripple/app/tx/impl/SetTrust.cpp
|
||||
src/ripple/app/tx/impl/XChainBridge.cpp
|
||||
src/ripple/app/tx/impl/SignerEntries.cpp
|
||||
src/ripple/app/tx/impl/Taker.cpp
|
||||
src/ripple/app/tx/impl/Transactor.cpp
|
||||
@@ -493,9 +591,7 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/core/impl/JobQueue.cpp
|
||||
src/ripple/core/impl/LoadEvent.cpp
|
||||
src/ripple/core/impl/LoadMonitor.cpp
|
||||
src/ripple/core/impl/SNTPClock.cpp
|
||||
src/ripple/core/impl/SociDB.cpp
|
||||
src/ripple/core/impl/TimeKeeper.cpp
|
||||
src/ripple/core/impl/Workers.cpp
|
||||
src/ripple/core/Pg.cpp
|
||||
#[===============================[
|
||||
@@ -537,7 +633,9 @@ target_sources (rippled PRIVATE
|
||||
subdir: nodestore
|
||||
#]===============================]
|
||||
src/ripple/nodestore/backend/CassandraFactory.cpp
|
||||
src/ripple/nodestore/backend/RWDBFactory.cpp
|
||||
src/ripple/nodestore/backend/MemoryFactory.cpp
|
||||
src/ripple/nodestore/backend/FlatmapFactory.cpp
|
||||
src/ripple/nodestore/backend/NuDBFactory.cpp
|
||||
src/ripple/nodestore/backend/NullFactory.cpp
|
||||
src/ripple/nodestore/backend/RocksDBFactory.cpp
|
||||
@@ -599,9 +697,11 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/rpc/handlers/AccountOffers.cpp
|
||||
src/ripple/rpc/handlers/AccountNamespace.cpp
|
||||
src/ripple/rpc/handlers/AccountTx.cpp
|
||||
src/ripple/rpc/handlers/AMMInfo.cpp
|
||||
src/ripple/rpc/handlers/BlackList.cpp
|
||||
src/ripple/rpc/handlers/BookOffers.cpp
|
||||
src/ripple/rpc/handlers/CanDelete.cpp
|
||||
src/ripple/rpc/handlers/Catalogue.cpp
|
||||
src/ripple/rpc/handlers/Connect.cpp
|
||||
src/ripple/rpc/handlers/ConsensusInfo.cpp
|
||||
src/ripple/rpc/handlers/CrawlShards.cpp
|
||||
@@ -657,13 +757,14 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/rpc/handlers/ValidatorListSites.cpp
|
||||
src/ripple/rpc/handlers/Validators.cpp
|
||||
src/ripple/rpc/handlers/WalletPropose.cpp
|
||||
src/ripple/rpc/handlers/Catalogue.cpp
|
||||
src/ripple/rpc/impl/DeliveredAmount.cpp
|
||||
src/ripple/rpc/impl/Handler.cpp
|
||||
src/ripple/rpc/impl/LegacyPathFind.cpp
|
||||
src/ripple/rpc/impl/RPCHandler.cpp
|
||||
src/ripple/rpc/impl/RPCHelpers.cpp
|
||||
src/ripple/rpc/impl/Role.cpp
|
||||
src/ripple/rpc/impl/ServerHandlerImp.cpp
|
||||
src/ripple/rpc/impl/ServerHandler.cpp
|
||||
src/ripple/rpc/impl/ShardArchiveHandler.cpp
|
||||
src/ripple/rpc/impl/ShardVerificationScheduler.cpp
|
||||
src/ripple/rpc/impl/Status.cpp
|
||||
@@ -703,13 +804,18 @@ if (tests)
|
||||
src/test/app/AccountDelete_test.cpp
|
||||
src/test/app/AccountTxPaging_test.cpp
|
||||
src/test/app/AmendmentTable_test.cpp
|
||||
src/test/app/AMM_test.cpp
|
||||
src/test/app/AMMCalc_test.cpp
|
||||
src/test/app/AMMExtended_test.cpp
|
||||
src/test/app/BaseFee_test.cpp
|
||||
src/test/app/Check_test.cpp
|
||||
src/test/app/ClaimReward_test.cpp
|
||||
src/test/app/Clawback_test.cpp
|
||||
src/test/app/CrossingLimits_test.cpp
|
||||
src/test/app/DeliverMin_test.cpp
|
||||
src/test/app/DepositAuth_test.cpp
|
||||
src/test/app/Discrepancy_test.cpp
|
||||
src/test/app/DID_test.cpp
|
||||
src/test/app/DNS_test.cpp
|
||||
src/test/app/Escrow_test.cpp
|
||||
src/test/app/FeeVote_test.cpp
|
||||
@@ -739,14 +845,21 @@ if (tests)
|
||||
src/test/app/PseudoTx_test.cpp
|
||||
src/test/app/RCLCensorshipDetector_test.cpp
|
||||
src/test/app/RCLValidations_test.cpp
|
||||
src/test/app/ReducedOffer_test.cpp
|
||||
src/test/app/Regression_test.cpp
|
||||
src/test/app/Remit_test.cpp
|
||||
src/test/app/SHAMapStore_test.cpp
|
||||
src/test/app/XChain_test.cpp
|
||||
src/test/app/SetAuth_test.cpp
|
||||
src/test/app/SetHook_test.cpp
|
||||
src/test/app/SetHookTSH_test.cpp
|
||||
src/test/app/SetRegularKey_test.cpp
|
||||
src/test/app/SetRemarks_test.cpp
|
||||
src/test/app/SetTrust_test.cpp
|
||||
src/test/app/Taker_test.cpp
|
||||
src/test/app/TheoreticalQuality_test.cpp
|
||||
src/test/app/Ticket_test.cpp
|
||||
src/test/app/Touch_test.cpp
|
||||
src/test/app/Transaction_ordering_test.cpp
|
||||
src/test/app/TrustAndBalance_test.cpp
|
||||
src/test/app/TxQ_test.cpp
|
||||
@@ -754,8 +867,6 @@ if (tests)
|
||||
src/test/app/ValidatorKeys_test.cpp
|
||||
src/test/app/ValidatorList_test.cpp
|
||||
src/test/app/ValidatorSite_test.cpp
|
||||
src/test/app/SetHook_test.cpp
|
||||
src/test/app/SetHookTSH_test.cpp
|
||||
src/test/app/Wildcard_test.cpp
|
||||
src/test/app/XahauGenesis_test.cpp
|
||||
src/test/app/tx/apply_test.cpp
|
||||
@@ -848,6 +959,7 @@ if (tests)
|
||||
src/test/json/Output_test.cpp
|
||||
src/test/json/Writer_test.cpp
|
||||
src/test/json/json_value_test.cpp
|
||||
src/test/json/MultivarJson_test.cpp
|
||||
#[===============================[
|
||||
test sources:
|
||||
subdir: jtx
|
||||
@@ -855,20 +967,23 @@ if (tests)
|
||||
src/test/jtx/Env_test.cpp
|
||||
src/test/jtx/WSClient_test.cpp
|
||||
src/test/jtx/impl/Account.cpp
|
||||
src/test/jtx/impl/AMM.cpp
|
||||
src/test/jtx/impl/AMMTest.cpp
|
||||
src/test/jtx/impl/Env.cpp
|
||||
src/test/jtx/impl/JSONRPCClient.cpp
|
||||
src/test/jtx/impl/ManualTimeKeeper.cpp
|
||||
src/test/jtx/impl/TestHelpers.cpp
|
||||
src/test/jtx/impl/WSClient.cpp
|
||||
src/test/jtx/impl/hook.cpp
|
||||
src/test/jtx/impl/acctdelete.cpp
|
||||
src/test/jtx/impl/account_txn_id.cpp
|
||||
src/test/jtx/impl/amount.cpp
|
||||
src/test/jtx/impl/attester.cpp
|
||||
src/test/jtx/impl/balance.cpp
|
||||
src/test/jtx/impl/check.cpp
|
||||
src/test/jtx/impl/delivermin.cpp
|
||||
src/test/jtx/impl/deposit.cpp
|
||||
src/test/jtx/impl/did.cpp
|
||||
src/test/jtx/impl/envconfig.cpp
|
||||
src/test/jtx/impl/escrow.cpp
|
||||
src/test/jtx/impl/fee.cpp
|
||||
src/test/jtx/impl/flags.cpp
|
||||
src/test/jtx/impl/genesis.cpp
|
||||
@@ -889,8 +1004,11 @@ if (tests)
|
||||
src/test/jtx/impl/rate.cpp
|
||||
src/test/jtx/impl/regkey.cpp
|
||||
src/test/jtx/impl/reward.cpp
|
||||
src/test/jtx/impl/remarks.cpp
|
||||
src/test/jtx/impl/remit.cpp
|
||||
src/test/jtx/impl/sendmax.cpp
|
||||
src/test/jtx/impl/seq.cpp
|
||||
src/test/jtx/impl/xchain_bridge.cpp
|
||||
src/test/jtx/impl/sig.cpp
|
||||
src/test/jtx/impl/tag.cpp
|
||||
src/test/jtx/impl/ticket.cpp
|
||||
@@ -980,10 +1098,13 @@ if (tests)
|
||||
src/test/rpc/AccountLinesRPC_test.cpp
|
||||
src/test/rpc/AccountObjects_test.cpp
|
||||
src/test/rpc/AccountOffers_test.cpp
|
||||
src/test/rpc/AccountNamespace_test.cpp
|
||||
src/test/rpc/AccountSet_test.cpp
|
||||
src/test/rpc/AccountTx_test.cpp
|
||||
src/test/rpc/AmendmentBlocked_test.cpp
|
||||
src/test/rpc/AMMInfo_test.cpp
|
||||
src/test/rpc/Book_test.cpp
|
||||
src/test/rpc/Catalogue_test.cpp
|
||||
src/test/rpc/DepositAuthorized_test.cpp
|
||||
src/test/rpc/DeliveredAmount_test.cpp
|
||||
src/test/rpc/Feature_test.cpp
|
||||
@@ -993,6 +1114,7 @@ if (tests)
|
||||
src/test/rpc/KeyGeneration_test.cpp
|
||||
src/test/rpc/LedgerClosed_test.cpp
|
||||
src/test/rpc/LedgerData_test.cpp
|
||||
src/test/rpc/LedgerHeader_test.cpp
|
||||
src/test/rpc/LedgerRPC_test.cpp
|
||||
src/test/rpc/LedgerRequestRPC_test.cpp
|
||||
src/test/rpc/ManifestRPC_test.cpp
|
||||
@@ -1017,6 +1139,7 @@ if (tests)
|
||||
src/test/rpc/ValidatorInfo_test.cpp
|
||||
src/test/rpc/ValidatorRPC_test.cpp
|
||||
src/test/rpc/Version_test.cpp
|
||||
src/test/rpc/Handler_test.cpp
|
||||
#[===============================[
|
||||
test sources:
|
||||
subdir: server
|
||||
|
||||
@@ -2,97 +2,37 @@
|
||||
coverage report target
|
||||
#]===================================================================]
|
||||
|
||||
if (coverage)
|
||||
if (is_clang)
|
||||
if (APPLE)
|
||||
execute_process (COMMAND xcrun -f llvm-profdata
|
||||
OUTPUT_VARIABLE LLVM_PROFDATA
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
else ()
|
||||
find_program (LLVM_PROFDATA llvm-profdata)
|
||||
endif ()
|
||||
if (NOT LLVM_PROFDATA)
|
||||
message (WARNING "unable to find llvm-profdata - skipping coverage_report target")
|
||||
endif ()
|
||||
if(NOT coverage)
|
||||
message(FATAL_ERROR "Code coverage not enabled! Aborting ...")
|
||||
endif()
|
||||
|
||||
if (APPLE)
|
||||
execute_process (COMMAND xcrun -f llvm-cov
|
||||
OUTPUT_VARIABLE LLVM_COV
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
else ()
|
||||
find_program (LLVM_COV llvm-cov)
|
||||
endif ()
|
||||
if (NOT LLVM_COV)
|
||||
message (WARNING "unable to find llvm-cov - skipping coverage_report target")
|
||||
endif ()
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
|
||||
message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag")
|
||||
return()
|
||||
endif()
|
||||
|
||||
set (extract_pattern "")
|
||||
if (coverage_core_only)
|
||||
set (extract_pattern "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/")
|
||||
endif ()
|
||||
include(CodeCoverage)
|
||||
|
||||
if (LLVM_COV AND LLVM_PROFDATA)
|
||||
add_custom_target (coverage_report
|
||||
USES_TERMINAL
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage - results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests."
|
||||
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
|
||||
COMMAND ${LLVM_PROFDATA}
|
||||
merge -sparse default.profraw -o rip.profdata
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
|
||||
COMMAND ${LLVM_COV}
|
||||
report -instr-profile=rip.profdata
|
||||
$<TARGET_FILE:rippled> ${extract_pattern}
|
||||
# generate html report
|
||||
COMMAND ${LLVM_COV}
|
||||
show -format=html -output-dir=${CMAKE_BINARY_DIR}/coverage
|
||||
-instr-profile=rip.profdata
|
||||
$<TARGET_FILE:rippled> ${extract_pattern}
|
||||
BYPRODUCTS coverage/index.html)
|
||||
endif ()
|
||||
elseif (is_gcc)
|
||||
find_program (LCOV lcov)
|
||||
if (NOT LCOV)
|
||||
message (WARNING "unable to find lcov - skipping coverage_report target")
|
||||
endif ()
|
||||
# The instructions for these commands come from the `CodeCoverage` module,
|
||||
# which was copied from https://github.com/bilke/cmake-modules, commit fb7d2a3,
|
||||
# then locally changed (see CHANGES: section in `CodeCoverage.cmake`)
|
||||
|
||||
find_program (GENHTML genhtml)
|
||||
if (NOT GENHTML)
|
||||
message (WARNING "unable to find genhtml - skipping coverage_report target")
|
||||
endif ()
|
||||
set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args})
|
||||
if(NOT GCOVR_ADDITIONAL_ARGS STREQUAL "")
|
||||
separate_arguments(GCOVR_ADDITIONAL_ARGS)
|
||||
endif()
|
||||
|
||||
set (extract_pattern "*")
|
||||
if (coverage_core_only)
|
||||
set (extract_pattern "*/src/ripple/*")
|
||||
endif ()
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS
|
||||
--exclude-throw-branches
|
||||
--exclude-noncode-lines
|
||||
--exclude-unreachable-branches -s
|
||||
-j ${coverage_test_parallelism})
|
||||
|
||||
if (LCOV AND GENHTML)
|
||||
add_custom_target (coverage_report
|
||||
USES_TERMINAL
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage- results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
|
||||
# create baseline info file
|
||||
COMMAND ${LCOV}
|
||||
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -i -o baseline.info
|
||||
| grep -v "ignoring data for external file"
|
||||
# run tests
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests for coverage report."
|
||||
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
|
||||
# Create test coverage data file
|
||||
COMMAND ${LCOV}
|
||||
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -o tests.info
|
||||
| grep -v "ignoring data for external file"
|
||||
# Combine baseline and test coverage data
|
||||
COMMAND ${LCOV}
|
||||
-a baseline.info -a tests.info -o lcov-all.info
|
||||
# extract our files
|
||||
COMMAND ${LCOV}
|
||||
-e lcov-all.info "${extract_pattern}" -o lcov.info
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
|
||||
COMMAND ${LCOV} --summary lcov.info
|
||||
# generate HTML report
|
||||
COMMAND ${GENHTML}
|
||||
-o ${CMAKE_BINARY_DIR}/coverage lcov.info
|
||||
BYPRODUCTS coverage/index.html)
|
||||
endif ()
|
||||
endif ()
|
||||
endif ()
|
||||
setup_target_for_coverage_gcovr(
|
||||
NAME coverage
|
||||
FORMAT ${coverage_format}
|
||||
EXECUTABLE rippled
|
||||
EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
|
||||
EXCLUDE "src/test" "${CMAKE_BINARY_DIR}/proto_gen" "${CMAKE_BINARY_DIR}/proto_gen_grpc"
|
||||
DEPENDENCIES rippled
|
||||
)
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
#[===================================================================[
|
||||
docs target (optional)
|
||||
#]===================================================================]
|
||||
|
||||
# Early return if the `docs` directory is missing,
|
||||
# e.g. when we are building a Conan package.
|
||||
if(NOT EXISTS docs)
|
||||
return()
|
||||
endif()
|
||||
|
||||
if (tests)
|
||||
find_package (Doxygen)
|
||||
if (NOT TARGET Doxygen::doxygen)
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
|
||||
install (
|
||||
TARGETS
|
||||
ed25519-donna
|
||||
common
|
||||
opts
|
||||
ripple_syslibs
|
||||
@@ -16,17 +15,6 @@ install (
|
||||
RUNTIME DESTINATION bin
|
||||
INCLUDES DESTINATION include)
|
||||
|
||||
if(${INSTALL_SECP256K1})
|
||||
install (
|
||||
TARGETS
|
||||
secp256k1
|
||||
EXPORT RippleExports
|
||||
LIBRARY DESTINATION lib
|
||||
ARCHIVE DESTINATION lib
|
||||
RUNTIME DESTINATION bin
|
||||
INCLUDES DESTINATION include)
|
||||
endif()
|
||||
|
||||
install (EXPORT RippleExports
|
||||
FILE RippleTargets.cmake
|
||||
NAMESPACE Ripple::
|
||||
|
||||
@@ -23,15 +23,15 @@ target_compile_options (opts
|
||||
INTERFACE
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
|
||||
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage>
|
||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
|
||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
|
||||
$<$<BOOL:${profile}>:-pg>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
||||
|
||||
target_link_libraries (opts
|
||||
INTERFACE
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage>
|
||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
|
||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
|
||||
$<$<BOOL:${profile}>:-pg>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ if (is_multiconfig)
|
||||
file(GLOB md_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} CONFIGURE_DEPENDS
|
||||
*.md)
|
||||
LIST(APPEND all_sources ${md_files})
|
||||
foreach (_target secp256k1 ed25519-donna pbufs xrpl_core rippled)
|
||||
foreach (_target secp256k1::secp256k1 ed25519::ed25519 xrpl_core rippled)
|
||||
get_target_property (_type ${_target} TYPE)
|
||||
if(_type STREQUAL "INTERFACE_LIBRARY")
|
||||
continue()
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
convenience variables and sanity checks
|
||||
#]===================================================================]
|
||||
|
||||
include(ProcessorCount)
|
||||
|
||||
if (NOT ep_procs)
|
||||
ProcessorCount(ep_procs)
|
||||
if (ep_procs GREATER 1)
|
||||
|
||||
@@ -2,121 +2,129 @@
|
||||
declare user options/settings
|
||||
#]===================================================================]
|
||||
|
||||
option (assert "Enables asserts, even in release builds" OFF)
|
||||
include(ProcessorCount)
|
||||
|
||||
option (reporting "Build rippled with reporting mode enabled" OFF)
|
||||
ProcessorCount(PROCESSOR_COUNT)
|
||||
|
||||
option (tests "Build tests" ON)
|
||||
option(assert "Enables asserts, even in release builds" OFF)
|
||||
|
||||
option (unity "Creates a build using UNITY support in cmake. This is the default" ON)
|
||||
if (unity)
|
||||
if (NOT is_ci)
|
||||
set (CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
|
||||
endif ()
|
||||
endif ()
|
||||
if (is_gcc OR is_clang)
|
||||
option (coverage "Generates coverage info." OFF)
|
||||
option (profile "Add profiling flags" OFF)
|
||||
set (coverage_test "" CACHE STRING
|
||||
option(reporting "Build rippled with reporting mode enabled" OFF)
|
||||
|
||||
option(tests "Build tests" ON)
|
||||
|
||||
option(unity "Creates a build using UNITY support in cmake. This is the default" ON)
|
||||
if(unity)
|
||||
if(NOT is_ci)
|
||||
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
|
||||
endif()
|
||||
endif()
|
||||
if(is_gcc OR is_clang)
|
||||
option(coverage "Generates coverage info." OFF)
|
||||
option(profile "Add profiling flags" OFF)
|
||||
set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
|
||||
"Unit tests parallelism for the purpose of coverage report.")
|
||||
set(coverage_format "html-details" CACHE STRING
|
||||
"Output format of the coverage report.")
|
||||
set(coverage_extra_args "" CACHE STRING
|
||||
"Additional arguments to pass to gcovr.")
|
||||
set(coverage_test "" CACHE STRING
|
||||
"On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
|
||||
if (coverage_test AND NOT coverage)
|
||||
set (coverage ON CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif ()
|
||||
option (coverage_core_only
|
||||
"Include only src/ripple files when generating coverage report. \
|
||||
Set to OFF to include all sources in coverage report."
|
||||
ON)
|
||||
option (wextra "compile with extra gcc/clang warnings enabled" ON)
|
||||
else ()
|
||||
set (profile OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set (coverage OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set (wextra OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif ()
|
||||
if (is_linux)
|
||||
option (BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
|
||||
option (static "link protobuf, openssl, libc++, and boost statically" ON)
|
||||
option (perf "Enables flags that assist with perf recording" OFF)
|
||||
option (use_gold "enables detection of gold (binutils) linker" ON)
|
||||
else ()
|
||||
if(coverage_test AND NOT coverage)
|
||||
set(coverage ON CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
option(wextra "compile with extra gcc/clang warnings enabled" ON)
|
||||
else()
|
||||
set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
if(is_linux)
|
||||
option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
|
||||
option(static "link protobuf, openssl, libc++, and boost statically" ON)
|
||||
option(perf "Enables flags that assist with perf recording" OFF)
|
||||
option(use_gold "enables detection of gold (binutils) linker" ON)
|
||||
option(use_mold "enables detection of mold (binutils) linker" ON)
|
||||
else()
|
||||
# we are not ready to allow shared-libs on windows because it would require
|
||||
# export declarations. On macos it's more feasible, but static openssl
|
||||
# produces odd linker errors, thus we disable shared lib builds for now.
|
||||
set (BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
|
||||
set (static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
|
||||
set (perf OFF CACHE BOOL "perf flags, linux only" FORCE)
|
||||
set (use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
|
||||
endif ()
|
||||
if (is_clang)
|
||||
option (use_lld "enables detection of lld linker" ON)
|
||||
else ()
|
||||
set (use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
|
||||
endif ()
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif()
if(is_clang)
  option(use_lld "enables detection of lld linker" ON)
else()
  set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif()
option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf
  "Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_grpc
  "Force a local build of gRPC instead of looking for an installed version." OFF)

# this one is a string and therefore can't be an option
set(san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
set_property(CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
if(san)
  string(TOLOWER ${san} san)
  set(SAN_FLAG "-fsanitize=${san}")
  set(SAN_LIB "")
  if(is_gcc)
    if(san STREQUAL "address")
      set(SAN_LIB "asan")
    elseif(san STREQUAL "thread")
      set(SAN_LIB "tsan")
    elseif(san STREQUAL "memory")
      set(SAN_LIB "msan")
    elseif(san STREQUAL "undefined")
      set(SAN_LIB "ubsan")
    endif()
  endif()
  set(_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
  set(CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
  check_cxx_compiler_flag(${SAN_FLAG} COMPILER_SUPPORTS_SAN)
  set(CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
  if(NOT COMPILER_SUPPORTS_SAN)
    message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
  endif()
endif()
set(container_label "" CACHE STRING "tag to use for package building containers")
option(packages_only
  "ONLY generate package building targets. This is special use-case and almost \
certainly not what you want. Use with caution as you won't be able to build \
any compiled targets locally." OFF)
option(have_package_container
  "Sometimes you already have the tagged container you want to use for package \
building and you don't want docker to rebuild it. This flag will detach the \
dependency of the package build from the container build. It's an advanced \
use case and most likely you should not be touching this flag." OFF)

# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline
  "Prevents unit test definitions from being inserted into global table"
  OFF)
option(single_io_service_thread
  "Restricts the number of threads calling io_service::run to one. \
This can be useful when debugging."
  OFF)
option(boost_show_deprecated
  "Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
  OFF)
option(beast_hashers
  "Use local implementations for sha/ripemd hashes (experimental, not recommended)"
  OFF)

if(WIN32)
  option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else()
  set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif()
if(coverage)
  message(STATUS "coverage build requested - forcing Debug build")
  set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif()
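These cache entries are all toggled at configure time. As a minimal sketch (out-of-source build directory and generator left to defaults; the option names are the ones defined above), a sanitizer-instrumented debug configure might look like:

```
# hedged example: out-of-source configure enabling one sanitizer plus jemalloc;
# san accepts undefined, memory, address or thread (see the STRINGS property above)
mkdir -p build && cd build
cmake .. -DCMAKE_BUILD_TYPE=Debug -Dsan=address -Djemalloc=ON
cmake --build . -j "$(nproc)"
```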
54
Builds/CMake/conan/Boost.cmake
Normal file
@@ -0,0 +1,54 @@
find_package(Boost 1.83 REQUIRED
  COMPONENTS
    chrono
    container
    context
    coroutine
    date_time
    filesystem
    json
    program_options
    regex
    system
    thread
)

add_library(ripple_boost INTERFACE)
add_library(Ripple::boost ALIAS ripple_boost)
if(XCODE)
  target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
  target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
else()
  target_include_directories(ripple_boost SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
endif()

target_link_libraries(ripple_boost
  INTERFACE
    Boost::boost
    Boost::chrono
    Boost::container
    Boost::coroutine
    Boost::date_time
    Boost::filesystem
    Boost::json
    Boost::program_options
    Boost::regex
    Boost::system
    Boost::iostreams
    Boost::thread)
if(Boost_COMPILER)
  target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
endif()
if(san AND is_clang)
  # TODO: gcc does not support -fsanitize-blacklist...can we do something else
  # for gcc ?
  if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
    get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
  endif()
  message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
  file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
  target_compile_options(opts
    INTERFACE
      # ignore boost headers for sanitizing
      -fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
endif()
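This module assumes the Conan toolchain has already made Boost 1.83 visible to `find_package`. A hedged sketch of that flow, assuming Conan 2.x and a conanfile at the repository root (folder names are illustrative):

```
# illustrative Conan 2 flow; adjust profile/settings to your environment
conan install . --output-folder=build --build=missing -s build_type=Release
cmake -B build -S . \
  -DCMAKE_TOOLCHAIN_FILE=build/conan_toolchain.cmake \
  -DCMAKE_BUILD_TYPE=Release
```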
27
Builds/CMake/conan/Protobuf.cmake
Normal file
@@ -0,0 +1,27 @@
find_package(Protobuf 3.8)

set(output_dir ${CMAKE_BINARY_DIR}/proto_gen)
file(MAKE_DIRECTORY ${output_dir})
set(ccbd ${CMAKE_CURRENT_BINARY_DIR})
set(CMAKE_CURRENT_BINARY_DIR ${output_dir})
protobuf_generate_cpp(PROTO_SRCS PROTO_HDRS src/ripple/proto/ripple.proto)
set(CMAKE_CURRENT_BINARY_DIR ${ccbd})

target_include_directories(xrpl_core SYSTEM PUBLIC
  # The generated implementation imports the header relative to the output
  # directory.
  $<BUILD_INTERFACE:${output_dir}>
  $<BUILD_INTERFACE:${output_dir}/src>
)
target_sources(xrpl_core PRIVATE ${output_dir}/src/ripple/proto/ripple.pb.cc)
install(
  FILES ${output_dir}/src/ripple/proto/ripple.pb.h
  DESTINATION include/ripple/proto)
target_link_libraries(xrpl_core PUBLIC protobuf::libprotobuf)
target_compile_options(xrpl_core
  PUBLIC
    $<$<BOOL:${XCODE}>:
      --system-header-prefix="google/protobuf"
      -Wno-deprecated-dynamic-exception-spec
    >
)
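`protobuf_generate_cpp` is redirected into `proto_gen` by temporarily overriding `CMAKE_CURRENT_BINARY_DIR`; run by hand, the equivalent `protoc` call (paths relative to the repository root, output directory illustrative) is roughly:

```
# rough hand-run equivalent of the protobuf_generate_cpp() call above
mkdir -p build/proto_gen
protoc --cpp_out=build/proto_gen -I . src/ripple/proto/ripple.proto
# emits build/proto_gen/src/ripple/proto/ripple.pb.{h,cc}, the paths wired into xrpl_core
```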
82
Builds/CMake/conan/gRPC.cmake
Normal file
@@ -0,0 +1,82 @@
find_package(gRPC 1.23)

#[=================================[
   generate protobuf sources for
   grpc defs and bundle into a
   static lib
#]=================================]
set(output_dir "${CMAKE_BINARY_DIR}/proto_gen_grpc")
set(GRPC_GEN_DIR "${output_dir}/ripple/proto")
file(MAKE_DIRECTORY ${GRPC_GEN_DIR})
set(GRPC_PROTO_SRCS)
set(GRPC_PROTO_HDRS)
set(GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
file(GLOB_RECURSE GRPC_DEFINITION_FILES "${GRPC_PROTO_ROOT}/*.proto")
foreach(file ${GRPC_DEFINITION_FILES})
  # /home/user/rippled/src/ripple/proto/org/.../v1/get_ledger.proto
  get_filename_component(_abs_file ${file} ABSOLUTE)
  # /home/user/rippled/src/ripple/proto/org/.../v1
  get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
  # get_ledger
  get_filename_component(_basename ${file} NAME_WE)
  # /home/user/rippled/src/ripple/proto
  get_filename_component(_proto_inc ${GRPC_PROTO_ROOT} DIRECTORY) # updir one level
  # org/.../v1/get_ledger.proto
  file(RELATIVE_PATH _rel_root_file ${_proto_inc} ${_abs_file})
  # org/.../v1
  get_filename_component(_rel_root_dir ${_rel_root_file} DIRECTORY)
  # src/ripple/proto/org/.../v1
  file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})

  # .cmake/proto_gen_grpc/ripple/proto/org/.../v1/get_ledger.grpc.pb.cc
  set(src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
  set(src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
  set(hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
  set(hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
  add_custom_command(
    OUTPUT ${src_1} ${src_2} ${hdr_1} ${hdr_2}
    COMMAND protobuf::protoc
    ARGS --grpc_out=${GRPC_GEN_DIR}
         --cpp_out=${GRPC_GEN_DIR}
         --plugin=protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
         -I ${_proto_inc} -I ${_rel_dir}
         ${_abs_file}
    DEPENDS ${_abs_file} protobuf::protoc gRPC::grpc_cpp_plugin
    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
    COMMENT "Running gRPC C++ protocol buffer compiler on ${file}"
    VERBATIM)
  set_source_files_properties(${src_1} ${src_2} ${hdr_1} ${hdr_2} PROPERTIES
    GENERATED TRUE
    SKIP_UNITY_BUILD_INCLUSION ON
  )
  list(APPEND GRPC_PROTO_SRCS ${src_1} ${src_2})
  list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2})
endforeach()

target_include_directories(xrpl_core SYSTEM PUBLIC
  $<BUILD_INTERFACE:${output_dir}>
  $<BUILD_INTERFACE:${output_dir}/ripple/proto>
  # The generated sources include headers relative to this path. Fix it later.
  $<INSTALL_INTERFACE:include/ripple/proto>
)
target_sources(xrpl_core PRIVATE ${GRPC_PROTO_SRCS})
install(
  DIRECTORY ${output_dir}/ripple
  DESTINATION include/
  FILES_MATCHING PATTERN "*.h"
)
target_link_libraries(xrpl_core PUBLIC
  "gRPC::grpc++"
  # libgrpc is missing references.
  absl::random_random
)
target_compile_options(xrpl_core
  PRIVATE
    $<$<BOOL:${MSVC}>:-wd4065>
    $<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
  PUBLIC
    $<$<BOOL:${MSVC}>:-wd4996>
    $<$<BOOL:${XCODE}>:
      --system-header-prefix="google/protobuf"
      -Wno-deprecated-dynamic-exception-spec
    >)
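Each `add_custom_command` above boils down to one `protoc` invocation with the gRPC C++ plugin. A hedged, hand-run sketch for a single definition file (the exact .proto path and the plugin location are placeholders):

```
# placeholder .proto path; the real files live under src/ripple/proto/org/
proto=src/ripple/proto/org/path/to/v1/get_ledger.proto
mkdir -p build/proto_gen_grpc/ripple/proto
protoc \
  --grpc_out=build/proto_gen_grpc/ripple/proto \
  --cpp_out=build/proto_gen_grpc/ripple/proto \
  --plugin=protoc-gen-grpc="$(command -v grpc_cpp_plugin)" \
  -I src/ripple/proto \
  "$proto"
```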
@@ -1,14 +1,16 @@
|
||||
#[===================================================================[
|
||||
NIH dep: boost
|
||||
#]===================================================================]
|
||||
|
||||
if((NOT DEFINED BOOST_ROOT) AND(DEFINED ENV{BOOST_ROOT}))
|
||||
set(BOOST_ROOT $ENV{BOOST_ROOT})
|
||||
endif()
|
||||
if((NOT DEFINED BOOST_LIBRARYDIR) AND(DEFINED ENV{BOOST_LIBRARYDIR}))
|
||||
set(BOOST_LIBRARYDIR $ENV{BOOST_LIBRARYDIR})
|
||||
endif()
|
||||
file(TO_CMAKE_PATH "${BOOST_ROOT}" BOOST_ROOT)
|
||||
if(WIN32 OR CYGWIN)
|
||||
# Workaround for MSVC having two boost versions - x86 and x64 on same PC in stage folders
|
||||
if(DEFINED BOOST_ROOT)
|
||||
if((NOT DEFINED BOOST_LIBRARYDIR) AND (DEFINED BOOST_ROOT))
|
||||
if(IS_DIRECTORY ${BOOST_ROOT}/stage64/lib)
|
||||
set(BOOST_LIBRARYDIR ${BOOST_ROOT}/stage64/lib)
|
||||
elseif(IS_DIRECTORY ${BOOST_ROOT}/stage/lib)
|
||||
@@ -44,7 +46,7 @@ else()
|
||||
endif()
|
||||
# TBD:
|
||||
# Boost_USE_DEBUG_RUNTIME: When ON, uses Boost libraries linked against the
|
||||
find_package(Boost 1.70 REQUIRED
|
||||
find_package(Boost 1.86 REQUIRED
|
||||
COMPONENTS
|
||||
chrono
|
||||
container
|
||||
@@ -52,9 +54,11 @@ find_package(Boost 1.70 REQUIRED
|
||||
coroutine
|
||||
date_time
|
||||
filesystem
|
||||
json
|
||||
program_options
|
||||
regex
|
||||
system
|
||||
iostreams
|
||||
thread)
|
||||
|
||||
add_library(ripple_boost INTERFACE)
|
||||
@@ -74,6 +78,8 @@ target_link_libraries(ripple_boost
|
||||
Boost::coroutine
|
||||
Boost::date_time
|
||||
Boost::filesystem
|
||||
Boost::json
|
||||
Boost::iostreams
|
||||
Boost::program_options
|
||||
Boost::regex
|
||||
Boost::system
|
||||
|
||||
@@ -248,6 +248,7 @@ include(FindPackageHandleStandardArgs)
|
||||
# Save project's policies
|
||||
cmake_policy(PUSH)
|
||||
cmake_policy(SET CMP0057 NEW) # if IN_LIST
|
||||
#cmake_policy(SET CMP0144 NEW)
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
# Before we go searching, check whether a boost cmake package is available, unless
|
||||
@@ -969,7 +970,24 @@ function(_Boost_COMPONENT_DEPENDENCIES component _ret)
|
||||
set(_Boost_WAVE_DEPENDENCIES filesystem serialization thread chrono date_time atomic)
|
||||
set(_Boost_WSERIALIZATION_DEPENDENCIES serialization)
|
||||
endif()
|
||||
if(NOT Boost_VERSION_STRING VERSION_LESS 1.77.0)
|
||||
|
||||
# Special handling for Boost 1.86.0 and higher
|
||||
if(NOT Boost_VERSION_STRING VERSION_LESS 1.86.0)
|
||||
# Explicitly set these for Boost 1.86
|
||||
set(_Boost_IOSTREAMS_DEPENDENCIES "") # No dependencies for iostreams in 1.86
|
||||
|
||||
# Debug output to help diagnose the issue
|
||||
if(Boost_DEBUG)
|
||||
message(STATUS "Using special dependency settings for Boost 1.86.0+")
|
||||
message(STATUS "Component: ${component}, uppercomponent: ${uppercomponent}")
|
||||
message(STATUS "Boost_VERSION_STRING: ${Boost_VERSION_STRING}")
|
||||
message(STATUS "BOOST_ROOT: $ENV{BOOST_ROOT}")
|
||||
message(STATUS "BOOST_LIBRARYDIR: $ENV{BOOST_LIBRARYDIR}")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Only show warning for versions beyond what we've defined
|
||||
if(NOT Boost_VERSION_STRING VERSION_LESS 1.87.0)
|
||||
message(WARNING "New Boost version may have incorrect or missing dependencies and imported targets")
|
||||
endif()
|
||||
endif()
|
||||
@@ -1879,6 +1897,18 @@ foreach(COMPONENT ${Boost_FIND_COMPONENTS})
|
||||
list(INSERT _boost_LIBRARY_SEARCH_DIRS_RELEASE 0 ${Boost_LIBRARY_DIR_DEBUG})
|
||||
endif()
|
||||
|
||||
if(NOT Boost_VERSION_STRING VERSION_LESS 1.86.0)
|
||||
if(BOOST_LIBRARYDIR AND EXISTS "${BOOST_LIBRARYDIR}")
|
||||
# Clear existing search paths and use only BOOST_LIBRARYDIR
|
||||
set(_boost_LIBRARY_SEARCH_DIRS_RELEASE "${BOOST_LIBRARYDIR}" NO_DEFAULT_PATH)
|
||||
set(_boost_LIBRARY_SEARCH_DIRS_DEBUG "${BOOST_LIBRARYDIR}" NO_DEFAULT_PATH)
|
||||
|
||||
if(Boost_DEBUG)
|
||||
message(STATUS "Boost 1.86: Setting library search dirs to BOOST_LIBRARYDIR: ${BOOST_LIBRARYDIR}")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Avoid passing backslashes to _Boost_FIND_LIBRARY due to macro re-parsing.
|
||||
string(REPLACE "\\" "/" _boost_LIBRARY_SEARCH_DIRS_tmp "${_boost_LIBRARY_SEARCH_DIRS_RELEASE}")
|
||||
|
||||
|
||||
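In practice the patched FindBoost is steered through `BOOST_ROOT`/`BOOST_LIBRARYDIR`; for Boost 1.86 and later it narrows the library search to `BOOST_LIBRARYDIR` when that variable is set. A hedged configure sketch (the prefixes mirror the ones used later in `build-full.sh`):

```
# prefixes taken from the container build below; adjust for your own Boost install
export BOOST_ROOT=/usr/local/src/boost_1_86_0
export BOOST_LIBRARYDIR=/usr/local/lib
cmake -B build -S . -DBoost_DEBUG=ON   # Boost_DEBUG prints the dependency decisions above
```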
@@ -129,27 +129,28 @@ else ()
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
file (MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/proto_gen)
|
||||
set (save_CBD ${CMAKE_CURRENT_BINARY_DIR})
|
||||
set (CMAKE_CURRENT_BINARY_DIR ${CMAKE_BINARY_DIR}/proto_gen)
|
||||
protobuf_generate_cpp (
|
||||
PROTO_SRCS
|
||||
PROTO_HDRS
|
||||
src/ripple/proto/ripple.proto)
|
||||
set (CMAKE_CURRENT_BINARY_DIR ${save_CBD})
|
||||
set(output_dir ${CMAKE_BINARY_DIR}/proto_gen)
|
||||
file(MAKE_DIRECTORY ${output_dir})
|
||||
set(ccbd ${CMAKE_CURRENT_BINARY_DIR})
|
||||
set(CMAKE_CURRENT_BINARY_DIR ${output_dir})
|
||||
protobuf_generate_cpp(PROTO_SRCS PROTO_HDRS src/ripple/proto/ripple.proto)
|
||||
set(CMAKE_CURRENT_BINARY_DIR ${ccbd})
|
||||
|
||||
add_library (pbufs STATIC ${PROTO_SRCS} ${PROTO_HDRS})
|
||||
|
||||
target_include_directories (pbufs PRIVATE src)
|
||||
target_include_directories (pbufs
|
||||
SYSTEM PUBLIC ${CMAKE_BINARY_DIR}/proto_gen)
|
||||
target_link_libraries (pbufs protobuf::libprotobuf)
|
||||
target_compile_options (pbufs
|
||||
target_include_directories(xrpl_core SYSTEM PUBLIC
|
||||
# The generated implementation imports the header relative to the output
|
||||
# directory.
|
||||
$<BUILD_INTERFACE:${output_dir}>
|
||||
$<BUILD_INTERFACE:${output_dir}/src>
|
||||
)
|
||||
target_sources(xrpl_core PRIVATE ${output_dir}/src/ripple/proto/ripple.pb.cc)
|
||||
install(
|
||||
FILES ${output_dir}/src/ripple/proto/ripple.pb.h
|
||||
DESTINATION include/ripple/proto)
|
||||
target_link_libraries(xrpl_core PUBLIC protobuf::libprotobuf)
|
||||
target_compile_options(xrpl_core
|
||||
PUBLIC
|
||||
$<$<BOOL:${is_xcode}>:
|
||||
--system-header-prefix="google/protobuf"
|
||||
-Wno-deprecated-dynamic-exception-spec
|
||||
>)
|
||||
add_library (Ripple::pbufs ALIAS pbufs)
|
||||
target_link_libraries (ripple_libs INTERFACE Ripple::pbufs)
|
||||
exclude_if_included (pbufs)
|
||||
>
|
||||
)
|
||||
|
||||
@@ -81,4 +81,4 @@ if(XAR_LIBRARY)
|
||||
else()
|
||||
message(WARNING "xar library not found... (only important for mac builds)")
|
||||
endif()
|
||||
add_library (NIH::WasmEdge ALIAS wasmedge)
|
||||
add_library (wasmedge::wasmedge ALIAS wasmedge)
|
||||
|
||||
@@ -74,7 +74,11 @@ else ()
|
||||
if (NOT _location)
|
||||
message (FATAL_ERROR "using pkg-config for grpc, can't find c-ares")
|
||||
endif ()
|
||||
add_library (c-ares::cares ${_static} IMPORTED GLOBAL)
|
||||
if(${_location} MATCHES "\\.a$")
|
||||
add_library(c-ares::cares STATIC IMPORTED GLOBAL)
|
||||
else()
|
||||
add_library(c-ares::cares SHARED IMPORTED GLOBAL)
|
||||
endif()
|
||||
set_target_properties (c-ares::cares PROPERTIES
|
||||
IMPORTED_LOCATION ${_location}
|
||||
INTERFACE_INCLUDE_DIRECTORIES "${${_prefix}_INCLUDE_DIRS}"
|
||||
@@ -204,6 +208,7 @@ else ()
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
|
||||
-DCMAKE_CXX_STANDARD=17
|
||||
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
|
||||
$<$<BOOL:${CMAKE_TOOLCHAIN_FILE}>:-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}>
|
||||
$<$<BOOL:${VCPKG_TARGET_TRIPLET}>:-DVCPKG_TARGET_TRIPLET=${VCPKG_TARGET_TRIPLET}>
|
||||
@@ -309,25 +314,33 @@ endif ()
|
||||
grpc defs and bundle into a
|
||||
static lib
|
||||
#]=================================]
|
||||
set (GRPC_GEN_DIR "${CMAKE_BINARY_DIR}/proto_gen_grpc")
|
||||
file (MAKE_DIRECTORY ${GRPC_GEN_DIR})
|
||||
set (GRPC_PROTO_SRCS)
|
||||
set (GRPC_PROTO_HDRS)
|
||||
set (GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
|
||||
file(GLOB_RECURSE GRPC_DEFINITION_FILES LIST_DIRECTORIES false "${GRPC_PROTO_ROOT}/*.proto")
|
||||
set(output_dir "${CMAKE_BINARY_DIR}/proto_gen_grpc")
|
||||
set(GRPC_GEN_DIR "${output_dir}/ripple/proto")
|
||||
file(MAKE_DIRECTORY ${GRPC_GEN_DIR})
|
||||
set(GRPC_PROTO_SRCS)
|
||||
set(GRPC_PROTO_HDRS)
|
||||
set(GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
|
||||
file(GLOB_RECURSE GRPC_DEFINITION_FILES "${GRPC_PROTO_ROOT}/*.proto")
|
||||
foreach(file ${GRPC_DEFINITION_FILES})
|
||||
# /home/user/rippled/src/ripple/proto/org/.../v1/get_ledger.proto
|
||||
get_filename_component(_abs_file ${file} ABSOLUTE)
|
||||
# /home/user/rippled/src/ripple/proto/org/.../v1
|
||||
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
|
||||
# get_ledger
|
||||
get_filename_component(_basename ${file} NAME_WE)
|
||||
# /home/user/rippled/src/ripple/proto
|
||||
get_filename_component(_proto_inc ${GRPC_PROTO_ROOT} DIRECTORY) # updir one level
|
||||
# org/.../v1/get_ledger.proto
|
||||
file(RELATIVE_PATH _rel_root_file ${_proto_inc} ${_abs_file})
|
||||
# org/.../v1
|
||||
get_filename_component(_rel_root_dir ${_rel_root_file} DIRECTORY)
|
||||
# src/ripple/proto/org/.../v1
|
||||
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
|
||||
|
||||
set (src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
|
||||
set (src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
|
||||
set (hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
|
||||
set (hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
|
||||
set(src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
|
||||
set(src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
|
||||
set(hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
|
||||
set(hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
|
||||
add_custom_command(
|
||||
OUTPUT ${src_1} ${src_2} ${hdr_1} ${hdr_2}
|
||||
COMMAND protobuf::protoc
|
||||
@@ -340,16 +353,32 @@ foreach(file ${GRPC_DEFINITION_FILES})
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
COMMENT "Running gRPC C++ protocol buffer compiler on ${file}"
|
||||
VERBATIM)
|
||||
set_source_files_properties(${src_1} ${src_2} ${hdr_1} ${hdr_2} PROPERTIES GENERATED TRUE)
|
||||
set_source_files_properties(${src_1} ${src_2} ${hdr_1} ${hdr_2} PROPERTIES
|
||||
GENERATED TRUE
|
||||
SKIP_UNITY_BUILD_INCLUSION ON
|
||||
)
|
||||
list(APPEND GRPC_PROTO_SRCS ${src_1} ${src_2})
|
||||
list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2})
|
||||
endforeach()
|
||||
|
||||
add_library (grpc_pbufs STATIC ${GRPC_PROTO_SRCS} ${GRPC_PROTO_HDRS})
|
||||
#target_include_directories (grpc_pbufs PRIVATE src)
|
||||
target_include_directories (grpc_pbufs SYSTEM PUBLIC ${GRPC_GEN_DIR})
|
||||
target_link_libraries (grpc_pbufs protobuf::libprotobuf "gRPC::grpc++${grpc_suffix}")
|
||||
target_compile_options (grpc_pbufs
|
||||
target_include_directories(xrpl_core SYSTEM PUBLIC
|
||||
$<BUILD_INTERFACE:${output_dir}>
|
||||
$<BUILD_INTERFACE:${output_dir}/ripple/proto>
|
||||
# The generated sources include headers relative to this path. Fix it later.
|
||||
$<INSTALL_INTERFACE:include/ripple/proto>
|
||||
)
|
||||
target_sources(xrpl_core PRIVATE ${GRPC_PROTO_SRCS})
|
||||
install(
|
||||
DIRECTORY ${output_dir}/ripple
|
||||
DESTINATION include/
|
||||
FILES_MATCHING PATTERN "*.h"
|
||||
)
|
||||
target_link_libraries(xrpl_core PUBLIC
|
||||
"gRPC::grpc++"
|
||||
# libgrpc is missing references.
|
||||
absl::random_random
|
||||
)
|
||||
target_compile_options(xrpl_core
|
||||
PRIVATE
|
||||
$<$<BOOL:${MSVC}>:-wd4065>
|
||||
$<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
|
||||
@@ -359,6 +388,5 @@ target_compile_options (grpc_pbufs
|
||||
--system-header-prefix="google/protobuf"
|
||||
-Wno-deprecated-dynamic-exception-spec
|
||||
>)
|
||||
add_library (Ripple::grpc_pbufs ALIAS grpc_pbufs)
|
||||
target_link_libraries (ripple_libs INTERFACE Ripple::grpc_pbufs)
|
||||
exclude_if_included (grpc_pbufs)
|
||||
# target_link_libraries (ripple_libs INTERFACE Ripple::grpc_pbufs)
|
||||
# exclude_if_included (grpc_pbufs)
|
||||
|
||||
@@ -52,6 +52,9 @@ Loop: ripple.overlay ripple.rpc
|
||||
Loop: test.app test.jtx
|
||||
test.app > test.jtx
|
||||
|
||||
Loop: test.app test.rpc
|
||||
test.rpc ~= test.app
|
||||
|
||||
Loop: test.jtx test.toplevel
|
||||
test.toplevel > test.jtx
|
||||
|
||||
|
||||
@@ -90,7 +90,6 @@ test.app > ripple.overlay
|
||||
test.app > ripple.protocol
|
||||
test.app > ripple.resource
|
||||
test.app > ripple.rpc
|
||||
test.app > test.rpc
|
||||
test.app > test.toplevel
|
||||
test.app > test.unit_test
|
||||
test.basics > ripple.basics
|
||||
@@ -131,6 +130,7 @@ test.csf > ripple.json
|
||||
test.csf > ripple.protocol
|
||||
test.json > ripple.beast
|
||||
test.json > ripple.json
|
||||
test.json > ripple.rpc
|
||||
test.json > test.jtx
|
||||
test.jtx > ripple.app
|
||||
test.jtx > ripple.basics
|
||||
@@ -141,6 +141,8 @@ test.jtx > ripple.json
|
||||
test.jtx > ripple.ledger
|
||||
test.jtx > ripple.net
|
||||
test.jtx > ripple.protocol
|
||||
test.jtx > ripple.resource
|
||||
test.jtx > ripple.rpc
|
||||
test.jtx > ripple.server
|
||||
test.ledger > ripple.app
|
||||
test.ledger > ripple.basics
|
||||
@@ -167,7 +169,6 @@ test.nodestore > test.unit_test
|
||||
test.overlay > ripple.app
|
||||
test.overlay > ripple.basics
|
||||
test.overlay > ripple.beast
|
||||
test.overlay > ripple.core
|
||||
test.overlay > ripple.overlay
|
||||
test.overlay > ripple.peerfinder
|
||||
test.overlay > ripple.protocol
|
||||
|
||||
157
CMakeLists.txt
@@ -1,20 +1,30 @@
|
||||
cmake_minimum_required (VERSION 3.16)
|
||||
|
||||
if (POLICY CMP0074)
|
||||
cmake_policy(SET CMP0074 NEW)
|
||||
endif ()
|
||||
|
||||
project (rippled)
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
set(CMAKE_CXX_STANDARD 20)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
|
||||
if(POLICY CMP0074)
|
||||
cmake_policy(SET CMP0074 NEW)
|
||||
endif()
|
||||
if(POLICY CMP0077)
|
||||
cmake_policy(SET CMP0077 NEW)
|
||||
endif()
|
||||
|
||||
# Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows.
|
||||
file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
|
||||
|
||||
if(POLICY CMP0144)
|
||||
cmake_policy(SET CMP0144 NEW)
|
||||
endif()
|
||||
|
||||
project (rippled)
|
||||
set(Boost_NO_BOOST_CMAKE ON)
|
||||
|
||||
# make GIT_COMMIT_HASH define available to all sources
|
||||
find_package(Git)
|
||||
if(Git_FOUND)
|
||||
execute_process(COMMAND ${GIT_EXECUTABLE} describe --always --abbrev=40
|
||||
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git describe --always --abbrev=40
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
|
||||
if(gch)
|
||||
set(GIT_COMMIT_HASH "${gch}")
|
||||
@@ -23,20 +33,32 @@ if(Git_FOUND)
|
||||
endif()
|
||||
endif() #git
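To preview the value that ends up in `GIT_COMMIT_HASH`, the same `git describe` call can be run by hand (checkout path illustrative):

```
# mirrors the execute_process() call above
git --git-dir=/path/to/xahaud/.git describe --always --abbrev=40
```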
|
||||
|
||||
if (thread_safety_analysis)
|
||||
if(thread_safety_analysis)
|
||||
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
|
||||
add_compile_options("-stdlib=libc++")
|
||||
add_link_options("-stdlib=libc++")
|
||||
endif()
|
||||
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/deps")
|
||||
option(USE_CONAN "Use Conan package manager for dependencies" OFF)
|
||||
# Then, auto-detect if conan_toolchain.cmake is being used
|
||||
if(CMAKE_TOOLCHAIN_FILE)
|
||||
# Check if the toolchain file path contains "conan_toolchain"
|
||||
if(CMAKE_TOOLCHAIN_FILE MATCHES "conan_toolchain")
|
||||
set(USE_CONAN ON CACHE BOOL "Using Conan detected from toolchain file" FORCE)
|
||||
message(STATUS "Conan toolchain detected: ${CMAKE_TOOLCHAIN_FILE}")
|
||||
message(STATUS "Building with Conan dependencies")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (NOT USE_CONAN)
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
|
||||
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/deps")
|
||||
endif()
|
||||
|
||||
include (CheckCXXCompilerFlag)
|
||||
include (FetchContent)
|
||||
include (ExternalProject)
|
||||
include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
|
||||
include (ProcessorCount)
|
||||
if (target)
|
||||
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
|
||||
endif ()
|
||||
@@ -44,7 +66,9 @@ endif ()
|
||||
include(RippledSanity)
|
||||
include(RippledVersion)
|
||||
include(RippledSettings)
|
||||
include(RippledNIH)
|
||||
if (NOT USE_CONAN)
|
||||
include(RippledNIH)
|
||||
endif()
|
||||
# this check has to remain in the top-level cmake
|
||||
# because of the early return statement
|
||||
if (packages_only)
|
||||
@@ -57,30 +81,103 @@ include(RippledCompiler)
|
||||
include(RippledInterface)
|
||||
|
||||
###
|
||||
if (NOT USE_CONAN)
|
||||
set(SECP256K1_INSTALL TRUE)
|
||||
add_subdirectory(src/secp256k1)
|
||||
add_library(secp256k1::secp256k1 ALIAS secp256k1)
|
||||
add_subdirectory(src/ed25519-donna)
|
||||
include(deps/Boost)
|
||||
include(deps/OpenSSL)
|
||||
# include(deps/Secp256k1)
|
||||
# include(deps/Ed25519-donna)
|
||||
include(deps/Lz4)
|
||||
include(deps/Libarchive)
|
||||
include(deps/Sqlite)
|
||||
include(deps/Soci)
|
||||
include(deps/Snappy)
|
||||
include(deps/Rocksdb)
|
||||
include(deps/Nudb)
|
||||
include(deps/date)
|
||||
# include(deps/Protobuf)
|
||||
# include(deps/gRPC)
|
||||
include(deps/cassandra)
|
||||
include(deps/Postgres)
|
||||
include(deps/WasmEdge)
|
||||
else()
|
||||
include(conan/Boost)
|
||||
find_package(OpenSSL 1.1.1 REQUIRED)
|
||||
set_target_properties(OpenSSL::SSL PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
|
||||
)
|
||||
set(SECP256K1_INSTALL TRUE)
|
||||
add_subdirectory(src/secp256k1)
|
||||
add_library(secp256k1::secp256k1 ALIAS secp256k1)
|
||||
add_subdirectory(src/ed25519-donna)
|
||||
find_package(lz4 REQUIRED)
|
||||
# Target names with :: are not allowed in a generator expression.
|
||||
# We need to pull the include directories and imported location properties
|
||||
# from separate targets.
|
||||
find_package(LibArchive REQUIRED)
|
||||
find_package(SOCI REQUIRED)
|
||||
find_package(SQLite3 REQUIRED)
|
||||
find_package(Snappy REQUIRED)
|
||||
find_package(wasmedge REQUIRED)
|
||||
option(rocksdb "Enable RocksDB" ON)
|
||||
if(rocksdb)
|
||||
find_package(RocksDB REQUIRED)
|
||||
set_target_properties(RocksDB::rocksdb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
|
||||
)
|
||||
target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
|
||||
endif()
|
||||
find_package(nudb REQUIRED)
|
||||
find_package(date REQUIRED)
|
||||
if(TARGET nudb::core)
|
||||
set(nudb nudb::core)
|
||||
elseif(TARGET NuDB::nudb)
|
||||
set(nudb NuDB::nudb)
|
||||
else()
|
||||
message(FATAL_ERROR "unknown nudb target")
|
||||
endif()
|
||||
target_link_libraries(ripple_libs INTERFACE ${nudb})
|
||||
|
||||
include(deps/Boost)
|
||||
include(deps/OpenSSL)
|
||||
include(deps/Secp256k1)
|
||||
include(deps/Ed25519-donna)
|
||||
include(deps/Lz4)
|
||||
include(deps/Libarchive)
|
||||
include(deps/Sqlite)
|
||||
include(deps/Soci)
|
||||
include(deps/Snappy)
|
||||
include(deps/Rocksdb)
|
||||
include(deps/Nudb)
|
||||
include(deps/date)
|
||||
include(deps/Protobuf)
|
||||
include(deps/gRPC)
|
||||
include(deps/cassandra)
|
||||
include(deps/Postgres)
|
||||
include(deps/WasmEdge)
|
||||
if(reporting)
|
||||
find_package(cassandra-cpp-driver REQUIRED)
|
||||
find_package(PostgreSQL REQUIRED)
|
||||
target_link_libraries(ripple_libs INTERFACE
|
||||
cassandra-cpp-driver::cassandra-cpp-driver
|
||||
PostgreSQL::PostgreSQL
|
||||
)
|
||||
endif()
|
||||
target_link_libraries(ripple_libs INTERFACE
|
||||
ed25519::ed25519
|
||||
LibArchive::LibArchive
|
||||
lz4::lz4
|
||||
OpenSSL::Crypto
|
||||
OpenSSL::SSL
|
||||
# Ripple::grpc_pbufs
|
||||
# Ripple::pbufs
|
||||
secp256k1::secp256k1
|
||||
soci::soci
|
||||
SQLite::SQLite3
|
||||
)
|
||||
endif()
|
||||
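Which branch runs is decided entirely by the configure command. A minimal sketch of the two modes (build directory names illustrative):

```
# NIH path: Builds/CMake/deps/* download and build the dependencies
cmake -B build -S . -DCMAKE_BUILD_TYPE=Release

# Conan path: USE_CONAN flips on automatically when the toolchain file name
# matches "conan_toolchain", as detected earlier in this CMakeLists.txt
cmake -B build-conan -S . \
  -DCMAKE_TOOLCHAIN_FILE=build-conan/conan_toolchain.cmake \
  -DCMAKE_BUILD_TYPE=Release
```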
|
||||
if(coverage)
|
||||
include(RippledCov)
|
||||
endif()
|
||||
|
||||
###
|
||||
|
||||
include(RippledCore)
|
||||
if (NOT USE_CONAN)
|
||||
include(deps/Protobuf)
|
||||
include(deps/gRPC)
|
||||
else()
|
||||
include(conan/Protobuf)
|
||||
include(conan/gRPC)
|
||||
endif()
|
||||
include(RippledInstall)
|
||||
include(RippledCov)
|
||||
include(RippledMultiConfig)
|
||||
include(RippledDocs)
|
||||
include(RippledValidatorKeys)
|
||||
|
||||
@@ -123,6 +123,25 @@ pip3 install pre-commit
|
||||
pre-commit install
|
||||
```
|
||||
|
||||
## Unit Tests
|
||||
To execute all unit tests:
|
||||
|
||||
```rippled --unittest --unittest-jobs=<number of cores>```
|
||||
|
||||
(Note: Using multiple cores on a Mac M1 can cause spurious test failures. The
|
||||
cause is still under investigation. If you observe this problem, try specifying fewer jobs.)
|
||||
|
||||
To run a specific set of test suites:
|
||||
|
||||
```
|
||||
rippled --unittest TestSuiteName
|
||||
```
|
||||
Note: In this example, every test suite whose name starts with `TestSuiteName` will
run, so if `TestSuiteName1` and `TestSuiteName2` both exist, both will run.
However, if the given name matches a suite exactly, partial matching is skipped
and only that suite (`TestSuiteName` here) is executed.
|
||||
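For example (binary path and job count are illustrative):

```
# run every suite across four jobs
./rippled --unittest --unittest-jobs=4

# runs every suite whose name begins with TestSuiteName, unless a suite named
# exactly TestSuiteName exists, in which case only that suite runs
./rippled --unittest TestSuiteName
```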
|
||||
## Avoid
|
||||
|
||||
1. Proliferation of nearly identical code.
|
||||
@@ -182,4 +201,4 @@ existing maintainer without a vote.
|
||||
|
||||
|
||||
[1]: https://docs.github.com/en/get-started/quickstart/contributing-to-projects
|
||||
[2]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/about-pull-request-merges#squash-and-merge-your-commits
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Xahau
|
||||
|
||||
**Note:** Throughout this README, references to "we" or "our" pertain to the community and contributors involved in the Xahau network. It does not imply a legal entity or a specific collection of individuals.
|
||||
|
||||
@@ -67,5 +67,5 @@ git-subtree. See those directories' README files for more details.
|
||||
- [explorer.xahau.network](https://explorer.xahau.network)
|
||||
- **Testnet & Faucet**: Test applications and obtain test XAH at [xahau-test.net](https://xahau-test.net) and use the testnet explorer at [explorer.xahau.network](https://explorer.xahau.network).
|
||||
- **Supporting Wallets**: A list of wallets that support XAH and Xahau-based assets.
|
||||
  - [Xaman](https://xaman.app)
  - [Crossmark](https://crossmark.io)
|
||||
|
||||
247
RELEASENOTES.md
@@ -7,6 +7,253 @@ This document contains the release notes for `rippled`, the reference server imp
|
||||
|
||||
Have new ideas? Need help with setting up your node? [Please open an issue here](https://github.com/xrplf/rippled/issues/new/choose).
|
||||
|
||||
# Introducing XRP Ledger version 1.12.0
|
||||
|
||||
Version 1.12.0 of `rippled`, the reference server implementation of the XRP Ledger protocol, is now available. This release adds new features and bug fixes, and introduces these amendments:
|
||||
|
||||
- `AMM`
|
||||
- `Clawback`
|
||||
- `fixReducedOffersV1`
|
||||
|
||||
[Sign Up for Future Release Announcements](https://groups.google.com/g/ripple-server)
|
||||
|
||||
<!-- BREAK -->
|
||||
|
||||
## Action Required
|
||||
|
||||
Three new amendments are now open for voting according to the XRP Ledger's [amendment process](https://xrpl.org/amendments.html), which enables protocol changes following two weeks of >80% support from trusted validators.
|
||||
|
||||
If you operate an XRP Ledger server, upgrade to version 1.12.0 by September 20, 2023 to ensure service continuity. The exact time that protocol changes take effect depends on the voting decisions of the decentralized network.
|
||||
|
||||
|
||||
## Install / Upgrade
|
||||
|
||||
On supported platforms, see the [instructions on installing or updating `rippled`](https://xrpl.org/install-rippled.html).
|
||||
|
||||
The XRPL Foundation publishes portable binaries, which are drop-in replacements for the `rippled` daemon. [See information and downloads for the portable binaries](https://github.com/XRPLF/rippled-portable-builds#portable-builds-of-the-rippled-server). This will work on most distributions, including Ubuntu 16.04, 18.04, 20.04, and 22.04; CentOS; and others. Please test and open issues on GitHub if there are problems.
|
||||
|
||||
|
||||
## Changelog
|
||||
|
||||
### Amendments, New Features, and Changes
|
||||
(These are changes which may impact or be useful to end users. For example, you may be able to update your code/workflow to take advantage of these changes.)
|
||||
|
||||
- **`AMM`**: Introduces an automated market maker (AMM) protocol to the XRP Ledger's decentralized exchange, enabling you to trade assets without a counterparty. For more information about AMMs, see: [Automated Market Maker](https://opensource.ripple.com/docs/xls-30d-amm/amm-uc/). [#4294](https://github.com/XRPLF/rippled/pull/4294)
|
||||
|
||||
- **`Clawback`**: Adds a setting, *Allow Clawback*, which lets an issuer recover, or _claw back_, tokens that they previously issued. Issuers cannot enable this setting if they have issued tokens already. For additional documentation on this feature, see: [#4553](https://github.com/XRPLF/rippled/pull/4553).
|
||||
|
||||
- **`fixReducedOffersV1`**: Reduces the occurrence of order books that are blocked by reduced offers. [#4512](https://github.com/XRPLF/rippled/pull/4512)
|
||||
|
||||
- Added WebSocket and RPC port info to `server_info` responses. [#4427](https://github.com/XRPLF/rippled/pull/4427)
|
||||
|
||||
- Removed the deprecated `accepted`, `seqNum`, `hash`, and `totalCoins` fields from the `ledger` method. [#4244](https://github.com/XRPLF/rippled/pull/4244)
|
||||
|
||||
|
||||
### Bug Fixes and Performance Improvements
|
||||
(These are behind-the-scenes improvements, such as internal changes to the code, which are not expected to impact end users.)
|
||||
|
||||
- Added a pre-commit hook that runs the clang-format linter locally before committing changes. To install this feature, see: [CONTRIBUTING](https://github.com/XRPLF/xrpl-dev-portal/blob/master/CONTRIBUTING.md). [#4599](https://github.com/XRPLF/rippled/pull/4599)
|
||||
|
||||
- In order to make it more straightforward to catch and handle overflows: changed the output type of the `mulDiv()` function from `std::pair<bool, uint64_t>` to `std::optional`. [#4243](https://github.com/XRPLF/rippled/pull/4243)
|
||||
|
||||
- Updated `Handler::Condition` enum values to make the code less brittle. [#4239](https://github.com/XRPLF/rippled/pull/4239)
|
||||
|
||||
- Renamed `ServerHandlerImp` to `ServerHandler`. [#4516](https://github.com/XRPLF/rippled/pull/4516), [#4592](https://github.com/XRPLF/rippled/pull/4592)
|
||||
|
||||
- Replaced hand-rolled code with `std::from_chars` for better maintainability. [#4473](https://github.com/XRPLF/rippled/pull/4473)
|
||||
|
||||
- Removed an unused `TypedField` move constructor. [#4567](https://github.com/XRPLF/rippled/pull/4567)
|
||||
|
||||
|
||||
### Docs and Build System
|
||||
|
||||
- Updated checkout versions to resolve warnings during GitHub jobs. [#4598](https://github.com/XRPLF/rippled/pull/4598)
|
||||
|
||||
- Fixed an issue with the Debian package build. [#4591](https://github.com/XRPLF/rippled/pull/4591)
|
||||
|
||||
- Updated build instructions with additional steps to take after updating dependencies. [#4623](https://github.com/XRPLF/rippled/pull/4623)
|
||||
|
||||
- Updated contributing doc to clarify that beta releases should also be pushed to the `release` branch. [#4589](https://github.com/XRPLF/rippled/pull/4589)
|
||||
|
||||
- Enabled the `BETA_RPC_API` flag in the default unit tests config, making the API v2 (beta) available to unit tests. [#4573](https://github.com/XRPLF/rippled/pull/4573)
|
||||
|
||||
- Conan dependency management.
|
||||
- Fixed package definitions for Conan. [#4485](https://github.com/XRPLF/rippled/pull/4485)
|
||||
- Updated build dependencies to the most recent versions in Conan Center. [#4595](https://github.com/XRPLF/rippled/pull/4595)
|
||||
- Updated Conan recipe for NuDB. [#4615](https://github.com/XRPLF/rippled/pull/4615)
|
||||
|
||||
- Added binary hardening and linker flags to enhance security during the build process. [#4603](https://github.com/XRPLF/rippled/pull/4603)
|
||||
|
||||
- Added an Artifactory to the `nix` workflow to improve build times. [#4556](https://github.com/XRPLF/rippled/pull/4556)
|
||||
|
||||
- Added quality-of-life improvements to workflows, using new [concurrency control](https://docs.github.com/en/actions/using-jobs/using-concurrency) features. [#4597](https://github.com/XRPLF/rippled/pull/4597)
|
||||
|
||||
|
||||
[Full Commit Log](https://github.com/XRPLF/rippled/compare/1.11.0...1.12.0)
|
||||
|
||||
|
||||
### GitHub
|
||||
|
||||
The public source code repository for `rippled` is hosted on GitHub at <https://github.com/XRPLF/rippled>.
|
||||
|
||||
We welcome all contributions and invite everyone to join the community of XRP Ledger developers to help build the Internet of Value.
|
||||
|
||||
|
||||
## Credits
|
||||
|
||||
The following people contributed directly to this release:
|
||||
|
||||
- Alphonse N. Mousse <39067955+a-noni-mousse@users.noreply.github.com>
|
||||
- Arihant Kothari <arihantkothari17@gmail.com>
|
||||
- Chenna Keshava B S <21219765+ckeshava@users.noreply.github.com>
|
||||
- Denis Angell <dangell@transia.co>
|
||||
- Ed Hennis <ed@ripple.com>
|
||||
- Elliot Lee <github.public@intelliot.com>
|
||||
- Gregory Tsipenyuk <gregtatcam@users.noreply.github.com>
|
||||
- Howard Hinnant <howard.hinnant@gmail.com>
|
||||
- Ikko Eltociear Ashimine <eltociear@gmail.com>
|
||||
- John Freeman <jfreeman08@gmail.com>
|
||||
- Manoj Doshi <mdoshi@ripple.com>
|
||||
- Mark Travis <mtravis@ripple.com>
|
||||
- Mayukha Vadari <mvadari@gmail.com>
|
||||
- Michael Legleux <legleux@users.noreply.github.com>
|
||||
- Peter Chen <34582813+PeterChen13579@users.noreply.github.com>
|
||||
- RichardAH <richard.holland@starstone.co.nz>
|
||||
- Rome Reginelli <rome@ripple.com>
|
||||
- Scott Schurr <scott@ripple.com>
|
||||
- Shawn Xie <35279399+shawnxie999@users.noreply.github.com>
|
||||
- drlongle <drlongle@gmail.com>
|
||||
|
||||
Bug Bounties and Responsible Disclosures:
|
||||
|
||||
We welcome reviews of the rippled code and urge researchers to responsibly disclose any issues they may find.
|
||||
|
||||
To report a bug, please send a detailed report to: <bugs@xrpl.org>
|
||||
|
||||
|
||||
# Introducing XRP Ledger version 1.11.0
|
||||
|
||||
Version 1.11.0 of `rippled`, the reference server implementation of the XRP Ledger protocol, is now available.
|
||||
|
||||
This release reduces memory usage, introduces the `fixNFTokenRemint` amendment, and adds new features and bug fixes. For example, the new NetworkID field in transactions helps to prevent replay attacks with side-chains.
|
||||
|
||||
[Sign Up for Future Release Announcements](https://groups.google.com/g/ripple-server)
|
||||
|
||||
<!-- BREAK -->
|
||||
|
||||
## Action Required
|
||||
|
||||
The `fixNFTokenRemint` amendment is now open for voting according to the XRP Ledger's [amendment process](https://xrpl.org/amendments.html), which enables protocol changes following two weeks of >80% support from trusted validators.
|
||||
|
||||
If you operate an XRP Ledger server, upgrade to version 1.11.0 by July 5 to ensure service continuity. The exact time that protocol changes take effect depends on the voting decisions of the decentralized network.
|
||||
|
||||
|
||||
## Install / Upgrade
|
||||
|
||||
On supported platforms, see the [instructions on installing or updating `rippled`](https://xrpl.org/install-rippled.html).
|
||||
|
||||
|
||||
## What's Changed
|
||||
|
||||
### New Features and Improvements
|
||||
|
||||
* Allow port numbers to be specified using either a colon or a space by @RichardAH in https://github.com/XRPLF/rippled/pull/4328
|
||||
* Eliminate memory allocation from critical path: by @nbougalis in https://github.com/XRPLF/rippled/pull/4353
|
||||
* Make it easy for projects to depend on libxrpl by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4449
|
||||
* Add the ability to mark amendments as obsolete by @ximinez in https://github.com/XRPLF/rippled/pull/4291
|
||||
* Always create the FeeSettings object in genesis ledger by @ximinez in https://github.com/XRPLF/rippled/pull/4319
|
||||
* Log exception messages in several locations by @drlongle in https://github.com/XRPLF/rippled/pull/4400
|
||||
* Parse flags in account_info method by @drlongle in https://github.com/XRPLF/rippled/pull/4459
|
||||
* Add NFTokenPages to account_objects RPC by @RichardAH in https://github.com/XRPLF/rippled/pull/4352
|
||||
* add jss fields used by clio `nft_info` by @ledhed2222 in https://github.com/XRPLF/rippled/pull/4320
|
||||
* Introduce a slab-based memory allocator and optimize SHAMapItem by @nbougalis in https://github.com/XRPLF/rippled/pull/4218
|
||||
* Add NetworkID field to transactions to help prevent replay attacks on and from side-chains by @RichardAH in https://github.com/XRPLF/rippled/pull/4370
|
||||
* If present, set quorum based on command line. by @mtrippled in https://github.com/XRPLF/rippled/pull/4489
|
||||
* API does not accept seed or public key for account by @drlongle in https://github.com/XRPLF/rippled/pull/4404
|
||||
* Add `nftoken_id`, `nftoken_ids` and `offer_id` meta fields into NFT `Tx` responses by @shawnxie999 in https://github.com/XRPLF/rippled/pull/4447
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* fix(gateway_balances): handle overflow exception by @RichardAH in https://github.com/XRPLF/rippled/pull/4355
|
||||
* fix(ValidatorSite): handle rare null pointer dereference in timeout by @ximinez in https://github.com/XRPLF/rippled/pull/4420
|
||||
* RPC commands understand markers derived from all ledger object types by @ximinez in https://github.com/XRPLF/rippled/pull/4361
|
||||
* `fixNFTokenRemint`: prevent NFT re-mint: by @shawnxie999 in https://github.com/XRPLF/rippled/pull/4406
|
||||
* Fix a case where ripple::Expected returned a json array, not a value by @scottschurr in https://github.com/XRPLF/rippled/pull/4401
|
||||
* fix: Ledger data returns an empty list (instead of null) when all entries are filtered out by @drlongle in https://github.com/XRPLF/rippled/pull/4398
|
||||
* Fix unit test ripple.app.LedgerData by @drlongle in https://github.com/XRPLF/rippled/pull/4484
|
||||
* Fix the fix for std::result_of by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4496
|
||||
* Fix errors for Clang 16 by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4501
|
||||
* Ensure that switchover vars are initialized before use: by @seelabs in https://github.com/XRPLF/rippled/pull/4527
|
||||
* Move faulty assert by @ximinez in https://github.com/XRPLF/rippled/pull/4533
|
||||
* Fix unaligned load and stores: (#4528) by @seelabs in https://github.com/XRPLF/rippled/pull/4531
|
||||
* fix node size estimation by @dangell7 in https://github.com/XRPLF/rippled/pull/4536
|
||||
* fix: remove redundant moves by @ckeshava in https://github.com/XRPLF/rippled/pull/4565
|
||||
|
||||
### Code Cleanup and Testing
|
||||
|
||||
* Replace compare() with the three-way comparison operator in base_uint, Issue and Book by @drlongle in https://github.com/XRPLF/rippled/pull/4411
|
||||
* Rectify the import paths of boost::function_output_iterator by @ckeshava in https://github.com/XRPLF/rippled/pull/4293
|
||||
* Expand Linux test matrix by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4454
|
||||
* Add patched recipe for SOCI by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4510
|
||||
* Switch to self-hosted runners for macOS by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4511
|
||||
* [TRIVIAL] Add missing includes by @seelabs in https://github.com/XRPLF/rippled/pull/4555
|
||||
|
||||
### Docs
|
||||
|
||||
* Refactor build instructions by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4381
|
||||
* Add install instructions for package managers by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4472
|
||||
* Fix typo by @solmsted in https://github.com/XRPLF/rippled/pull/4508
|
||||
* Update environment.md by @sappenin in https://github.com/XRPLF/rippled/pull/4498
|
||||
* Update BUILD.md by @oeggert in https://github.com/XRPLF/rippled/pull/4514
|
||||
* Trivial: add comments for NFToken-related invariants by @scottschurr in https://github.com/XRPLF/rippled/pull/4558
|
||||
|
||||
## New Contributors
|
||||
* @drlongle made their first contribution in https://github.com/XRPLF/rippled/pull/4411
|
||||
* @ckeshava made their first contribution in https://github.com/XRPLF/rippled/pull/4293
|
||||
* @solmsted made their first contribution in https://github.com/XRPLF/rippled/pull/4508
|
||||
* @sappenin made their first contribution in https://github.com/XRPLF/rippled/pull/4498
|
||||
* @oeggert made their first contribution in https://github.com/XRPLF/rippled/pull/4514
|
||||
|
||||
**Full Changelog**: https://github.com/XRPLF/rippled/compare/1.10.1...1.11.0
|
||||
|
||||
|
||||
### GitHub
|
||||
|
||||
The public source code repository for `rippled` is hosted on GitHub at <https://github.com/XRPLF/rippled>.
|
||||
|
||||
We welcome all contributions and invite everyone to join the community of XRP Ledger developers to help build the Internet of Value.
|
||||
|
||||
### Credits
|
||||
|
||||
The following people contributed directly to this release:
|
||||
- Alloy Networks <45832257+alloynetworks@users.noreply.github.com>
|
||||
- Brandon Wilson <brandon@coil.com>
|
||||
- Chenna Keshava B S <21219765+ckeshava@users.noreply.github.com>
|
||||
- David Fuelling <sappenin@gmail.com>
|
||||
- Denis Angell <dangell@transia.co>
|
||||
- Ed Hennis <ed@ripple.com>
|
||||
- Elliot Lee <github.public@intelliot.com>
|
||||
- John Freeman <jfreeman08@gmail.com>
|
||||
- Mark Travis <mtrippled@users.noreply.github.com>
|
||||
- Nik Bougalis <nikb@bougalis.net>
|
||||
- RichardAH <richard.holland@starstone.co.nz>
|
||||
- Scott Determan <scott.determan@yahoo.com>
|
||||
- Scott Schurr <scott@ripple.com>
|
||||
- Shawn Xie <35279399+shawnxie999@users.noreply.github.com>
|
||||
- drlongle <drlongle@gmail.com>
|
||||
- ledhed2222 <ledhed2222@users.noreply.github.com>
|
||||
- oeggert <117319296+oeggert@users.noreply.github.com>
|
||||
- solmsted <steven.olm@gmail.com>
|
||||
|
||||
|
||||
Bug Bounties and Responsible Disclosures:

We welcome reviews of the rippled code and urge researchers to responsibly disclose any issues they may find.

To report a bug, please send a detailed report to: <bugs@xrpl.org>
|
||||
|
||||
|
||||
# Introducing XRP Ledger version 1.10.1
|
||||
|
||||
|
||||
13
SECURITY.md
@@ -61,13 +61,12 @@ For these complaints or reports, please [contact our support team](mailto:bugs@x
|
||||
|
||||
### The following types of security problems are excluded
|
||||
|
||||
- (D)DOS attacks
|
||||
- Error messages or error pages without sensitive data
|
||||
- Tests & sample data as publicly available in our repositories at Github
|
||||
- Common issues like browser header warnings or DNS configuration, identified by vulnerability scans
|
||||
- Vulnerability scan reports for software we publicly use
|
||||
- Security issues related to outdated OS's, browsers or plugins
|
||||
- Reports for security problems that we have been notified of before
|
||||
1. **In scope**. Only bugs in software under the scope of the program qualify. Currently, that means `xahaud` and `xahau-lib`.
|
||||
2. **Relevant**. A security issue, posing a danger to user funds, privacy or the operation of the Xahau Ledger.
|
||||
3. **Original and previously unknown**. Bugs that are already known and discussed in public do not qualify. Previously reported bugs, even if publicly unknown, are not eligible.
|
||||
4. **Specific**. We welcome general security advice or recommendations, but we cannot pay bounties for that.
|
||||
5. **Fixable**. There has to be something we can do to permanently fix the problem. Note that bugs in other people’s software may still qualify in some cases. For example, if you find a bug in a library that we use which can compromise the security of software that is in scope and we can get it fixed, you may qualify for a bounty.
|
||||
6. **Unused**. If you use the exploit to attack the Xahau Ledger, you do not qualify for a bounty. If you report a vulnerability used in an ongoing or past attack and there is specific, concrete evidence that suggests you are the attacker we reserve the right not to pay a bounty.
|
||||
|
||||
Please note: Reports that are lacking any proof (such as screenshots or other data), detailed information or details on how to reproduce any unexpected result will be investigated but will not be eligible for any reward.
|
||||
|
||||
|
||||
@@ -1,4 +1,11 @@
|
||||
#!/bin/bash -u
# We use set -e and bash with -u to bail on the first non-zero exit code of any
# process launched or upon any unbound variable.
# We use set -x to print commands before running them to help with
# debugging.
set -ex
|
||||
|
||||
echo "START INSIDE CONTAINER - CORE"
|
||||
|
||||
@@ -23,12 +30,12 @@ fi
|
||||
perl -i -pe "s/^(\\s*)-DBUILD_SHARED_LIBS=OFF/\\1-DBUILD_SHARED_LIBS=OFF\\n\\1-DROCKSDB_BUILD_SHARED=OFF/g" Builds/CMake/deps/Rocksdb.cmake &&
|
||||
mv Builds/CMake/deps/WasmEdge.cmake Builds/CMake/deps/WasmEdge.old &&
|
||||
echo "find_package(LLVM REQUIRED CONFIG)
|
||||
message(STATUS \"Found LLVM ${LLVM_PACKAGE_VERSION}\")
|
||||
message(STATUS \"Found LLVM \${LLVM_PACKAGE_VERSION}\")
|
||||
message(STATUS \"Using LLVMConfig.cmake in: \${LLVM_DIR}\")
|
||||
add_library (wasmedge STATIC IMPORTED GLOBAL)
|
||||
set_target_properties(wasmedge PROPERTIES IMPORTED_LOCATION \${WasmEdge_LIB})
|
||||
target_link_libraries (ripple_libs INTERFACE wasmedge)
|
||||
add_library (NIH::WasmEdge ALIAS wasmedge)
|
||||
add_library (wasmedge::wasmedge ALIAS wasmedge)
|
||||
message(\"WasmEdge DONE\")
|
||||
" > Builds/CMake/deps/WasmEdge.cmake &&
|
||||
git checkout src/ripple/protocol/impl/BuildInfo.cpp &&
|
||||
|
||||
40
build-full.sh
Executable file → Normal file
@@ -1,4 +1,11 @@
|
||||
#!/bin/bash -u
# We use set -e and bash with -u to bail on the first non-zero exit code of any
# process launched or upon any unbound variable.
# We use set -x to print commands before running them to help with
# debugging.
set -ex
|
||||
|
||||
echo "START INSIDE CONTAINER - FULL"
|
||||
|
||||
@@ -9,8 +16,17 @@ echo "-- GITHUB_RUN_NUMBER: $4"
|
||||
|
||||
umask 0000;
|
||||
|
||||
echo "Fixing CentOS 7 EOL"
|
||||
|
||||
sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-*
|
||||
sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-*
|
||||
yum clean all
|
||||
yum-config-manager --disable centos-sclo-sclo
|
||||
|
||||
####
|
||||
|
||||
cd /io;
|
||||
mkdir src/certs;
|
||||
mkdir -p src/certs;
|
||||
curl --silent -k https://raw.githubusercontent.com/RichardAH/rippled-release-builder/main/ca-bundle/certbundle.h -o src/certs/certbundle.h;
|
||||
if [ "`grep certbundle.h src/ripple/net/impl/RegisterSSLCerts.cpp | wc -l`" -eq "0" ]
|
||||
then
|
||||
@@ -57,8 +73,8 @@ then
|
||||
#endif/g" src/ripple/net/impl/RegisterSSLCerts.cpp &&
|
||||
sed -i "s/#include <ripple\/net\/RegisterSSLCerts.h>/\0\n#include <certs\/certbundle.h>/g" src/ripple/net/impl/RegisterSSLCerts.cpp
|
||||
fi
|
||||
mkdir .nih_c;
|
||||
mkdir .nih_toolchain;
|
||||
mkdir -p .nih_c;
|
||||
mkdir -p .nih_toolchain;
|
||||
cd .nih_toolchain &&
|
||||
yum install -y wget lz4 lz4-devel git llvm13-static.x86_64 llvm13-devel.x86_64 devtoolset-10-binutils zlib-static ncurses-static -y \
|
||||
devtoolset-7-gcc-c++ \
|
||||
@@ -81,11 +97,11 @@ echo "-- Install Cmake 3.23.1 --" &&
|
||||
pwd &&
|
||||
( wget -nc -q https://github.com/Kitware/CMake/releases/download/v3.23.1/cmake-3.23.1-linux-x86_64.tar.gz; echo "" ) &&
|
||||
tar -xzf cmake-3.23.1-linux-x86_64.tar.gz -C /hbb/ &&
|
||||
echo "-- Install Boost 1.75.0 --" &&
|
||||
echo "-- Install Boost 1.86.0 --" &&
|
||||
pwd &&
|
||||
( wget -nc -q https://boostorg.jfrog.io/artifactory/main/release/1.75.0/source/boost_1_75_0.tar.gz; echo "" ) &&
|
||||
tar -xzf boost_1_75_0.tar.gz &&
|
||||
cd boost_1_75_0 && ./bootstrap.sh && ./b2 link=static -j$3 && ./b2 install &&
|
||||
( wget -nc -q https://archives.boost.io/release/1.86.0/source/boost_1_86_0.tar.gz; echo "" ) &&
|
||||
tar -xzf boost_1_86_0.tar.gz &&
|
||||
cd boost_1_86_0 && ./bootstrap.sh && ./b2 link=static -j$3 && ./b2 install &&
|
||||
cd ../ &&
|
||||
echo "-- Install Protobuf 3.20.0 --" &&
|
||||
pwd &&
|
||||
@@ -106,7 +122,7 @@ tar -xf libunwind-13.0.1.src.tar.xz &&
|
||||
cp -r libunwind-13.0.1.src/include libunwind-13.0.1.src/src lld-13.0.1.src/ &&
|
||||
cd lld-13.0.1.src &&
|
||||
rm -rf build CMakeCache.txt &&
|
||||
mkdir build &&
|
||||
mkdir -p build &&
|
||||
cd build &&
|
||||
cmake .. -DLLVM_LIBRARY_DIR=/usr/lib64/llvm13/lib/ -DCMAKE_INSTALL_PREFIX=/usr/lib64/llvm13/ -DCMAKE_BUILD_TYPE=Release &&
|
||||
make -j$3 install &&
|
||||
@@ -116,11 +132,11 @@ cd ../../ &&
|
||||
echo "-- Build WasmEdge --" &&
|
||||
( wget -nc -q https://github.com/WasmEdge/WasmEdge/archive/refs/tags/0.11.2.zip; unzip -o 0.11.2.zip; ) &&
|
||||
cd WasmEdge-0.11.2 &&
|
||||
( mkdir build; echo "" ) &&
|
||||
( mkdir -p build; echo "" ) &&
|
||||
cd build &&
|
||||
export BOOST_ROOT="/usr/local/src/boost_1_75_0" &&
|
||||
export BOOST_ROOT="/usr/local/src/boost_1_86_0" &&
|
||||
export Boost_LIBRARY_DIRS="/usr/local/lib" &&
|
||||
export BOOST_INCLUDEDIR="/usr/local/src/boost_1_75_0" &&
|
||||
export BOOST_INCLUDEDIR="/usr/local/src/boost_1_86_0" &&
|
||||
export PATH=`echo $PATH | sed -E "s/devtoolset-7/devtoolset-9/g"` &&
|
||||
cmake .. \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
|
||||
@@ -283,12 +283,14 @@
|
||||
# ssl_cert
|
||||
#
|
||||
# Specifies the path to the SSL certificate file in PEM format.
|
||||
# This is not needed if the chain includes it. Use ssl_chain if
|
||||
# your certificate includes one or more intermediates.
|
||||
#
|
||||
# ssl_chain
|
||||
#
|
||||
# If you need a certificate chain, specify the path to the
|
||||
# certificate chain here. The chain may include the end certificate.
|
||||
# This must be used if the certificate includes intermediates.
|
||||
#
|
||||
# ssl_ciphers = <cipherlist>
|
||||
#
|
||||
@@ -387,6 +389,21 @@
|
||||
#
|
||||
#
|
||||
#
|
||||
# [compression]
|
||||
#
|
||||
# true or false
|
||||
#
|
||||
# true - enables compression
|
||||
# false - disables compression [default].
|
||||
#
|
||||
# The rippled server can save bandwidth by compressing its peer-to-peer communications,
|
||||
# at a cost of greater CPU usage. If you enable link compression,
|
||||
# the server automatically compresses communications with peer servers
|
||||
# that also have link compression enabled.
|
||||
# https://xrpl.org/enable-link-compression.html
|
||||
#
|
||||
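As a hedged sketch, turning it on is a two-line stanza appended to the server's config (file path illustrative; restart the server afterwards):

```
# append the stanza; the config file location depends on your installation
cat >> /etc/opt/xahau/xahaud.cfg <<'EOF'

[compression]
true
EOF
```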
#
|
||||
#
|
||||
# [ips]
|
||||
#
|
||||
# List of hostnames or ips where the Ripple protocol is served. A default
|
||||
@@ -461,19 +478,6 @@
|
||||
#
|
||||
#
|
||||
#
|
||||
# [sntp_servers]
|
||||
#
|
||||
# IP address or domain of NTP servers to use for time synchronization.
|
||||
#
|
||||
# These NTP servers are suitable for rippled servers located in the United
|
||||
# States:
|
||||
# time.windows.com
|
||||
# time.apple.com
|
||||
# time.nist.gov
|
||||
# pool.ntp.org
|
||||
#
|
||||
#
|
||||
#
|
||||
# [max_transactions]
|
||||
#
|
||||
# Configure the maximum number of transactions to have in the job queue
|
||||
@@ -1056,7 +1060,18 @@
# Cassandra is an alternative backend to be used only with Reporting Mode.
# See the Reporting Mode section for more details about Reporting Mode.
#
# Required keys for NuDB and RocksDB:
# type = RWDB
#
# RWDB is a high-performance memory store written by XRPL-Labs and optimized
# for xahaud. RWDB is NOT persistent and the data will be lost on restart.
# RWDB is recommended for Validator and Peer nodes that are not required to
# store history.
#
# RWDB maintains its high speed regardless of the amount of history
# stored. Online delete should NOT be used; instead, RWDB will use the
# ledger_history config value to determine how many ledgers to keep in memory.
#
# Required keys for NuDB, RWDB and RocksDB:
#
# path Location to store the database
#
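For reference, here is a small Python sketch that renders the stanzas discussed above for a non-history RWDB node. The type and path keys and the separate ledger_history value follow the comments in this file; the concrete path and ledger count are placeholders, not recommendations:

```
# Sketch: emit a [node_db] stanza for RWDB plus the [ledger_history] value
# it reads, as described in the comments above. Values are placeholders.
def rwdb_stanzas(db_path: str, ledgers: int) -> str:
    return (
        "[node_db]\n"
        "type=RWDB\n"
        f"path={db_path}\n"
        "\n"
        "[ledger_history]\n"
        f"{ledgers}\n"
    )

if __name__ == "__main__":
    print(rwdb_stanzas("/var/lib/xahaud/db/rwdb", 256))
```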
@@ -1112,7 +1127,8 @@
# online_delete Minimum value of 256. Enable automatic purging
# of older ledger information. Maintain at least this
# number of ledger records online. Must be greater
# than or equal to ledger_history.
# than or equal to ledger_history. If using RWDB,
# this value is ignored.
#
# These keys modify the behavior of online_delete, and thus are only
# relevant if online_delete is defined and non-zero:
@@ -1637,6 +1653,7 @@ port = 6006
ip = 127.0.0.1
admin = 127.0.0.1
protocol = ws
send_queue_limit = 500

[port_grpc]
port = 50051
@@ -1647,6 +1664,7 @@ secure_gateway = 127.0.0.1
#port = 6005
#ip = 127.0.0.1
#protocol = wss
#send_queue_limit = 500

#-------------------------------------------------------------------------------

@@ -1702,12 +1720,6 @@ advisory_delete=0
[debug_logfile]
/var/log/rippled/debug.log

[sntp_servers]
time.windows.com
time.apple.com
time.nist.gov
pool.ntp.org

# To use the XRP test network
# (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
# use the following [ips] section:

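The [port_ws] stanza above exposes an admin WebSocket on 127.0.0.1:6006. As a quick sanity check, a hedged Python sketch of how a local tool might query it; it assumes the third-party websocket-client package is installed and uses the standard rippled/xahaud WebSocket command format:

```
# Minimal admin WebSocket call against the [port_ws] stanza above.
# Assumes `pip install websocket-client`; host and port mirror the config.
import json
from websocket import create_connection

ws = create_connection("ws://127.0.0.1:6006")
ws.send(json.dumps({"id": 1, "command": "server_info"}))
reply = json.loads(ws.recv())
print(reply["result"]["info"]["build_version"])
ws.close()
```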
@@ -450,19 +450,6 @@
#
#
#
# [sntp_servers]
#
# IP address or domain of NTP servers to use for time synchronization.
#
# These NTP servers are suitable for rippled servers located in the United
# States:
# time.windows.com
# time.apple.com
# time.nist.gov
# pool.ntp.org
#
#
#
# [max_transactions]
#
# Configure the maximum number of transactions to have in the job queue
@@ -1662,12 +1649,6 @@ advisory_delete=0
[debug_logfile]
/var/log/rippled-reporting/debug.log

[sntp_servers]
time.windows.com
time.apple.com
time.nist.gov
pool.ntp.org

# To use the XRP test network
# (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
# use the following [ips] section:

@@ -144,4 +144,12 @@ D686F2538F410C9D0D856788E98E3579595DAF7B38D38887F81ECAC934B06040 HooksUpdate1
86E83A7D2ECE3AD5FA87AB2195AE015C950469ABF0B72EAACED318F74886AE90 CryptoConditionsSuite
3C43D9A973AA4443EF3FC38E42DD306160FBFFDAB901CD8BAA15D09F2597EB87 NonFungibleTokensV1
0285B7E5E08E1A8E4C15636F0591D87F73CB6A7B6452A932AD72BBC8E5D1CBE3 fixNFTokenDirV1
36799EA497B1369B170805C078AEFE6188345F9B3E324C21E9CA3FF574E3C3D6 fixNFTokenNegOffer
4C499D17719BB365B69010A436B64FD1A82AAB199FC1CEB06962EBD01059FB09 fixXahauV1
215181D23BF5C173314B5FDB9C872C92DE6CC918483727DE037C0C13E7E6EE9D fixXahauV2
0D8BF22FF7570D58598D1EF19EBB6E142AD46E59A223FD3816262FBB69345BEA Remit
7CA0426E7F411D39BB014E57CD9E08F61DE1750F0D41FCD428D9FB80BB7596B0 ZeroB2M
4B8466415FAB32FFA89D9DCBE166A42340115771DF611A7160F8D7439C87ECD8 fixNSDelete
EDB4EE4C524E16BDD91D9A529332DED08DCAAA51CC6DC897ACFA1A0ED131C5B6 fix240819
8063140E9260799D6716756B891CEC3E7006C4E4F277AB84670663A88F94B9C4 fixPageCap
88693F108C3CD8A967F3F4253A32DEF5E35F9406ACD2A11B88B11D90865763A9 fix240911

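The feature table above pairs each amendment name with a 256-bit ID. By convention in rippled and xahaud, that ID is the SHA-512Half of the amendment name (the first 256 bits of its SHA-512 digest); the sketch below shows the computation as a convention check against the table rather than authoritative tooling:

```
# Compute an amendment ID as SHA-512Half of its name, e.g. compare
# amendment_id("fixXahauV1") with the entry in the table above.
import hashlib

def amendment_id(name: str) -> str:
    return hashlib.sha512(name.encode("ascii")).digest()[:32].hex().upper()

if __name__ == "__main__":
    print(amendment_id("fixXahauV1"))
```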
162 conanfile.py Normal file
@@ -0,0 +1,162 @@
from conan import ConanFile
|
||||
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
|
||||
import re
|
||||
|
||||
class Xrpl(ConanFile):
|
||||
name = 'xrpl'
|
||||
|
||||
license = 'ISC'
|
||||
author = 'John Freeman <jfreeman@ripple.com>'
|
||||
url = 'https://github.com/xrplf/rippled'
|
||||
description = 'The XRP Ledger'
|
||||
settings = 'os', 'compiler', 'build_type', 'arch'
|
||||
options = {
|
||||
'assertions': [True, False],
|
||||
'coverage': [True, False],
|
||||
'fPIC': [True, False],
|
||||
'jemalloc': [True, False],
|
||||
'reporting': [True, False],
|
||||
'rocksdb': [True, False],
|
||||
'shared': [True, False],
|
||||
'static': [True, False],
|
||||
'tests': [True, False],
|
||||
'unity': [True, False],
|
||||
}
|
||||
|
||||
requires = [
|
||||
'boost/1.86.0',
|
||||
'date/3.0.1',
|
||||
'libarchive/3.6.0',
|
||||
'lz4/1.9.3',
|
||||
'grpc/1.50.1',
|
||||
'nudb/2.0.8',
|
||||
'openssl/1.1.1u',
|
||||
'protobuf/3.21.9',
|
||||
'snappy/1.1.10',
|
||||
'soci/4.0.3',
|
||||
'sqlite3/3.42.0',
|
||||
'zlib/1.2.13',
|
||||
'wasmedge/0.11.2',
|
||||
]
|
||||
|
||||
default_options = {
|
||||
'assertions': False,
|
||||
'coverage': False,
|
||||
'fPIC': True,
|
||||
'jemalloc': False,
|
||||
'reporting': False,
|
||||
'rocksdb': True,
|
||||
'shared': False,
|
||||
'static': True,
|
||||
'tests': True,
|
||||
'unity': False,
|
||||
|
||||
'cassandra-cpp-driver:shared': False,
|
||||
'date:header_only': True,
|
||||
'grpc:shared': False,
|
||||
'grpc:secure': True,
|
||||
'libarchive:shared': False,
|
||||
'libarchive:with_acl': False,
|
||||
'libarchive:with_bzip2': False,
|
||||
'libarchive:with_cng': False,
|
||||
'libarchive:with_expat': False,
|
||||
'libarchive:with_iconv': False,
|
||||
'libarchive:with_libxml2': False,
|
||||
'libarchive:with_lz4': True,
|
||||
'libarchive:with_lzma': False,
|
||||
'libarchive:with_lzo': False,
|
||||
'libarchive:with_nettle': False,
|
||||
'libarchive:with_openssl': False,
|
||||
'libarchive:with_pcreposix': False,
|
||||
'libarchive:with_xattr': False,
|
||||
'libarchive:with_zlib': False,
|
||||
'libpq:shared': False,
|
||||
'lz4:shared': False,
|
||||
'openssl:shared': False,
|
||||
'protobuf:shared': False,
|
||||
'protobuf:with_zlib': True,
|
||||
'rocksdb:enable_sse': False,
|
||||
'rocksdb:lite': False,
|
||||
'rocksdb:shared': False,
|
||||
'rocksdb:use_rtti': True,
|
||||
'rocksdb:with_jemalloc': False,
|
||||
'rocksdb:with_lz4': True,
|
||||
'rocksdb:with_snappy': True,
|
||||
'snappy:shared': False,
|
||||
'soci:shared': False,
|
||||
'soci:with_sqlite3': True,
|
||||
'soci:with_boost': True,
|
||||
}
|
||||
|
||||
def set_version(self):
|
||||
path = f'{self.recipe_folder}/src/ripple/protocol/impl/BuildInfo.cpp'
|
||||
regex = r'versionString\s?=\s?\"(.*)\"'
|
||||
with open(path, 'r') as file:
|
||||
matches = (re.search(regex, line) for line in file)
|
||||
match = next(m for m in matches if m)
|
||||
self.version = match.group(1)
|
||||
|
||||
def configure(self):
|
||||
if self.settings.compiler == 'apple-clang':
|
||||
self.options['boost'].visibility = 'global'
|
||||
|
||||
def requirements(self):
|
||||
if self.options.jemalloc:
|
||||
self.requires('jemalloc/5.2.1')
|
||||
if self.options.reporting:
|
||||
self.requires('cassandra-cpp-driver/2.15.3')
|
||||
self.requires('libpq/13.6')
|
||||
if self.options.rocksdb:
|
||||
self.requires('rocksdb/6.27.3')
|
||||
|
||||
exports_sources = (
|
||||
'CMakeLists.txt', 'Builds/*', 'bin/getRippledInfo', 'src/*', 'cfg/*'
|
||||
)
|
||||
|
||||
def layout(self):
|
||||
cmake_layout(self)
|
||||
# Fix this setting to follow the default introduced in Conan 1.48
|
||||
# to align with our build instructions.
|
||||
self.folders.generators = 'build/generators'
|
||||
|
||||
generators = 'CMakeDeps'
|
||||
def generate(self):
|
||||
tc = CMakeToolchain(self)
|
||||
tc.variables['tests'] = self.options.tests
|
||||
tc.variables['assert'] = self.options.assertions
|
||||
tc.variables['coverage'] = self.options.coverage
|
||||
tc.variables['jemalloc'] = self.options.jemalloc
|
||||
tc.variables['reporting'] = self.options.reporting
|
||||
tc.variables['rocksdb'] = self.options.rocksdb
|
||||
tc.variables['BUILD_SHARED_LIBS'] = self.options.shared
|
||||
tc.variables['static'] = self.options.static
|
||||
tc.variables['unity'] = self.options.unity
|
||||
tc.generate()
|
||||
|
||||
def build(self):
|
||||
cmake = CMake(self)
|
||||
cmake.verbose = True
|
||||
cmake.configure()
|
||||
cmake.build()
|
||||
|
||||
def package(self):
|
||||
cmake = CMake(self)
|
||||
cmake.verbose = True
|
||||
cmake.install()
|
||||
|
||||
def package_info(self):
|
||||
libxrpl = self.cpp_info.components['libxrpl']
|
||||
libxrpl.libs = [
|
||||
'libxrpl_core.a',
|
||||
'libed25519.a',
|
||||
'libsecp256k1.a',
|
||||
]
|
||||
# TODO: Fix the protobufs to include each other relative to
|
||||
# `include/`, not `include/ripple/proto/`.
|
||||
libxrpl.includedirs = ['include', 'include/ripple/proto']
|
||||
libxrpl.requires = [
|
||||
'boost::boost',
|
||||
'openssl::crypto',
|
||||
'date::date',
|
||||
'grpc::grpc++',
|
||||
]
|
||||
11 docker-unit-tests.sh Normal file → Executable file
@@ -1,4 +1,11 @@
#!/bin/bash
#!/bin/bash -x

docker run --rm -i -v $(pwd):/io ubuntu sh -c '/io/release-build/xahaud -u'
BUILD_CORES=$(echo "scale=0 ; `nproc` / 1.337" | bc)

if [[ "$GITHUB_REPOSITORY" == "" ]]; then
#Default
BUILD_CORES=8
fi

echo "Mounting $(pwd)/io in ubuntu and running unit tests"
docker run --rm -i -v $(pwd):/io -e BUILD_CORES=$BUILD_CORES ubuntu sh -c '/io/release-build/xahaud --unittest-jobs $BUILD_CORES -u'

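The script sizes --unittest-jobs by dividing nproc by 1.337 with bc, and falls back to a fixed 8 cores when GITHUB_REPOSITORY is unset. The same rule in Python, for anyone reusing it outside the shell script (the divisor and fallback are taken directly from the lines above):

```
# Same job-count rule as docker-unit-tests.sh: nproc / 1.337, truncated,
# with a fixed fallback of 8 when GITHUB_REPOSITORY is not set.
import os

def build_cores() -> int:
    if not os.environ.get("GITHUB_REPOSITORY"):
        return 8
    return max(1, int(os.cpu_count() / 1.337))

if __name__ == "__main__":
    print(build_cores())
```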
84
docs/build/environment.md
vendored
Normal file
84
docs/build/environment.md
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
Our [build instructions][BUILD.md] assume you have a C++ development
|
||||
environment complete with Git, Python, Conan, CMake, and a C++ compiler.
|
||||
This document exists to help readers set one up on any of the Big Three
|
||||
platforms: Linux, macOS, or Windows.
|
||||
|
||||
[BUILD.md]: ../../BUILD.md
|
||||
|
||||
|
||||
## Linux
|
||||
|
||||
Package ecosystems vary across Linux distributions,
|
||||
so there is no one set of instructions that will work for every Linux user.
|
||||
These instructions are written for Ubuntu 22.04.
|
||||
They are largely copied from the [script][1] used to configure our Docker
|
||||
container for continuous integration.
|
||||
That script handles many more responsibilities.
|
||||
These instructions are just the bare minimum to build one configuration of
|
||||
rippled.
|
||||
You can check that codebase for other Linux distributions and versions.
|
||||
If you cannot find yours there,
|
||||
then we hope that these instructions can at least guide you in the right
|
||||
direction.
|
||||
|
||||
```
|
||||
apt update
|
||||
apt install --yes curl git libssl-dev python3.10-dev python3-pip make g++-11
|
||||
|
||||
curl --location --remote-name \
|
||||
"https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1.tar.gz"
|
||||
tar -xzf cmake-3.25.1.tar.gz
|
||||
rm cmake-3.25.1.tar.gz
|
||||
cd cmake-3.25.1
|
||||
./bootstrap --parallel=$(nproc)
|
||||
make --jobs $(nproc)
|
||||
make install
|
||||
cd ..
|
||||
|
||||
pip3 install 'conan<2'
|
||||
```
|
||||
|
||||
[1]: https://github.com/thejohnfreeman/rippled-docker/blob/master/ubuntu-22.04/install.sh
|
||||
|
||||
|
||||
## macOS
|
||||
|
||||
Open a Terminal and enter the below command to bring up a dialog to install
|
||||
the command line developer tools.
|
||||
Once it is finished, this command should return a version greater than the
|
||||
minimum required (see [BUILD.md][]).
|
||||
|
||||
```
|
||||
clang --version
|
||||
```
|
||||
|
||||
The command line developer tools should include Git too:
|
||||
|
||||
```
|
||||
git --version
|
||||
```
|
||||
|
||||
Install [Homebrew][],
|
||||
use it to install [pyenv][],
|
||||
use it to install Python,
|
||||
and use it to install Conan:
|
||||
|
||||
[Homebrew]: https://brew.sh/
|
||||
[pyenv]: https://github.com/pyenv/pyenv
|
||||
|
||||
```
|
||||
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
|
||||
brew update
|
||||
brew install xz
|
||||
brew install pyenv
|
||||
pyenv install 3.10-dev
|
||||
pyenv global 3.10-dev
|
||||
eval "$(pyenv init -)"
|
||||
pip install 'conan<2'
|
||||
```
|
||||
|
||||
Install CMake with Homebrew too:
|
||||
|
||||
```
|
||||
brew install cmake
|
||||
```
|
||||
159
docs/build/install.md
vendored
Normal file
159
docs/build/install.md
vendored
Normal file
@@ -0,0 +1,159 @@
|
||||
This document contains instructions for installing rippled.
|
||||
The APT package manager is common on Debian-based Linux distributions like
|
||||
Ubuntu,
|
||||
while the YUM package manager is common on Red Hat-based Linux distributions
|
||||
like CentOS.
|
||||
Installing from source is an option for all platforms,
|
||||
and the only supported option for installing custom builds.
|
||||
|
||||
|
||||
## From source
|
||||
|
||||
From a source build, you can install rippled and libxrpl using CMake's
|
||||
`--install` mode:
|
||||
|
||||
```
|
||||
cmake --install . --prefix /opt/local
|
||||
```
|
||||
|
||||
The default [prefix][1] is typically `/usr/local` on Linux and macOS and
|
||||
`C:/Program Files/rippled` on Windows.
|
||||
|
||||
[1]: https://cmake.org/cmake/help/latest/variable/CMAKE_INSTALL_PREFIX.html
|
||||
|
||||
|
||||
## With the APT package manager
|
||||
|
||||
1. Update repositories:
|
||||
|
||||
sudo apt update -y
|
||||
|
||||
2. Install utilities:
|
||||
|
||||
sudo apt install -y apt-transport-https ca-certificates wget gnupg
|
||||
|
||||
3. Add Ripple's package-signing GPG key to your list of trusted keys:
|
||||
|
||||
sudo mkdir /usr/local/share/keyrings/
|
||||
wget -q -O - "https://repos.ripple.com/repos/api/gpg/key/public" | gpg --dearmor > ripple-key.gpg
|
||||
sudo mv ripple-key.gpg /usr/local/share/keyrings
|
||||
|
||||
|
||||
4. Check the fingerprint of the newly-added key:
|
||||
|
||||
gpg /usr/local/share/keyrings/ripple-key.gpg
|
||||
|
||||
The output should include an entry for Ripple such as the following:
|
||||
|
||||
gpg: WARNING: no command supplied. Trying to guess what you mean ...
|
||||
pub rsa3072 2019-02-14 [SC] [expires: 2026-02-17]
|
||||
C0010EC205B35A3310DC90DE395F97FFCCAFD9A2
|
||||
uid TechOps Team at Ripple <techops+rippled@ripple.com>
|
||||
sub rsa3072 2019-02-14 [E] [expires: 2026-02-17]
|
||||
|
||||
|
||||
In particular, make sure that the fingerprint matches. (In the above example, the fingerprint is on the third line, starting with `C001`.)
|
||||
|
||||
4. Add the appropriate Ripple repository for your operating system version:
|
||||
|
||||
echo "deb [signed-by=/usr/local/share/keyrings/ripple-key.gpg] https://repos.ripple.com/repos/rippled-deb focal stable" | \
|
||||
sudo tee -a /etc/apt/sources.list.d/ripple.list
|
||||
|
||||
The above example is appropriate for **Ubuntu 20.04 Focal Fossa**. For other operating systems, replace the word `focal` with one of the following:
|
||||
|
||||
- `jammy` for **Ubuntu 22.04 Jammy Jellyfish**
|
||||
- `bionic` for **Ubuntu 18.04 Bionic Beaver**
|
||||
- `bullseye` for **Debian 11 Bullseye**
|
||||
- `buster` for **Debian 10 Buster**
|
||||
|
||||
If you want access to development or pre-release versions of `rippled`, use one of the following instead of `stable`:
|
||||
|
||||
- `unstable` - Pre-release builds ([`release` branch](https://github.com/ripple/rippled/tree/release))
|
||||
- `nightly` - Experimental/development builds ([`develop` branch](https://github.com/ripple/rippled/tree/develop))
|
||||
|
||||
**Warning:** Unstable and nightly builds may be broken at any time. Do not use these builds for production servers.
|
||||
|
||||
5. Fetch the Ripple repository.
|
||||
|
||||
sudo apt -y update
|
||||
|
||||
6. Install the `rippled` software package:
|
||||
|
||||
sudo apt -y install rippled
|
||||
|
||||
7. Check the status of the `rippled` service:
|
||||
|
||||
systemctl status rippled.service
|
||||
|
||||
The `rippled` service should start automatically. If not, you can start it manually:
|
||||
|
||||
sudo systemctl start rippled.service
|
||||
|
||||
8. Optional: allow `rippled` to bind to privileged ports.
|
||||
|
||||
This allows you to serve incoming API requests on port 80 or 443. (If you want to do so, you must also update the config file's port settings.)
|
||||
|
||||
sudo setcap 'cap_net_bind_service=+ep' /opt/ripple/bin/rippled
|
||||
|
||||
|
||||
## With the YUM package manager
|
||||
|
||||
1. Install the Ripple RPM repository:
|
||||
|
||||
Choose the appropriate RPM repository for the stability of releases you want:
|
||||
|
||||
- `stable` for the latest production release (`master` branch)
|
||||
- `unstable` for pre-release builds (`release` branch)
|
||||
- `nightly` for experimental/development builds (`develop` branch)
|
||||
|
||||
*Stable*
|
||||
|
||||
cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
|
||||
[ripple-stable]
|
||||
name=XRP Ledger Packages
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
repo_gpgcheck=1
|
||||
baseurl=https://repos.ripple.com/repos/rippled-rpm/stable/
|
||||
gpgkey=https://repos.ripple.com/repos/rippled-rpm/stable/repodata/repomd.xml.key
|
||||
REPOFILE
|
||||
|
||||
*Unstable*
|
||||
|
||||
cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
|
||||
[ripple-unstable]
|
||||
name=XRP Ledger Packages
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
repo_gpgcheck=1
|
||||
baseurl=https://repos.ripple.com/repos/rippled-rpm/unstable/
|
||||
gpgkey=https://repos.ripple.com/repos/rippled-rpm/unstable/repodata/repomd.xml.key
|
||||
REPOFILE
|
||||
|
||||
*Nightly*
|
||||
|
||||
cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
|
||||
[ripple-nightly]
|
||||
name=XRP Ledger Packages
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
repo_gpgcheck=1
|
||||
baseurl=https://repos.ripple.com/repos/rippled-rpm/nightly/
|
||||
gpgkey=https://repos.ripple.com/repos/rippled-rpm/nightly/repodata/repomd.xml.key
|
||||
REPOFILE
|
||||
|
||||
2. Fetch the latest repo updates:
|
||||
|
||||
sudo yum -y update
|
||||
|
||||
3. Install the new `rippled` package:
|
||||
|
||||
sudo yum install -y rippled
|
||||
|
||||
4. Configure the `rippled` service to start on boot:
|
||||
|
||||
sudo systemctl enable rippled.service
|
||||
|
||||
5. Start the `rippled` service:
|
||||
|
||||
sudo systemctl start rippled.service
|
||||
193
external/rocksdb/conanfile.py
vendored
Normal file
193
external/rocksdb/conanfile.py
vendored
Normal file
@@ -0,0 +1,193 @@
|
||||
import os
|
||||
import shutil
|
||||
from conans import ConanFile, CMake
|
||||
from conan.tools import microsoft as ms
|
||||
|
||||
class RocksDB(ConanFile):
|
||||
name = 'rocksdb'
|
||||
version = '6.27.3'
|
||||
|
||||
license = ('GPL-2.0-only', 'Apache-2.0')
|
||||
url = 'https://github.com/conan-io/conan-center-index'
|
||||
description = 'A library that provides an embeddable, persistent key-value store for fast storage'
|
||||
topics = ('rocksdb', 'database', 'leveldb', 'facebook', 'key-value')
|
||||
|
||||
settings = 'os', 'compiler', 'build_type', 'arch'
|
||||
options = {
|
||||
'enable_sse': [False, 'sse42', 'avx2'],
|
||||
'fPIC': [True, False],
|
||||
'lite': [True, False],
|
||||
'shared': [True, False],
|
||||
'use_rtti': [True, False],
|
||||
'with_gflags': [True, False],
|
||||
'with_jemalloc': [True, False],
|
||||
'with_lz4': [True, False],
|
||||
'with_snappy': [True, False],
|
||||
'with_tbb': [True, False],
|
||||
'with_zlib': [True, False],
|
||||
'with_zstd': [True, False],
|
||||
}
|
||||
default_options = {
|
||||
'enable_sse': False,
|
||||
'fPIC': True,
|
||||
'lite': False,
|
||||
'shared': False,
|
||||
'use_rtti': False,
|
||||
'with_gflags': False,
|
||||
'with_jemalloc': False,
|
||||
'with_lz4': False,
|
||||
'with_snappy': False,
|
||||
'with_tbb': False,
|
||||
'with_zlib': False,
|
||||
'with_zstd': False,
|
||||
}
|
||||
|
||||
def requirements(self):
|
||||
if self.options.with_gflags:
|
||||
self.requires('gflags/2.2.2')
|
||||
if self.options.with_jemalloc:
|
||||
self.requires('jemalloc/5.2.1')
|
||||
if self.options.with_lz4:
|
||||
self.requires('lz4/1.9.3')
|
||||
if self.options.with_snappy:
|
||||
self.requires('snappy/1.1.9')
|
||||
if self.options.with_tbb:
|
||||
self.requires('onetbb/2020.3')
|
||||
if self.options.with_zlib:
|
||||
self.requires('zlib/1.2.11')
|
||||
if self.options.with_zstd:
|
||||
self.requires('zstd/1.5.2')
|
||||
|
||||
def config_options(self):
|
||||
if self.settings.os == 'Windows':
|
||||
del self.options.fPIC
|
||||
|
||||
def configure(self):
|
||||
if self.options.shared:
|
||||
del self.options.fPIC
|
||||
|
||||
generators = 'cmake', 'cmake_find_package'
|
||||
|
||||
scm = {
|
||||
'type': 'git',
|
||||
'url': 'https://github.com/facebook/rocksdb.git',
|
||||
'revision': 'v6.27.3',
|
||||
}
|
||||
|
||||
exports_sources = 'thirdparty.inc'
|
||||
# For out-of-source build.
|
||||
no_copy_source = True
|
||||
|
||||
_cmake = None
|
||||
|
||||
def _configure_cmake(self):
|
||||
if self._cmake:
|
||||
return
|
||||
|
||||
self._cmake = CMake(self)
|
||||
|
||||
self._cmake.definitions['CMAKE_POSITION_INDEPENDENT_CODE'] = True
|
||||
|
||||
self._cmake.definitions['DISABLE_STALL_NOTIF'] = False
|
||||
self._cmake.definitions['FAIL_ON_WARNINGS'] = False
|
||||
self._cmake.definitions['OPTDBG'] = True
|
||||
self._cmake.definitions['WITH_TESTS'] = False
|
||||
self._cmake.definitions['WITH_TOOLS'] = False
|
||||
|
||||
self._cmake.definitions['WITH_GFLAGS'] = self.options.with_gflags
|
||||
self._cmake.definitions['WITH_JEMALLOC'] = self.options.with_jemalloc
|
||||
self._cmake.definitions['WITH_LZ4'] = self.options.with_lz4
|
||||
self._cmake.definitions['WITH_SNAPPY'] = self.options.with_snappy
|
||||
self._cmake.definitions['WITH_TBB'] = self.options.with_tbb
|
||||
self._cmake.definitions['WITH_ZLIB'] = self.options.with_zlib
|
||||
self._cmake.definitions['WITH_ZSTD'] = self.options.with_zstd
|
||||
|
||||
self._cmake.definitions['USE_RTTI'] = self.options.use_rtti
|
||||
self._cmake.definitions['ROCKSDB_LITE'] = self.options.lite
|
||||
self._cmake.definitions['ROCKSDB_INSTALL_ON_WINDOWS'] = (
|
||||
self.settings.os == 'Windows'
|
||||
)
|
||||
|
||||
if not self.options.enable_sse:
|
||||
self._cmake.definitions['PORTABLE'] = True
|
||||
self._cmake.definitions['FORCE_SSE42'] = False
|
||||
elif self.options.enable_sse == 'sse42':
|
||||
self._cmake.definitions['PORTABLE'] = True
|
||||
self._cmake.definitions['FORCE_SSE42'] = True
|
||||
elif self.options.enable_sse == 'avx2':
|
||||
self._cmake.definitions['PORTABLE'] = False
|
||||
self._cmake.definitions['FORCE_SSE42'] = False
|
||||
|
||||
self._cmake.definitions['WITH_ASAN'] = False
|
||||
self._cmake.definitions['WITH_BZ2'] = False
|
||||
self._cmake.definitions['WITH_JNI'] = False
|
||||
self._cmake.definitions['WITH_LIBRADOS'] = False
|
||||
if ms.is_msvc(self):
|
||||
self._cmake.definitions['WITH_MD_LIBRARY'] = (
|
||||
ms.msvc_runtime_flag(self).startswith('MD')
|
||||
)
|
||||
self._cmake.definitions['WITH_RUNTIME_DEBUG'] = (
|
||||
ms.msvc_runtime_flag(self).endswith('d')
|
||||
)
|
||||
self._cmake.definitions['WITH_NUMA'] = False
|
||||
self._cmake.definitions['WITH_TSAN'] = False
|
||||
self._cmake.definitions['WITH_UBSAN'] = False
|
||||
self._cmake.definitions['WITH_WINDOWS_UTF8_FILENAMES'] = False
|
||||
self._cmake.definitions['WITH_XPRESS'] = False
|
||||
self._cmake.definitions['WITH_FALLOCATE'] = True
|
||||
|
||||
|
||||
def build(self):
|
||||
if ms.is_msvc(self):
|
||||
file = os.path.join(
|
||||
self.recipe_folder, '..', 'export_source', 'thirdparty.inc'
|
||||
)
|
||||
shutil.copy(file, self.build_folder)
|
||||
self._configure_cmake()
|
||||
self._cmake.configure()
|
||||
self._cmake.build()
|
||||
|
||||
def package(self):
|
||||
self._configure_cmake()
|
||||
self._cmake.install()
|
||||
|
||||
def package_info(self):
|
||||
self.cpp_info.filenames['cmake_find_package'] = 'RocksDB'
|
||||
self.cpp_info.filenames['cmake_find_package_multi'] = 'RocksDB'
|
||||
self.cpp_info.set_property('cmake_file_name', 'RocksDB')
|
||||
|
||||
self.cpp_info.names['cmake_find_package'] = 'RocksDB'
|
||||
self.cpp_info.names['cmake_find_package_multi'] = 'RocksDB'
|
||||
|
||||
self.cpp_info.components['librocksdb'].names['cmake_find_package'] = 'rocksdb'
|
||||
self.cpp_info.components['librocksdb'].names['cmake_find_package_multi'] = 'rocksdb'
|
||||
self.cpp_info.components['librocksdb'].set_property(
|
||||
'cmake_target_name', 'RocksDB::rocksdb'
|
||||
)
|
||||
|
||||
self.cpp_info.components['librocksdb'].libs = ['rocksdb']
|
||||
|
||||
if self.settings.os == "Windows":
|
||||
self.cpp_info.components["librocksdb"].system_libs = ["shlwapi", "rpcrt4"]
|
||||
if self.options.shared:
|
||||
self.cpp_info.components["librocksdb"].defines = ["ROCKSDB_DLL"]
|
||||
elif self.settings.os in ["Linux", "FreeBSD"]:
|
||||
self.cpp_info.components["librocksdb"].system_libs = ["pthread", "m"]
|
||||
|
||||
if self.options.lite:
|
||||
self.cpp_info.components["librocksdb"].defines.append("ROCKSDB_LITE")
|
||||
|
||||
if self.options.with_gflags:
|
||||
self.cpp_info.components["librocksdb"].requires.append("gflags::gflags")
|
||||
if self.options.with_jemalloc:
|
||||
self.cpp_info.components["librocksdb"].requires.append("jemalloc::jemalloc")
|
||||
if self.options.with_lz4:
|
||||
self.cpp_info.components["librocksdb"].requires.append("lz4::lz4")
|
||||
if self.options.with_snappy:
|
||||
self.cpp_info.components["librocksdb"].requires.append("snappy::snappy")
|
||||
if self.options.with_tbb:
|
||||
self.cpp_info.components["librocksdb"].requires.append("onetbb::onetbb")
|
||||
if self.options.with_zlib:
|
||||
self.cpp_info.components["librocksdb"].requires.append("zlib::zlib")
|
||||
if self.options.with_zstd:
|
||||
self.cpp_info.components["librocksdb"].requires.append("zstd::zstd")
|
||||
62
external/rocksdb/thirdparty.inc
vendored
Normal file
62
external/rocksdb/thirdparty.inc
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
if(WITH_GFLAGS)
|
||||
# Config with namespace available since gflags 2.2.2
|
||||
find_package(gflags REQUIRED)
|
||||
set(GFLAGS_LIB gflags::gflags)
|
||||
list(APPEND THIRDPARTY_LIBS ${GFLAGS_LIB})
|
||||
add_definitions(-DGFLAGS=1)
|
||||
endif()
|
||||
|
||||
if(WITH_SNAPPY)
|
||||
find_package(Snappy REQUIRED)
|
||||
add_definitions(-DSNAPPY)
|
||||
list(APPEND THIRDPARTY_LIBS Snappy::snappy)
|
||||
endif()
|
||||
|
||||
if(WITH_LZ4)
|
||||
find_package(lz4 REQUIRED)
|
||||
add_definitions(-DLZ4)
|
||||
list(APPEND THIRDPARTY_LIBS lz4::lz4)
|
||||
endif()
|
||||
|
||||
if(WITH_ZLIB)
|
||||
find_package(ZLIB REQUIRED)
|
||||
add_definitions(-DZLIB)
|
||||
list(APPEND THIRDPARTY_LIBS ZLIB::ZLIB)
|
||||
endif()
|
||||
|
||||
option(WITH_BZ2 "build with bzip2" OFF)
|
||||
if(WITH_BZ2)
|
||||
find_package(BZip2 REQUIRED)
|
||||
add_definitions(-DBZIP2)
|
||||
list(APPEND THIRDPARTY_LIBS BZip2::BZip2)
|
||||
endif()
|
||||
|
||||
if(WITH_ZSTD)
|
||||
find_package(zstd REQUIRED)
|
||||
add_definitions(-DZSTD)
|
||||
list(APPEND THIRDPARTY_LIBS zstd::zstd)
|
||||
endif()
|
||||
|
||||
# ================================================== XPRESS ==================================================
|
||||
# This makes use of built-in Windows API, no additional includes, links to a system lib
|
||||
|
||||
if(WITH_XPRESS)
|
||||
message(STATUS "XPRESS is enabled")
|
||||
add_definitions(-DXPRESS)
|
||||
# We are using the implementation provided by the system
|
||||
list(APPEND SYSTEM_LIBS Cabinet.lib)
|
||||
else()
|
||||
message(STATUS "XPRESS is disabled")
|
||||
endif()
|
||||
|
||||
# ================================================== JEMALLOC ==================================================
|
||||
if(WITH_JEMALLOC)
|
||||
message(STATUS "JEMALLOC library is enabled")
|
||||
add_definitions(-DROCKSDB_JEMALLOC -DJEMALLOC_EXPORT= -DJEMALLOC_NO_RENAME)
|
||||
list(APPEND THIRDPARTY_LIBS jemalloc::jemalloc)
|
||||
set(ARTIFACT_SUFFIX "_je")
|
||||
|
||||
else ()
|
||||
set(ARTIFACT_SUFFIX "")
|
||||
message(STATUS "JEMALLOC library is disabled")
|
||||
endif ()
|
||||
40
external/snappy/conandata.yml
vendored
Normal file
40
external/snappy/conandata.yml
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
sources:
|
||||
"1.1.10":
|
||||
url: "https://github.com/google/snappy/archive/1.1.10.tar.gz"
|
||||
sha256: "49d831bffcc5f3d01482340fe5af59852ca2fe76c3e05df0e67203ebbe0f1d90"
|
||||
"1.1.9":
|
||||
url: "https://github.com/google/snappy/archive/1.1.9.tar.gz"
|
||||
sha256: "75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7"
|
||||
"1.1.8":
|
||||
url: "https://github.com/google/snappy/archive/1.1.8.tar.gz"
|
||||
sha256: "16b677f07832a612b0836178db7f374e414f94657c138e6993cbfc5dcc58651f"
|
||||
"1.1.7":
|
||||
url: "https://github.com/google/snappy/archive/1.1.7.tar.gz"
|
||||
sha256: "3dfa02e873ff51a11ee02b9ca391807f0c8ea0529a4924afa645fbf97163f9d4"
|
||||
patches:
|
||||
"1.1.10":
|
||||
- patch_file: "patches/1.1.10-0001-fix-inlining-failure.patch"
|
||||
patch_description: "disable inlining for compilation error"
|
||||
patch_type: "portability"
|
||||
- patch_file: "patches/1.1.9-0002-no-Werror.patch"
|
||||
patch_description: "disable 'warning as error' options"
|
||||
patch_type: "portability"
|
||||
- patch_file: "patches/1.1.10-0003-fix-clobber-list-older-llvm.patch"
|
||||
patch_description: "disable inline asm on apple-clang"
|
||||
patch_type: "portability"
|
||||
- patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
|
||||
patch_description: "remove 'disable rtti'"
|
||||
patch_type: "conan"
|
||||
"1.1.9":
|
||||
- patch_file: "patches/1.1.9-0001-fix-inlining-failure.patch"
|
||||
patch_description: "disable inlining for compilation error"
|
||||
patch_type: "portability"
|
||||
- patch_file: "patches/1.1.9-0002-no-Werror.patch"
|
||||
patch_description: "disable 'warning as error' options"
|
||||
patch_type: "portability"
|
||||
- patch_file: "patches/1.1.9-0003-fix-clobber-list-older-llvm.patch"
|
||||
patch_description: "disable inline asm on apple-clang"
|
||||
patch_type: "portability"
|
||||
- patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
|
||||
patch_description: "remove 'disable rtti'"
|
||||
patch_type: "conan"
|
||||
89
external/snappy/conanfile.py
vendored
Normal file
89
external/snappy/conanfile.py
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
from conan import ConanFile
|
||||
from conan.tools.build import check_min_cppstd
|
||||
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
|
||||
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
|
||||
from conan.tools.scm import Version
|
||||
import os
|
||||
|
||||
required_conan_version = ">=1.54.0"
|
||||
|
||||
|
||||
class SnappyConan(ConanFile):
|
||||
name = "snappy"
|
||||
description = "A fast compressor/decompressor"
|
||||
topics = ("google", "compressor", "decompressor")
|
||||
url = "https://github.com/conan-io/conan-center-index"
|
||||
homepage = "https://github.com/google/snappy"
|
||||
license = "BSD-3-Clause"
|
||||
|
||||
package_type = "library"
|
||||
settings = "os", "arch", "compiler", "build_type"
|
||||
options = {
|
||||
"shared": [True, False],
|
||||
"fPIC": [True, False],
|
||||
}
|
||||
default_options = {
|
||||
"shared": False,
|
||||
"fPIC": True,
|
||||
}
|
||||
|
||||
def export_sources(self):
|
||||
export_conandata_patches(self)
|
||||
|
||||
def config_options(self):
|
||||
if self.settings.os == 'Windows':
|
||||
del self.options.fPIC
|
||||
|
||||
def configure(self):
|
||||
if self.options.shared:
|
||||
self.options.rm_safe("fPIC")
|
||||
|
||||
def layout(self):
|
||||
cmake_layout(self, src_folder="src")
|
||||
|
||||
def validate(self):
|
||||
if self.settings.compiler.get_safe("cppstd"):
|
||||
check_min_cppstd(self, 11)
|
||||
|
||||
def source(self):
|
||||
get(self, **self.conan_data["sources"][self.version], strip_root=True)
|
||||
|
||||
def generate(self):
|
||||
tc = CMakeToolchain(self)
|
||||
tc.variables["SNAPPY_BUILD_TESTS"] = False
|
||||
if Version(self.version) >= "1.1.8":
|
||||
tc.variables["SNAPPY_FUZZING_BUILD"] = False
|
||||
tc.variables["SNAPPY_REQUIRE_AVX"] = False
|
||||
tc.variables["SNAPPY_REQUIRE_AVX2"] = False
|
||||
tc.variables["SNAPPY_INSTALL"] = True
|
||||
if Version(self.version) >= "1.1.9":
|
||||
tc.variables["SNAPPY_BUILD_BENCHMARKS"] = False
|
||||
tc.generate()
|
||||
|
||||
def build(self):
|
||||
apply_conandata_patches(self)
|
||||
cmake = CMake(self)
|
||||
cmake.configure()
|
||||
cmake.build()
|
||||
|
||||
def package(self):
|
||||
copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
|
||||
cmake = CMake(self)
|
||||
cmake.install()
|
||||
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
|
||||
|
||||
def package_info(self):
|
||||
self.cpp_info.set_property("cmake_file_name", "Snappy")
|
||||
self.cpp_info.set_property("cmake_target_name", "Snappy::snappy")
|
||||
# TODO: back to global scope in conan v2 once cmake_find_package* generators removed
|
||||
self.cpp_info.components["snappylib"].libs = ["snappy"]
|
||||
if not self.options.shared:
|
||||
if self.settings.os in ["Linux", "FreeBSD"]:
|
||||
self.cpp_info.components["snappylib"].system_libs.append("m")
|
||||
|
||||
# TODO: to remove in conan v2 once cmake_find_package* generators removed
|
||||
self.cpp_info.names["cmake_find_package"] = "Snappy"
|
||||
self.cpp_info.names["cmake_find_package_multi"] = "Snappy"
|
||||
self.cpp_info.components["snappylib"].names["cmake_find_package"] = "snappy"
|
||||
self.cpp_info.components["snappylib"].names["cmake_find_package_multi"] = "snappy"
|
||||
self.cpp_info.components["snappylib"].set_property("cmake_target_name", "Snappy::snappy")
|
||||
13
external/snappy/patches/1.1.10-0001-fix-inlining-failure.patch
vendored
Normal file
13
external/snappy/patches/1.1.10-0001-fix-inlining-failure.patch
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
diff --git a/snappy-stubs-internal.h b/snappy-stubs-internal.h
|
||||
index 1548ed7..3b4a9f3 100644
|
||||
--- a/snappy-stubs-internal.h
|
||||
+++ b/snappy-stubs-internal.h
|
||||
@@ -100,7 +100,7 @@
|
||||
|
||||
// Inlining hints.
|
||||
#if HAVE_ATTRIBUTE_ALWAYS_INLINE
|
||||
-#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
|
||||
+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
|
||||
#else
|
||||
#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
|
||||
#endif // HAVE_ATTRIBUTE_ALWAYS_INLINE
|
||||
13
external/snappy/patches/1.1.10-0003-fix-clobber-list-older-llvm.patch
vendored
Normal file
13
external/snappy/patches/1.1.10-0003-fix-clobber-list-older-llvm.patch
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
diff --git a/snappy.cc b/snappy.cc
|
||||
index d414718..e4efb59 100644
|
||||
--- a/snappy.cc
|
||||
+++ b/snappy.cc
|
||||
@@ -1132,7 +1132,7 @@ inline size_t AdvanceToNextTagX86Optimized(const uint8_t** ip_p, size_t* tag) {
|
||||
size_t literal_len = *tag >> 2;
|
||||
size_t tag_type = *tag;
|
||||
bool is_literal;
|
||||
-#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__)
|
||||
+#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
|
||||
// TODO clang misses the fact that the (c & 3) already correctly
|
||||
// sets the zero flag.
|
||||
asm("and $3, %k[tag_type]\n\t"
|
||||
14
external/snappy/patches/1.1.9-0001-fix-inlining-failure.patch
vendored
Normal file
14
external/snappy/patches/1.1.9-0001-fix-inlining-failure.patch
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
Fixes the following error:
|
||||
error: inlining failed in call to ‘always_inline’ ‘size_t snappy::AdvanceToNextTag(const uint8_t**, size_t*)’: function body can be overwritten at link time
|
||||
|
||||
--- snappy-stubs-internal.h
|
||||
+++ snappy-stubs-internal.h
|
||||
@@ -100,7 +100,7 @@
|
||||
|
||||
// Inlining hints.
|
||||
#ifdef HAVE_ATTRIBUTE_ALWAYS_INLINE
|
||||
-#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
|
||||
+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
|
||||
#else
|
||||
#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
|
||||
#endif
|
||||
12
external/snappy/patches/1.1.9-0002-no-Werror.patch
vendored
Normal file
12
external/snappy/patches/1.1.9-0002-no-Werror.patch
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
--- CMakeLists.txt
|
||||
+++ CMakeLists.txt
|
||||
@@ -69,7 +69,7 @@
|
||||
- # Use -Werror for clang only.
|
||||
+if(0)
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||
if(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
|
||||
endif(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
|
||||
endif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||
-
|
||||
+endif()
|
||||
12
external/snappy/patches/1.1.9-0003-fix-clobber-list-older-llvm.patch
vendored
Normal file
12
external/snappy/patches/1.1.9-0003-fix-clobber-list-older-llvm.patch
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
asm clobbers do not work for clang < 9 and apple-clang < 11 (found by SpaceIm)
|
||||
--- snappy.cc
|
||||
+++ snappy.cc
|
||||
@@ -1026,7 +1026,7 @@
|
||||
size_t literal_len = *tag >> 2;
|
||||
size_t tag_type = *tag;
|
||||
bool is_literal;
|
||||
-#if defined(__GNUC__) && defined(__x86_64__)
|
||||
+#if defined(__GNUC__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
|
||||
// TODO clang misses the fact that the (c & 3) already correctly
|
||||
// sets the zero flag.
|
||||
asm("and $3, %k[tag_type]\n\t"
|
||||
20
external/snappy/patches/1.1.9-0004-rtti-by-default.patch
vendored
Normal file
20
external/snappy/patches/1.1.9-0004-rtti-by-default.patch
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
--- a/CMakeLists.txt
|
||||
+++ b/CMakeLists.txt
|
||||
@@ -53,8 +53,6 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
add_definitions(-D_HAS_EXCEPTIONS=0)
|
||||
|
||||
# Disable RTTI.
|
||||
- string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
|
||||
else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
# Use -Wall for clang and gcc.
|
||||
if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
|
||||
@@ -78,8 +76,6 @@ endif()
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
|
||||
|
||||
# Disable RTTI.
|
||||
- string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
|
||||
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
|
||||
# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make
|
||||
12
external/soci/conandata.yml
vendored
Normal file
12
external/soci/conandata.yml
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
sources:
|
||||
"4.0.3":
|
||||
url: "https://github.com/SOCI/soci/archive/v4.0.3.tar.gz"
|
||||
sha256: "4b1ff9c8545c5d802fbe06ee6cd2886630e5c03bf740e269bb625b45cf934928"
|
||||
patches:
|
||||
"4.0.3":
|
||||
- patch_file: "patches/0001-Remove-hardcoded-INSTALL_NAME_DIR-for-relocatable-li.patch"
|
||||
patch_description: "Generate relocatable libraries on MacOS"
|
||||
patch_type: "portability"
|
||||
- patch_file: "patches/0002-Fix-soci_backend.patch"
|
||||
patch_description: "Fix variable names for dependencies"
|
||||
patch_type: "conan"
|
||||
212
external/soci/conanfile.py
vendored
Normal file
212
external/soci/conanfile.py
vendored
Normal file
@@ -0,0 +1,212 @@
|
||||
from conan import ConanFile
|
||||
from conan.tools.build import check_min_cppstd
|
||||
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
|
||||
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
|
||||
from conan.tools.microsoft import is_msvc
|
||||
from conan.tools.scm import Version
|
||||
from conan.errors import ConanInvalidConfiguration
|
||||
import os
|
||||
|
||||
required_conan_version = ">=1.55.0"
|
||||
|
||||
|
||||
class SociConan(ConanFile):
|
||||
name = "soci"
|
||||
homepage = "https://github.com/SOCI/soci"
|
||||
url = "https://github.com/conan-io/conan-center-index"
|
||||
description = "The C++ Database Access Library "
|
||||
topics = ("mysql", "odbc", "postgresql", "sqlite3")
|
||||
license = "BSL-1.0"
|
||||
|
||||
settings = "os", "arch", "compiler", "build_type"
|
||||
options = {
|
||||
"shared": [True, False],
|
||||
"fPIC": [True, False],
|
||||
"empty": [True, False],
|
||||
"with_sqlite3": [True, False],
|
||||
"with_db2": [True, False],
|
||||
"with_odbc": [True, False],
|
||||
"with_oracle": [True, False],
|
||||
"with_firebird": [True, False],
|
||||
"with_mysql": [True, False],
|
||||
"with_postgresql": [True, False],
|
||||
"with_boost": [True, False],
|
||||
}
|
||||
default_options = {
|
||||
"shared": False,
|
||||
"fPIC": True,
|
||||
"empty": False,
|
||||
"with_sqlite3": False,
|
||||
"with_db2": False,
|
||||
"with_odbc": False,
|
||||
"with_oracle": False,
|
||||
"with_firebird": False,
|
||||
"with_mysql": False,
|
||||
"with_postgresql": False,
|
||||
"with_boost": False,
|
||||
}
|
||||
|
||||
def export_sources(self):
|
||||
export_conandata_patches(self)
|
||||
|
||||
def layout(self):
|
||||
cmake_layout(self, src_folder="src")
|
||||
|
||||
def config_options(self):
|
||||
if self.settings.os == "Windows":
|
||||
self.options.rm_safe("fPIC")
|
||||
|
||||
def configure(self):
|
||||
if self.options.shared:
|
||||
self.options.rm_safe("fPIC")
|
||||
|
||||
def requirements(self):
|
||||
if self.options.with_sqlite3:
|
||||
self.requires("sqlite3/3.41.1")
|
||||
if self.options.with_odbc and self.settings.os != "Windows":
|
||||
self.requires("odbc/2.3.11")
|
||||
if self.options.with_mysql:
|
||||
self.requires("libmysqlclient/8.0.31")
|
||||
if self.options.with_postgresql:
|
||||
self.requires("libpq/14.7")
|
||||
if self.options.with_boost:
|
||||
self.requires("boost/1.81.0")
|
||||
|
||||
@property
|
||||
def _minimum_compilers_version(self):
|
||||
return {
|
||||
"Visual Studio": "14",
|
||||
"gcc": "4.8",
|
||||
"clang": "3.8",
|
||||
"apple-clang": "8.0"
|
||||
}
|
||||
|
||||
def validate(self):
|
||||
if self.settings.compiler.get_safe("cppstd"):
|
||||
check_min_cppstd(self, 11)
|
||||
|
||||
compiler = str(self.settings.compiler)
|
||||
compiler_version = Version(self.settings.compiler.version.value)
|
||||
if compiler not in self._minimum_compilers_version:
|
||||
self.output.warning("{} recipe lacks information about the {} compiler support.".format(self.name, self.settings.compiler))
|
||||
elif compiler_version < self._minimum_compilers_version[compiler]:
|
||||
raise ConanInvalidConfiguration("{} requires a {} version >= {}".format(self.name, compiler, compiler_version))
|
||||
|
||||
prefix = "Dependencies for"
|
||||
message = "not configured in this conan package."
|
||||
if self.options.with_db2:
|
||||
# self.requires("db2/0.0.0") # TODO add support for db2
|
||||
raise ConanInvalidConfiguration("{} DB2 {} ".format(prefix, message))
|
||||
if self.options.with_oracle:
|
||||
# self.requires("oracle_db/0.0.0") # TODO add support for oracle
|
||||
raise ConanInvalidConfiguration("{} ORACLE {} ".format(prefix, message))
|
||||
if self.options.with_firebird:
|
||||
# self.requires("firebird/0.0.0") # TODO add support for firebird
|
||||
raise ConanInvalidConfiguration("{} firebird {} ".format(prefix, message))
|
||||
|
||||
def source(self):
|
||||
get(self, **self.conan_data["sources"][self.version], strip_root=True)
|
||||
|
||||
def generate(self):
|
||||
tc = CMakeToolchain(self)
|
||||
|
||||
tc.variables["SOCI_SHARED"] = self.options.shared
|
||||
tc.variables["SOCI_STATIC"] = not self.options.shared
|
||||
tc.variables["SOCI_TESTS"] = False
|
||||
tc.variables["SOCI_CXX11"] = True
|
||||
tc.variables["SOCI_EMPTY"] = self.options.empty
|
||||
tc.variables["WITH_SQLITE3"] = self.options.with_sqlite3
|
||||
tc.variables["WITH_DB2"] = self.options.with_db2
|
||||
tc.variables["WITH_ODBC"] = self.options.with_odbc
|
||||
tc.variables["WITH_ORACLE"] = self.options.with_oracle
|
||||
tc.variables["WITH_FIREBIRD"] = self.options.with_firebird
|
||||
tc.variables["WITH_MYSQL"] = self.options.with_mysql
|
||||
tc.variables["WITH_POSTGRESQL"] = self.options.with_postgresql
|
||||
tc.variables["WITH_BOOST"] = self.options.with_boost
|
||||
tc.generate()
|
||||
|
||||
deps = CMakeDeps(self)
|
||||
deps.generate()
|
||||
|
||||
def build(self):
|
||||
apply_conandata_patches(self)
|
||||
cmake = CMake(self)
|
||||
cmake.configure()
|
||||
cmake.build()
|
||||
|
||||
def package(self):
|
||||
copy(self, "LICENSE_1_0.txt", dst=os.path.join(self.package_folder, "licenses"), src=self.source_folder)
|
||||
|
||||
cmake = CMake(self)
|
||||
cmake.install()
|
||||
|
||||
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
|
||||
|
||||
def package_info(self):
|
||||
self.cpp_info.set_property("cmake_file_name", "SOCI")
|
||||
|
||||
target_suffix = "" if self.options.shared else "_static"
|
||||
lib_prefix = "lib" if is_msvc(self) and not self.options.shared else ""
|
||||
version = Version(self.version)
|
||||
lib_suffix = "_{}_{}".format(version.major, version.minor) if self.settings.os == "Windows" else ""
|
||||
|
||||
# soci_core
|
||||
self.cpp_info.components["soci_core"].set_property("cmake_target_name", "SOCI::soci_core{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_core"].libs = ["{}soci_core{}".format(lib_prefix, lib_suffix)]
|
||||
if self.options.with_boost:
|
||||
self.cpp_info.components["soci_core"].requires.append("boost::boost")
|
||||
|
||||
# soci_empty
|
||||
if self.options.empty:
|
||||
self.cpp_info.components["soci_empty"].set_property("cmake_target_name", "SOCI::soci_empty{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_empty"].libs = ["{}soci_empty{}".format(lib_prefix, lib_suffix)]
|
||||
self.cpp_info.components["soci_empty"].requires = ["soci_core"]
|
||||
|
||||
# soci_sqlite3
|
||||
if self.options.with_sqlite3:
|
||||
self.cpp_info.components["soci_sqlite3"].set_property("cmake_target_name", "SOCI::soci_sqlite3{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_sqlite3"].libs = ["{}soci_sqlite3{}".format(lib_prefix, lib_suffix)]
|
||||
self.cpp_info.components["soci_sqlite3"].requires = ["soci_core", "sqlite3::sqlite3"]
|
||||
|
||||
# soci_odbc
|
||||
if self.options.with_odbc:
|
||||
self.cpp_info.components["soci_odbc"].set_property("cmake_target_name", "SOCI::soci_odbc{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_odbc"].libs = ["{}soci_odbc{}".format(lib_prefix, lib_suffix)]
|
||||
self.cpp_info.components["soci_odbc"].requires = ["soci_core"]
|
||||
if self.settings.os == "Windows":
|
||||
self.cpp_info.components["soci_odbc"].system_libs.append("odbc32")
|
||||
else:
|
||||
self.cpp_info.components["soci_odbc"].requires.append("odbc::odbc")
|
||||
|
||||
# soci_mysql
|
||||
if self.options.with_mysql:
|
||||
self.cpp_info.components["soci_mysql"].set_property("cmake_target_name", "SOCI::soci_mysql{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_mysql"].libs = ["{}soci_mysql{}".format(lib_prefix, lib_suffix)]
|
||||
self.cpp_info.components["soci_mysql"].requires = ["soci_core", "libmysqlclient::libmysqlclient"]
|
||||
|
||||
# soci_postgresql
|
||||
if self.options.with_postgresql:
|
||||
self.cpp_info.components["soci_postgresql"].set_property("cmake_target_name", "SOCI::soci_postgresql{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_postgresql"].libs = ["{}soci_postgresql{}".format(lib_prefix, lib_suffix)]
|
||||
self.cpp_info.components["soci_postgresql"].requires = ["soci_core", "libpq::libpq"]
|
||||
|
||||
# TODO: to remove in conan v2 once cmake_find_package* generators removed
|
||||
self.cpp_info.names["cmake_find_package"] = "SOCI"
|
||||
self.cpp_info.names["cmake_find_package_multi"] = "SOCI"
|
||||
self.cpp_info.components["soci_core"].names["cmake_find_package"] = "soci_core{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_core"].names["cmake_find_package_multi"] = "soci_core{}".format(target_suffix)
|
||||
if self.options.empty:
|
||||
self.cpp_info.components["soci_empty"].names["cmake_find_package"] = "soci_empty{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_empty"].names["cmake_find_package_multi"] = "soci_empty{}".format(target_suffix)
|
||||
if self.options.with_sqlite3:
|
||||
self.cpp_info.components["soci_sqlite3"].names["cmake_find_package"] = "soci_sqlite3{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_sqlite3"].names["cmake_find_package_multi"] = "soci_sqlite3{}".format(target_suffix)
|
||||
if self.options.with_odbc:
|
||||
self.cpp_info.components["soci_odbc"].names["cmake_find_package"] = "soci_odbc{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_odbc"].names["cmake_find_package_multi"] = "soci_odbc{}".format(target_suffix)
|
||||
if self.options.with_mysql:
|
||||
self.cpp_info.components["soci_mysql"].names["cmake_find_package"] = "soci_mysql{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_mysql"].names["cmake_find_package_multi"] = "soci_mysql{}".format(target_suffix)
|
||||
if self.options.with_postgresql:
|
||||
self.cpp_info.components["soci_postgresql"].names["cmake_find_package"] = "soci_postgresql{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_postgresql"].names["cmake_find_package_multi"] = "soci_postgresql{}".format(target_suffix)
|
||||
39
external/soci/patches/0001-Remove-hardcoded-INSTALL_NAME_DIR-for-relocatable-li.patch
vendored
Normal file
39
external/soci/patches/0001-Remove-hardcoded-INSTALL_NAME_DIR-for-relocatable-li.patch
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
From d491bf7b5040d314ffd0c6310ba01f78ff44c85e Mon Sep 17 00:00:00 2001
|
||||
From: Rasmus Thomsen <rasmus.thomsen@dampsoft.de>
|
||||
Date: Fri, 14 Apr 2023 09:16:29 +0200
|
||||
Subject: [PATCH] Remove hardcoded INSTALL_NAME_DIR for relocatable libraries
|
||||
on MacOS
|
||||
|
||||
---
|
||||
cmake/SociBackend.cmake | 2 +-
|
||||
src/core/CMakeLists.txt | 1 -
|
||||
2 files changed, 1 insertion(+), 2 deletions(-)
|
||||
|
||||
diff --git a/cmake/SociBackend.cmake b/cmake/SociBackend.cmake
|
||||
index 5d4ef0df..39fe1f77 100644
|
||||
--- a/cmake/SociBackend.cmake
|
||||
+++ b/cmake/SociBackend.cmake
|
||||
@@ -171,7 +171,7 @@ macro(soci_backend NAME)
|
||||
set_target_properties(${THIS_BACKEND_TARGET}
|
||||
PROPERTIES
|
||||
SOVERSION ${${PROJECT_NAME}_SOVERSION}
|
||||
- INSTALL_NAME_DIR ${CMAKE_INSTALL_PREFIX}/lib)
|
||||
+ )
|
||||
|
||||
if(APPLE)
|
||||
set_target_properties(${THIS_BACKEND_TARGET}
|
||||
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
|
||||
index 3e7deeae..f9eae564 100644
|
||||
--- a/src/core/CMakeLists.txt
|
||||
+++ b/src/core/CMakeLists.txt
|
||||
@@ -59,7 +59,6 @@ if (SOCI_SHARED)
|
||||
PROPERTIES
|
||||
VERSION ${SOCI_VERSION}
|
||||
SOVERSION ${SOCI_SOVERSION}
|
||||
- INSTALL_NAME_DIR ${CMAKE_INSTALL_PREFIX}/lib
|
||||
CLEAN_DIRECT_OUTPUT 1)
|
||||
endif()
|
||||
|
||||
--
|
||||
2.25.1
|
||||
|
||||
24
external/soci/patches/0002-Fix-soci_backend.patch
vendored
Normal file
24
external/soci/patches/0002-Fix-soci_backend.patch
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
diff --git a/cmake/SociBackend.cmake b/cmake/SociBackend.cmake
|
||||
index 0a664667..3fa2ed95 100644
|
||||
--- a/cmake/SociBackend.cmake
|
||||
+++ b/cmake/SociBackend.cmake
|
||||
@@ -31,14 +31,13 @@ macro(soci_backend_deps_found NAME DEPS SUCCESS)
|
||||
if(NOT DEPEND_FOUND)
|
||||
list(APPEND DEPS_NOT_FOUND ${dep})
|
||||
else()
|
||||
- string(TOUPPER "${dep}" DEPU)
|
||||
- if( ${DEPU}_INCLUDE_DIR )
|
||||
- list(APPEND DEPS_INCLUDE_DIRS ${${DEPU}_INCLUDE_DIR})
|
||||
+ if( ${dep}_INCLUDE_DIR )
|
||||
+ list(APPEND DEPS_INCLUDE_DIRS ${${dep}_INCLUDE_DIR})
|
||||
endif()
|
||||
- if( ${DEPU}_INCLUDE_DIRS )
|
||||
- list(APPEND DEPS_INCLUDE_DIRS ${${DEPU}_INCLUDE_DIRS})
|
||||
+ if( ${dep}_INCLUDE_DIRS )
|
||||
+ list(APPEND DEPS_INCLUDE_DIRS ${${dep}_INCLUDE_DIRS})
|
||||
endif()
|
||||
- list(APPEND DEPS_LIBRARIES ${${DEPU}_LIBRARIES})
|
||||
+ list(APPEND DEPS_LIBRARIES ${${dep}_LIBRARIES})
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
194
external/wasmedge/conandata.yml
vendored
Normal file
194
external/wasmedge/conandata.yml
vendored
Normal file
@@ -0,0 +1,194 @@
|
||||
sources:
|
||||
"0.13.5":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-windows.zip"
|
||||
sha256: "db533289ba26ec557b5193593c9ed03db75be3bc7aa737e2caa5b56b8eef888a"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.13.5/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "3686e0226871bf17b62ec57e1c15778c2947834b90af0dfad14f2e0202bf9284"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.13.5/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "472de88e0257c539c120b33fdd1805e1e95063121acc2df1d5626e4676b93529"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Macos:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-darwin_x86_64.tar.gz"
|
||||
sha256: "b7fdfaf59805951241f47690917b501ddfa06d9b6f7e0262e44e784efe4a7b33"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.13.5/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-darwin_arm64.tar.gz"
|
||||
sha256: "acc93721210294ced0887352f360e42e46dcc05332e6dd78c1452fb3a35d5255"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.13.5/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Android:
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-android_aarch64.tar.gz"
|
||||
sha256: "59a0d68a0c7368b51cc65cb5a44a68037d79fd449883ef42792178d57c8784a8"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.13.5/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"0.11.2":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-windows.zip"
|
||||
sha256: "ca49b98c0cf5f187e08c3ba71afc8d71365fde696f10b4219379a4a4d1a91e6d"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.2/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "784bf1eb25928e2cf02aa88e9372388fad682b4a188485da3cd9162caeedf143"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.2/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "a2766a4c1edbaea298a30e5431a4e795003a10d8398a933d923f23d4eb4fa5d1"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Macos:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-darwin_x86_64.tar.gz"
|
||||
sha256: "aedec53f29b1e0b657e46e67dba3e2f32a2924f4d9136e60073ea1aba3073e70"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.2/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-darwin_arm64.tar.gz"
|
||||
sha256: "fe391df90e1eee69cf1e976f5ddf60c20f29b651710aaa4fc03e2ab4fe52c0d3"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.2/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Android:
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-android_aarch64.tar.gz"
|
||||
sha256: "69e308f5927c753b2bb5639569d10219b60598174d8b304bdf310093fd7b2464"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.2/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"0.11.1":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-windows.zip"
|
||||
sha256: "c86f6384555a0484a5dd81faba5636bba78f5e3d6eaf627d880e34843f9e24bf"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "76ce4ea0eb86adfa52c73f6c6b44383626d94990e0923cae8b1e6f060ef2bf5b"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "cb9ea32932360463991cfda80e09879b2cf6c69737f12f3f2b371cd0af4e9ce8"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Macos:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-darwin_x86_64.tar.gz"
|
||||
sha256: "56df2b00669c25b8143ea2c17370256cd6a33f3b316d3b47857dd38d603cb69a"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-darwin_arm64.tar.gz"
|
||||
sha256: "82f7da1a7a36ec1923fb045193784dd090a03109e84da042af97297205a71f08"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Android:
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-android_aarch64.tar.gz"
|
||||
sha256: "af8694e93bf72ac5506450d4caebccc340fbba254dca3d58ec0712e96ec9dedd"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"0.10.0":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.10.0/WasmEdge-0.10.0-windows.zip"
|
||||
sha256: "63b8a02cced52a723aa283dba02bbe887656256ecca69bb0fff17872c0fb5ebc"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.10.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.10.0/WasmEdge-0.10.0-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "4c1ffca9fd8cbdeb8f0951ddaffbbefe81ae123d5b80f61e80ea8d9b56853cde"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.10.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.10.0/WasmEdge-0.10.0-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "c000bf96d0a73a1d360659246c0806c2ce78620b6f78c1147fbf9e2be0280bd9"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.10.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"0.9.1":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.1/WasmEdge-0.9.1-windows.zip"
|
||||
sha256: "68240d8aee23d44db5cc252d8c1cf5d0c77ab709a122af2747a4b836ba461671"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.1/WasmEdge-0.9.1-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "bcb6fe3d6e30db0d0aa267ec3bd9b7248f8c8c387620cef4049d682d293c8371"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.1/WasmEdge-0.9.1-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "515bcac3520cd546d9d14372b7930ab48b43f1c5dc258a9f61a82b22c0107eef"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"0.9.0":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.0/WasmEdge-0.9.0-windows.zip"
|
||||
sha256: "f81bfea4cf09053510e3e74c16c1ee010fc93def8a7e78744443b950f0011c3b"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.0/WasmEdge-0.9.0-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "27847f15e4294e707486458e857d7cb11806481bb67a26f076a717a1446827ed"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.0/WasmEdge-0.9.0-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Macos:
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.0/WasmEdge-0.9.0-darwin_arm64.tar.gz"
|
||||
sha256: "236a407a646f746ab78a1d0a39fa4e85fe28eae219b1635ba49f908d7944686d"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
92  external/wasmedge/conanfile.py  (vendored, new file)
@@ -0,0 +1,92 @@
from conan import ConanFile
|
||||
from conan.tools.files import get, copy, download
|
||||
from conan.tools.scm import Version
|
||||
from conan.errors import ConanInvalidConfiguration
|
||||
|
||||
import os
|
||||
|
||||
required_conan_version = ">=1.53.0"
|
||||
|
||||
class WasmedgeConan(ConanFile):
|
||||
name = "wasmedge"
|
||||
description = ("WasmEdge is a lightweight, high-performance, and extensible WebAssembly runtime"
|
||||
"for cloud native, edge, and decentralized applications."
|
||||
"It powers serverless apps, embedded functions, microservices, smart contracts, and IoT devices.")
|
||||
license = "Apache-2.0"
|
||||
url = "https://github.com/conan-io/conan-center-index"
|
||||
homepage = "https://github.com/WasmEdge/WasmEdge/"
|
||||
topics = ("webassembly", "wasm", "wasi", "emscripten")
|
||||
package_type = "shared-library"
|
||||
settings = "os", "arch", "compiler", "build_type"
|
||||
|
||||
@property
|
||||
def _compiler_alias(self):
|
||||
return {
|
||||
"Visual Studio": "Visual Studio",
|
||||
# "Visual Studio": "msvc",
|
||||
"msvc": "msvc",
|
||||
}.get(str(self.info.settings.compiler), "gcc")
|
||||
|
||||
def configure(self):
|
||||
self.settings.compiler.rm_safe("libcxx")
|
||||
self.settings.compiler.rm_safe("cppstd")
|
||||
|
||||
def validate(self):
|
||||
try:
|
||||
self.conan_data["sources"][self.version][str(self.settings.os)][str(self.settings.arch)][self._compiler_alias]
|
||||
except KeyError:
|
||||
raise ConanInvalidConfiguration("Binaries for this combination of version/os/arch/compiler are not available")
|
||||
|
||||
def package_id(self):
|
||||
del self.info.settings.compiler.version
|
||||
self.info.settings.compiler = self._compiler_alias
|
||||
|
||||
def build(self):
|
||||
# This is packaging binaries so the download needs to be in build
|
||||
get(self, **self.conan_data["sources"][self.version][str(self.settings.os)][str(self.settings.arch)][self._compiler_alias][0],
|
||||
destination=self.source_folder, strip_root=True)
|
||||
download(self, filename="LICENSE",
|
||||
**self.conan_data["sources"][self.version][str(self.settings.os)][str(self.settings.arch)][self._compiler_alias][1])
|
||||
|
||||
def package(self):
|
||||
copy(self, pattern="*.h", dst=os.path.join(self.package_folder, "include"), src=os.path.join(self.source_folder, "include"), keep_path=True)
|
||||
copy(self, pattern="*.inc", dst=os.path.join(self.package_folder, "include"), src=os.path.join(self.source_folder, "include"), keep_path=True)
|
||||
|
||||
srclibdir = os.path.join(self.source_folder, "lib64" if self.settings.os == "Linux" else "lib")
|
||||
srcbindir = os.path.join(self.source_folder, "bin")
|
||||
dstlibdir = os.path.join(self.package_folder, "lib")
|
||||
dstbindir = os.path.join(self.package_folder, "bin")
|
||||
if Version(self.version) >= "0.11.1":
|
||||
copy(self, pattern="wasmedge.lib", src=srclibdir, dst=dstlibdir, keep_path=False)
|
||||
copy(self, pattern="wasmedge.dll", src=srcbindir, dst=dstbindir, keep_path=False)
|
||||
copy(self, pattern="libwasmedge.so*", src=srclibdir, dst=dstlibdir, keep_path=False)
|
||||
copy(self, pattern="libwasmedge*.dylib", src=srclibdir, dst=dstlibdir, keep_path=False)
|
||||
else:
|
||||
copy(self, pattern="wasmedge_c.lib", src=srclibdir, dst=dstlibdir, keep_path=False)
|
||||
copy(self, pattern="wasmedge_c.dll", src=srcbindir, dst=dstbindir, keep_path=False)
|
||||
copy(self, pattern="libwasmedge_c.so*", src=srclibdir, dst=dstlibdir, keep_path=False)
|
||||
copy(self, pattern="libwasmedge_c*.dylib", src=srclibdir, dst=dstlibdir, keep_path=False)
|
||||
|
||||
copy(self, pattern="wasmedge*", src=srcbindir, dst=dstbindir, keep_path=False)
|
||||
copy(self, pattern="LICENSE", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"), keep_path=False)
|
||||
|
||||
def package_info(self):
|
||||
if Version(self.version) >= "0.11.1":
|
||||
self.cpp_info.libs = ["wasmedge"]
|
||||
else:
|
||||
self.cpp_info.libs = ["wasmedge_c"]
|
||||
|
||||
bindir = os.path.join(self.package_folder, "bin")
|
||||
self.output.info("Appending PATH environment variable: {}".format(bindir))
|
||||
self.env_info.PATH.append(bindir)
|
||||
|
||||
if self.settings.os == "Windows":
|
||||
self.cpp_info.system_libs.append("ws2_32")
|
||||
self.cpp_info.system_libs.append("wsock32")
|
||||
self.cpp_info.system_libs.append("shlwapi")
|
||||
|
||||
if self.settings.os in ["Linux", "FreeBSD"]:
|
||||
self.cpp_info.system_libs.append("m")
|
||||
self.cpp_info.system_libs.append("dl")
|
||||
self.cpp_info.system_libs.append("rt")
|
||||
self.cpp_info.system_libs.append("pthread")
|
||||
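For context on what the recipe above packages: it copies the WasmEdge C API headers plus the prebuilt shared library, which is named wasmedge for releases 0.11.1 and later and wasmedge_c before that (hence the version check in package() and package_info()). A minimal consumer sketch, assuming the upstream WasmEdge C API's WasmEdge_VersionGet() and the usual wasmedge/wasmedge.h include path:

```c
/* sketch.c: check that the packaged WasmEdge shared library links and loads.
 * Assumes the Conan package above put wasmedge/wasmedge.h on the include path
 * and libwasmedge (0.11.1+) on the link line; older releases use wasmedge_c. */
#include <stdio.h>
#include <wasmedge/wasmedge.h>

int main(void)
{
    /* WasmEdge_VersionGet() returns the runtime version string, e.g. "0.13.5". */
    printf("WasmEdge runtime version: %s\n", WasmEdge_VersionGet());
    return 0;
}
```

Built against the 0.13.5 binaries listed in conandata.yml, this is expected to print 0.13.5.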
29  hook/generate_sfcodes.sh  (new executable file)
@@ -0,0 +1,29 @@
#/bin/bash
|
||||
RIPPLED_ROOT="../src/ripple"
|
||||
echo '// For documentation please see: https://xrpl-hooks.readme.io/reference/'
|
||||
echo '// Generated using generate_sfcodes.sh'
|
||||
cat $RIPPLED_ROOT/protocol/impl/SField.cpp | grep -E '^CONSTRUCT_' |
|
||||
sed 's/UINT16,/1,/g' |
|
||||
sed 's/UINT32,/2,/g' |
|
||||
sed 's/UINT64,/3,/g' |
|
||||
sed 's/HASH128,/4,/g' |
|
||||
sed 's/HASH256,/5,/g' |
|
||||
sed 's/UINT128,/4,/g' |
|
||||
sed 's/UINT256,/5,/g' |
|
||||
sed 's/AMOUNT,/6,/g' |
|
||||
sed 's/VL,/7,/g' |
|
||||
sed 's/ACCOUNT,/8,/g' |
|
||||
sed 's/OBJECT,/14,/g' |
|
||||
sed 's/ARRAY,/15,/g' |
|
||||
sed 's/UINT8,/16,/g' |
|
||||
sed 's/HASH160,/17,/g' |
|
||||
sed 's/UINT160,/17,/g' |
|
||||
sed 's/PATHSET,/18,/g' |
|
||||
sed 's/VECTOR256,/19,/g' |
|
||||
sed 's/UINT96,/20,/g' |
|
||||
sed 's/UINT192,/21,/g' |
|
||||
sed 's/UINT384,/22,/g' |
|
||||
sed 's/UINT512,/23,/g' |
|
||||
grep -Eo '"([^"]+)", *([0-9]+), *([0-9]+)' |
|
||||
sed 's/"//g' | sed 's/ *//g' | sed 's/,/ /g' |
|
||||
awk '{print ("#define sf"$1" (("$2"U << 16U) + "$3"U)")}'
|
||||
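The pipeline above rewrites each serialized-type keyword from SField.cpp into its numeric type code and emits one #define per field in the form ((type << 16U) + field). An illustrative sample of the kind of output it produces (values chosen to match the sfcodes.h entries shown further down; the real header is regenerated by the script):

```c
/* Illustrative output only; the actual sfcodes.h is generated from SField.cpp.
 * Each macro packs (type_code << 16) + field_code, e.g. UINT32 -> 2, AMOUNT -> 6. */
#define sfSequence          ((2U << 16U) + 4U)   /* UINT32, field 4  */
#define sfAmount            ((6U << 16U) + 1U)   /* AMOUNT, field 1  */
#define sfAccount           ((8U << 16U) + 1U)   /* ACCOUNT, field 1 */
```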
463  hook/macro.h
@@ -1,5 +1,5 @@
/**
|
||||
* These are helper macros for writing hooks, all of them are optional as is including hookmacro.h at all
|
||||
* These are helper macros for writing hooks, all of them are optional as is including macro.h at all
|
||||
*/
|
||||
|
||||
#include <stdint.h>
|
||||
@@ -9,13 +9,32 @@
|
||||
#ifndef HOOKMACROS_INCLUDED
|
||||
#define HOOKMACROS_INCLUDED 1
|
||||
|
||||
|
||||
#ifdef NDEBUG
|
||||
#define DEBUG 0
|
||||
#else
|
||||
#define DEBUG 1
|
||||
#endif
|
||||
|
||||
#define DONEEMPTY()\
|
||||
accept(0,0,__LINE__)
|
||||
|
||||
#define DONEMSG(msg)\
|
||||
accept(msg, sizeof(msg),__LINE__)
|
||||
|
||||
#define DONE(x)\
|
||||
accept(SVAR(x),(uint32_t)__LINE__);
|
||||
|
||||
#define ASSERT(x)\
|
||||
{\
|
||||
if (!(x))\
|
||||
rollback(0,0,__LINE__);\
|
||||
}
|
||||
|
||||
#define NOPE(x)\
|
||||
{\
|
||||
return rollback((x), sizeof(x), __LINE__);\
|
||||
}
|
||||
|
||||
#define TRACEVAR(v) if (DEBUG) trace_num((uint32_t)(#v), (uint32_t)(sizeof(#v) - 1), (int64_t)v);
|
||||
#define TRACEHEX(v) if (DEBUG) trace((uint32_t)(#v), (uint32_t)(sizeof(#v) - 1), (uint32_t)(v), (uint32_t)(sizeof(v)), 1);
|
||||
#define TRACEXFL(v) if (DEBUG) trace_float((uint32_t)(#v), (uint32_t)(sizeof(#v) - 1), (int64_t)v);
|
||||
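These helpers wrap accept() and rollback() with the current source line as the error code, and the TRACE* macros compile to no-ops when NDEBUG is defined. A minimal sketch of a hook body using them, assuming the standard hooks API (otxn_type(), ledger_seq(), accept(), rollback(), trace_num()) and the ttPAYMENT constant from tts.h:

```c
int64_t hook(uint32_t reserved)
{
    ASSERT(ledger_seq() > 0);        /* rolls back with the line number on failure */

    int64_t tt = otxn_type();        /* transaction type of the originating txn */
    TRACEVAR(tt);                    /* compiled out when NDEBUG is defined */

    if (tt != ttPAYMENT)
        NOPE("example: only Payment transactions are accepted");

    DONEMSG("example: payment passed through");
    return 0;                        /* not reached: accept() does not return */
}
```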
@@ -26,6 +45,7 @@
|
||||
#define GUARDM(maxiter, n) _g(( (1ULL << 31U) + (__LINE__ << 16) + n), (maxiter)+1)
|
||||
|
||||
#define SBUF(str) (uint32_t)(str), sizeof(str)
|
||||
#define SVAR(x) &x, sizeof(x)
|
||||
|
||||
#define REQUIRE(cond, str)\
|
||||
{\
|
||||
@@ -138,6 +158,17 @@ int out_len = 0;\
|
||||
*(((uint64_t*)(buf1)) + 2) == *(((uint64_t*)(buf2)) + 2) &&\
|
||||
*(((uint64_t*)(buf1)) + 3) == *(((uint64_t*)(buf2)) + 3))
|
||||
|
||||
#define BUFFER_EQUAL_64(buf1, buf2) \
|
||||
( \
|
||||
(*((uint64_t*)(buf1) + 0) == *((uint64_t*)(buf2) + 0)) && \
|
||||
(*((uint64_t*)(buf1) + 1) == *((uint64_t*)(buf2) + 1)) && \
|
||||
(*((uint64_t*)(buf1) + 2) == *((uint64_t*)(buf2) + 2)) && \
|
||||
(*((uint64_t*)(buf1) + 3) == *((uint64_t*)(buf2) + 3)) && \
|
||||
(*((uint64_t*)(buf1) + 4) == *((uint64_t*)(buf2) + 4)) && \
|
||||
(*((uint64_t*)(buf1) + 5) == *((uint64_t*)(buf2) + 5)) && \
|
||||
(*((uint64_t*)(buf1) + 6) == *((uint64_t*)(buf2) + 6)) && \
|
||||
(*((uint64_t*)(buf1) + 7) == *((uint64_t*)(buf2) + 7)) \
|
||||
)
|
||||
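BUFFER_EQUAL_64 compares two 64-byte buffers as eight 64-bit words, so unlike the loop-based BUFFER_EQUAL_GUARD below it needs no _g() guard call. A small sketch with hypothetical buffers; both must be readable for the full 64 bytes and suitably aligned for 8-byte loads:

```c
/* Fragment inside a hook: a and b are 64-byte buffers filled elsewhere,
 * for example two concatenated 32-byte hashes. */
uint8_t a[64], b[64];
/* ... fill a and b ... */
if (BUFFER_EQUAL_64(a, b))
    DONEMSG("example: buffers are identical");
```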
|
||||
// when using this macro buf1len may be dynamic but buf2len must be static
|
||||
// provide n >= 1 to indicate how many times the macro will be hit on the line of code
|
||||
@@ -185,6 +216,18 @@ int out_len = 0;\
|
||||
#define BUFFER_EQUAL(output, buf1, buf2, compare_len)\
|
||||
BUFFER_EQUAL_GUARD(output, buf1, compare_len, buf2, compare_len, 1)
|
||||
|
||||
|
||||
#define UINT8_TO_BUF(buf_raw, i)\
|
||||
{\
|
||||
unsigned char* buf = (unsigned char*)buf_raw;\
|
||||
buf[0] = (((uint8_t)i) >> 0) & 0xFFUL;\
|
||||
if (i < 0) buf[0] |= 0x80U;\
|
||||
}
|
||||
|
||||
#define UINT8_FROM_BUF(buf)\
|
||||
(((uint8_t)((buf)[0]) << 0))
|
||||
|
||||
|
||||
#define UINT16_TO_BUF(buf_raw, i)\
|
||||
{\
|
||||
unsigned char* buf = (unsigned char*)buf_raw;\
|
||||
@@ -205,7 +248,6 @@ int out_len = 0;\
|
||||
buf[3] = (((uint64_t)i) >> 0) & 0xFFUL;\
|
||||
}
|
||||
|
||||
|
||||
#define UINT32_FROM_BUF(buf)\
|
||||
(((uint64_t)((buf)[0]) << 24) +\
|
||||
((uint64_t)((buf)[1]) << 16) +\
|
||||
@@ -225,7 +267,6 @@ int out_len = 0;\
|
||||
buf[7] = (((uint64_t)i) >> 0) & 0xFFUL;\
|
||||
}
|
||||
|
||||
|
||||
#define UINT64_FROM_BUF(buf)\
|
||||
(((uint64_t)((buf)[0]) << 56) +\
|
||||
((uint64_t)((buf)[1]) << 48) +\
|
||||
@@ -236,17 +277,6 @@ int out_len = 0;\
|
||||
((uint64_t)((buf)[6]) << 8) +\
|
||||
((uint64_t)((buf)[7]) << 0))
|
||||
|
||||
|
||||
#define INT64_FROM_BUF(buf)\
|
||||
((((uint64_t)((buf)[0] & 0x7FU) << 56) +\
|
||||
((uint64_t)((buf)[1]) << 48) +\
|
||||
((uint64_t)((buf)[2]) << 40) +\
|
||||
((uint64_t)((buf)[3]) << 32) +\
|
||||
((uint64_t)((buf)[4]) << 24) +\
|
||||
((uint64_t)((buf)[5]) << 16) +\
|
||||
((uint64_t)((buf)[6]) << 8) +\
|
||||
((uint64_t)((buf)[7]) << 0)) * (buf[0] & 0x80U ? -1 : 1))
|
||||
|
||||
#define INT64_TO_BUF(buf_raw, i)\
|
||||
{\
|
||||
unsigned char* buf = (unsigned char*)buf_raw;\
|
||||
@@ -261,6 +291,65 @@ int out_len = 0;\
|
||||
if (i < 0) buf[0] |= 0x80U;\
|
||||
}
|
||||
|
||||
#define INT64_FROM_BUF(buf)\
|
||||
((((uint64_t)((buf)[0] & 0x7FU) << 56) +\
|
||||
((uint64_t)((buf)[1]) << 48) +\
|
||||
((uint64_t)((buf)[2]) << 40) +\
|
||||
((uint64_t)((buf)[3]) << 32) +\
|
||||
((uint64_t)((buf)[4]) << 24) +\
|
||||
((uint64_t)((buf)[5]) << 16) +\
|
||||
((uint64_t)((buf)[6]) << 8) +\
|
||||
((uint64_t)((buf)[7]) << 0)) * (buf[0] & 0x80U ? -1 : 1))
|
||||
|
||||
|
||||
#define BYTES20_TO_BUF(buf_raw, i)\
|
||||
{\
|
||||
unsigned char* buf = (unsigned char*)buf_raw;\
|
||||
*(uint64_t*)(buf + 0) = *(uint64_t*)(i + 0);\
|
||||
*(uint64_t*)(buf + 8) = *(uint64_t*)(i + 8);\
|
||||
*(uint32_t*)(buf + 16) = *(uint32_t*)(i + 16);\
|
||||
}
|
||||
|
||||
#define BYTES20_FROM_BUF(buf_raw, i)\
|
||||
{\
|
||||
const unsigned char* buf = (const unsigned char*)buf_raw;\
|
||||
*(uint64_t*)(i + 0) = *(const uint64_t*)(buf + 0);\
|
||||
*(uint64_t*)(i + 8) = *(const uint64_t*)(buf + 8);\
|
||||
*(uint32_t*)(i + 16) = *(const uint32_t*)(buf + 16);\
|
||||
}
|
||||
|
||||
#define BYTES32_TO_BUF(buf_raw, i)\
|
||||
{\
|
||||
unsigned char* buf = (unsigned char*)buf_raw;\
|
||||
*(uint64_t*)(buf + 0) = *(uint64_t*)(i + 0);\
|
||||
*(uint64_t*)(buf + 8) = *(uint64_t*)(i + 8);\
|
||||
*(uint64_t*)(buf + 16) = *(uint64_t*)(i + 16);\
|
||||
*(uint64_t*)(buf + 24) = *(uint64_t*)(i + 24);\
|
||||
}
|
||||
|
||||
#define BYTES32_FROM_BUF(buf_raw, i)\
|
||||
{\
|
||||
const unsigned char* buf = (const unsigned char*)buf_raw;\
|
||||
*(uint64_t*)(i + 0) = *(const uint64_t*)(buf + 0);\
|
||||
*(uint64_t*)(i + 8) = *(const uint64_t*)(buf + 8);\
|
||||
*(uint64_t*)(i + 16) = *(const uint64_t*)(buf + 16);\
|
||||
*(uint64_t*)(i + 24) = *(const uint64_t*)(buf + 24);\
|
||||
}
|
||||
|
||||
#define FLIP_ENDIAN_32(n) ((uint32_t) (((n & 0xFFU) << 24U) | \
|
||||
((n & 0xFF00U) << 8U) | \
|
||||
((n & 0xFF0000U) >> 8U) | \
|
||||
((n & 0xFF000000U) >> 24U)))
|
||||
|
||||
#define FLIP_ENDIAN_64(n) ((uint64_t)(((n & 0xFFULL) << 56ULL) | \
|
||||
((n & 0xFF00ULL) << 40ULL) | \
|
||||
((n & 0xFF0000ULL) << 24ULL) | \
|
||||
((n & 0xFF000000ULL) << 8ULL) | \
|
||||
((n & 0xFF00000000ULL) >> 8ULL) | \
|
||||
((n & 0xFF0000000000ULL) >> 24ULL) | \
|
||||
((n & 0xFF000000000000ULL) >> 40ULL) | \
|
||||
((n & 0xFF00000000000000ULL) >> 56ULL)))
|
||||
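The BYTES20/BYTES32 macros copy fixed-size fields with straight 64-bit loads and stores (no loop, so no guard), and the FLIP_ENDIAN macros convert a value between host order and the big-endian serialized order. A short sketch, assuming 8-byte-aligned buffers:

```c
uint8_t serialized[32];                 /* e.g. a 32-byte hash read from a field */
uint8_t hash[32];
BYTES32_FROM_BUF(serialized, hash);     /* copies serialized -> hash, four 8-byte stores */

uint32_t seq    = 0x01020304UL;
uint32_t seq_be = FLIP_ENDIAN_32(seq);  /* 0x04030201: big-endian byte order */
```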
|
||||
#define tfCANONICAL 0x80000000UL
|
||||
|
||||
#define atACCOUNT 1U
|
||||
@@ -287,350 +376,6 @@ int out_len = 0;\
|
||||
#define amRIPPLEESCROW 17U
|
||||
#define amDELIVEREDAMOUNT 18U
|
||||
|
||||
/**
|
||||
* RH NOTE -- PAY ATTENTION
|
||||
*
|
||||
* ALL 'ENCODE' MACROS INCREMENT BUF_OUT
|
||||
* THIS IS TO MAKE CHAINING EASY
|
||||
* BUF_OUT IS A SACRIFICIAL POINTER
|
||||
*
|
||||
* 'ENCODE' MACROS WITH CONSTANTS HAVE
|
||||
* ALIASING TO ASSIST YOU WITH ORDER
|
||||
* _TYPECODE_FIELDCODE_ENCODE_MACRO
|
||||
* TO PRODUCE A SERIALIZED OBJECT
|
||||
* IN CANONICAL FORMAT YOU MUST ORDER
|
||||
* FIRST BY TYPE CODE THEN BY FIELD CODE
|
||||
*
|
||||
* ALL 'PREPARE' MACROS PRESERVE POINTERS
|
||||
*
|
||||
**/
|
||||
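Because every ENCODE_* macro advances buf_out by exactly the number of bytes it wrote, fields can be emitted back to back simply by invoking the aliases in canonical order (type code first, then field code). A small sketch of the convention, using macros defined below:

```c
uint8_t tx[32];
uint8_t* buf_out = tx;                        /* sacrificial pointer, advanced by each macro */
_01_02_ENCODE_TT      (buf_out, ttPAYMENT  ); /* 3 bytes: TransactionType */
_02_02_ENCODE_FLAGS   (buf_out, tfCANONICAL); /* 5 bytes: Flags           */
_02_04_ENCODE_SEQUENCE(buf_out, 0          ); /* 5 bytes: Sequence        */
/* buf_out is now tx + 13; later fields continue from here. */
```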
|
||||
|
||||
#define ENCODE_TL_SIZE 49
|
||||
#define ENCODE_TL(buf_out, tlamt, amount_type)\
|
||||
{\
|
||||
uint8_t uat = amount_type; \
|
||||
buf_out[0] = 0x60U +(uat & 0x0FU ); \
|
||||
for (int i = 1; GUARDM(48, 1), i < 49; ++i)\
|
||||
buf_out[i] = tlamt[i-1];\
|
||||
buf_out += ENCODE_TL_SIZE;\
|
||||
}
|
||||
#define _06_XX_ENCODE_TL(buf_out, drops, amount_type )\
|
||||
ENCODE_TL(buf_out, drops, amount_type );
|
||||
#define ENCODE_TL_AMOUNT(buf_out, drops )\
|
||||
ENCODE_TL(buf_out, drops, amAMOUNT );
|
||||
#define _06_01_ENCODE_TL_AMOUNT(buf_out, drops )\
|
||||
ENCODE_TL_AMOUNT(buf_out, drops );
|
||||
|
||||
|
||||
// Encode drops to serialization format
|
||||
// consumes 9 bytes
|
||||
#define ENCODE_DROPS_SIZE 9
|
||||
#define ENCODE_DROPS(buf_out, drops, amount_type ) \
|
||||
{\
|
||||
uint8_t uat = amount_type; \
|
||||
uint64_t udrops = drops; \
|
||||
buf_out[0] = 0x60U +(uat & 0x0FU ); \
|
||||
buf_out[1] = 0b01000000 + (( udrops >> 56 ) & 0b00111111 ); \
|
||||
buf_out[2] = (udrops >> 48) & 0xFFU; \
|
||||
buf_out[3] = (udrops >> 40) & 0xFFU; \
|
||||
buf_out[4] = (udrops >> 32) & 0xFFU; \
|
||||
buf_out[5] = (udrops >> 24) & 0xFFU; \
|
||||
buf_out[6] = (udrops >> 16) & 0xFFU; \
|
||||
buf_out[7] = (udrops >> 8) & 0xFFU; \
|
||||
buf_out[8] = (udrops >> 0) & 0xFFU; \
|
||||
buf_out += ENCODE_DROPS_SIZE; \
|
||||
}
|
||||
|
||||
#define _06_XX_ENCODE_DROPS(buf_out, drops, amount_type )\
|
||||
ENCODE_DROPS(buf_out, drops, amount_type );
|
||||
|
||||
#define ENCODE_DROPS_AMOUNT(buf_out, drops )\
|
||||
ENCODE_DROPS(buf_out, drops, amAMOUNT );
|
||||
#define _06_01_ENCODE_DROPS_AMOUNT(buf_out, drops )\
|
||||
ENCODE_DROPS_AMOUNT(buf_out, drops );
|
||||
|
||||
#define ENCODE_DROPS_FEE(buf_out, drops )\
|
||||
ENCODE_DROPS(buf_out, drops, amFEE );
|
||||
#define _06_08_ENCODE_DROPS_FEE(buf_out, drops )\
|
||||
ENCODE_DROPS_FEE(buf_out, drops );
|
||||
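As a worked example of the drops encoding: the first byte is 0x60 plus the amount field code, the second byte carries the native-amount flag bit (0b01000000) plus the top six bits of the 62-bit drop count, and the remaining seven bytes hold the rest of the value. A sketch that assumes amAMOUNT is 1, consistent with sfAmount's field code in sfcodes.h:

```c
uint8_t amt[ENCODE_DROPS_SIZE];
uint8_t* p = amt;
ENCODE_DROPS_AMOUNT(p, 1);   /* one drop */
/* amt now holds: 61 40 00 00 00 00 00 00 01
 *   0x61 = 0x60 + field 1 (sfAmount), 0x40 = native flag, value = 1 drop */
```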
|
||||
#define ENCODE_TT_SIZE 3
|
||||
#define ENCODE_TT(buf_out, tt )\
|
||||
{\
|
||||
uint8_t utt = tt;\
|
||||
buf_out[0] = 0x12U;\
|
||||
buf_out[1] =(utt >> 8 ) & 0xFFU;\
|
||||
buf_out[2] =(utt >> 0 ) & 0xFFU;\
|
||||
buf_out += ENCODE_TT_SIZE; \
|
||||
}
|
||||
#define _01_02_ENCODE_TT(buf_out, tt)\
|
||||
ENCODE_TT(buf_out, tt);
|
||||
|
||||
|
||||
#define ENCODE_ACCOUNT_SIZE 22
|
||||
#define ENCODE_ACCOUNT(buf_out, account_id, account_type)\
|
||||
{\
|
||||
uint8_t uat = account_type;\
|
||||
buf_out[0] = 0x80U + uat;\
|
||||
buf_out[1] = 0x14U;\
|
||||
*(uint64_t*)(buf_out + 2) = *(uint64_t*)(account_id + 0);\
|
||||
*(uint64_t*)(buf_out + 10) = *(uint64_t*)(account_id + 8);\
|
||||
*(uint32_t*)(buf_out + 18) = *(uint32_t*)(account_id + 16);\
|
||||
buf_out += ENCODE_ACCOUNT_SIZE;\
|
||||
}
|
||||
#define _08_XX_ENCODE_ACCOUNT(buf_out, account_id, account_type)\
|
||||
ENCODE_ACCOUNT(buf_out, account_id, account_type);
|
||||
|
||||
#define ENCODE_ACCOUNT_SRC_SIZE 22
|
||||
#define ENCODE_ACCOUNT_SRC(buf_out, account_id)\
|
||||
ENCODE_ACCOUNT(buf_out, account_id, atACCOUNT);
|
||||
#define _08_01_ENCODE_ACCOUNT_SRC(buf_out, account_id)\
|
||||
ENCODE_ACCOUNT_SRC(buf_out, account_id);
|
||||
|
||||
#define ENCODE_ACCOUNT_DST_SIZE 22
|
||||
#define ENCODE_ACCOUNT_DST(buf_out, account_id)\
|
||||
ENCODE_ACCOUNT(buf_out, account_id, atDESTINATION);
|
||||
#define _08_03_ENCODE_ACCOUNT_DST(buf_out, account_id)\
|
||||
ENCODE_ACCOUNT_DST(buf_out, account_id);
|
||||
|
||||
#define ENCODE_ACCOUNT_OWNER_SIZE 22
|
||||
#define ENCODE_ACCOUNT_OWNER(buf_out, account_id) \
|
||||
ENCODE_ACCOUNT(buf_out, account_id, atOWNER);
|
||||
#define _08_02_ENCODE_ACCOUNT_OWNER(buf_out, account_id) \
|
||||
ENCODE_ACCOUNT_OWNER(buf_out, account_id);
|
||||
|
||||
#define ENCODE_UINT32_COMMON_SIZE 5U
|
||||
#define ENCODE_UINT32_COMMON(buf_out, i, field)\
|
||||
{\
|
||||
uint32_t ui = i; \
|
||||
uint8_t uf = field; \
|
||||
buf_out[0] = 0x20U +(uf & 0x0FU); \
|
||||
buf_out[1] =(ui >> 24 ) & 0xFFU; \
|
||||
buf_out[2] =(ui >> 16 ) & 0xFFU; \
|
||||
buf_out[3] =(ui >> 8 ) & 0xFFU; \
|
||||
buf_out[4] =(ui >> 0 ) & 0xFFU; \
|
||||
buf_out += ENCODE_UINT32_COMMON_SIZE; \
|
||||
}
|
||||
#define _02_XX_ENCODE_UINT32_COMMON(buf_out, i, field)\
|
||||
ENCODE_UINT32_COMMON(buf_out, i, field)\
|
||||
|
||||
#define ENCODE_UINT32_UNCOMMON_SIZE 6U
|
||||
#define ENCODE_UINT32_UNCOMMON(buf_out, i, field)\
|
||||
{\
|
||||
uint32_t ui = i; \
|
||||
uint8_t uf = field; \
|
||||
buf_out[0] = 0x20U; \
|
||||
buf_out[1] = uf; \
|
||||
buf_out[2] =(ui >> 24 ) & 0xFFU; \
|
||||
buf_out[3] =(ui >> 16 ) & 0xFFU; \
|
||||
buf_out[4] =(ui >> 8 ) & 0xFFU; \
|
||||
buf_out[5] =(ui >> 0 ) & 0xFFU; \
|
||||
buf_out += ENCODE_UINT32_UNCOMMON_SIZE; \
|
||||
}
|
||||
#define _02_XX_ENCODE_UINT32_UNCOMMON(buf_out, i, field)\
|
||||
ENCODE_UINT32_UNCOMMON(buf_out, i, field)\
|
||||
|
||||
#define ENCODE_LLS_SIZE 6U
|
||||
#define ENCODE_LLS(buf_out, lls )\
|
||||
ENCODE_UINT32_UNCOMMON(buf_out, lls, 0x1B );
|
||||
#define _02_27_ENCODE_LLS(buf_out, lls )\
|
||||
ENCODE_LLS(buf_out, lls );
|
||||
|
||||
#define ENCODE_FLS_SIZE 6U
|
||||
#define ENCODE_FLS(buf_out, fls )\
|
||||
ENCODE_UINT32_UNCOMMON(buf_out, fls, 0x1A );
|
||||
#define _02_26_ENCODE_FLS(buf_out, fls )\
|
||||
ENCODE_FLS(buf_out, fls );
|
||||
|
||||
#define ENCODE_TAG_SRC_SIZE 5
|
||||
#define ENCODE_TAG_SRC(buf_out, tag )\
|
||||
ENCODE_UINT32_COMMON(buf_out, tag, 0x3U );
|
||||
#define _02_03_ENCODE_TAG_SRC(buf_out, tag )\
|
||||
ENCODE_TAG_SRC(buf_out, tag );
|
||||
|
||||
#define ENCODE_TAG_DST_SIZE 5
|
||||
#define ENCODE_TAG_DST(buf_out, tag )\
|
||||
ENCODE_UINT32_COMMON(buf_out, tag, 0xEU );
|
||||
#define _02_14_ENCODE_TAG_DST(buf_out, tag )\
|
||||
ENCODE_TAG_DST(buf_out, tag );
|
||||
|
||||
#define ENCODE_SEQUENCE_SIZE 5
|
||||
#define ENCODE_SEQUENCE(buf_out, sequence )\
|
||||
ENCODE_UINT32_COMMON(buf_out, sequence, 0x4U );
|
||||
#define _02_04_ENCODE_SEQUENCE(buf_out, sequence )\
|
||||
ENCODE_SEQUENCE(buf_out, sequence );
|
||||
|
||||
#define ENCODE_FLAGS_SIZE 5
|
||||
#define ENCODE_FLAGS(buf_out, tag )\
|
||||
ENCODE_UINT32_COMMON(buf_out, tag, 0x2U );
|
||||
#define _02_02_ENCODE_FLAGS(buf_out, tag )\
|
||||
ENCODE_FLAGS(buf_out, tag );
|
||||
|
||||
#define ENCODE_SIGNING_PUBKEY_SIZE 35
|
||||
#define ENCODE_SIGNING_PUBKEY(buf_out, pkey )\
|
||||
{\
|
||||
buf_out[0] = 0x73U;\
|
||||
buf_out[1] = 0x21U;\
|
||||
*(uint64_t*)(buf_out + 2) = *(uint64_t*)(pkey + 0);\
|
||||
*(uint64_t*)(buf_out + 10) = *(uint64_t*)(pkey + 8);\
|
||||
*(uint64_t*)(buf_out + 18) = *(uint64_t*)(pkey + 16);\
|
||||
*(uint64_t*)(buf_out + 26) = *(uint64_t*)(pkey + 24);\
|
||||
buf[34] = pkey[32];\
|
||||
buf_out += ENCODE_SIGNING_PUBKEY_SIZE;\
|
||||
}
|
||||
|
||||
#define _07_03_ENCODE_SIGNING_PUBKEY(buf_out, pkey )\
|
||||
ENCODE_SIGNING_PUBKEY(buf_out, pkey );
|
||||
|
||||
#define ENCODE_SIGNING_PUBKEY_NULL_SIZE 2
|
||||
#define ENCODE_SIGNING_PUBKEY_NULL(buf_out )\
|
||||
{\
|
||||
*buf_out++ = 0x73U;\
|
||||
*buf_out++ = 0x00U;\
|
||||
}
|
||||
|
||||
#define _07_03_ENCODE_SIGNING_PUBKEY_NULL(buf_out )\
|
||||
ENCODE_SIGNING_PUBKEY_NULL(buf_out );
|
||||
|
||||
|
||||
#define _0E_0E_ENCODE_HOOKOBJ(buf_out, hhash)\
|
||||
{\
|
||||
uint8_t* hook0 = (hhash);\
|
||||
*buf_out++ = 0xEEU; /* hook obj start */ \
|
||||
if (hook0 == 0) /* noop */\
|
||||
{\
|
||||
/* do nothing */ \
|
||||
}\
|
||||
else\
|
||||
{\
|
||||
*buf_out++ = 0x22U; /* flags = override */\
|
||||
*buf_out++ = 0x00U;\
|
||||
*buf_out++ = 0x00U;\
|
||||
*buf_out++ = 0x00U;\
|
||||
*buf_out++ = 0x01U;\
|
||||
if (hook0 == 0xFFFFFFFFUL) /* delete operation */ \
|
||||
{\
|
||||
*buf_out++ = 0x7BU; /* empty createcode */ \
|
||||
*buf_out++ = 0x00U;\
|
||||
}\
|
||||
else\
|
||||
{\
|
||||
*buf_out++ = 0x50U; /* HookHash */\
|
||||
*buf_out++ = 0x1FU;\
|
||||
uint64_t* d = (uint64_t*)buf_out;\
|
||||
uint64_t* s = (uint64_t*)hook0;\
|
||||
*d++ = *s++;\
|
||||
*d++ = *s++;\
|
||||
*d++ = *s++;\
|
||||
*d++ = *s++;\
|
||||
buf_out+=32;\
|
||||
}\
|
||||
}\
|
||||
*buf_out++ = 0xE1U;\
|
||||
}
|
||||
|
||||
#define PREPARE_HOOKSET(buf_out_master, maxlen, h, sizeout)\
|
||||
{\
|
||||
uint8_t* buf_out = (buf_out_master); \
|
||||
uint8_t acc[20]; \
|
||||
uint32_t cls = (uint32_t)ledger_seq(); \
|
||||
hook_account(SBUF(acc)); \
|
||||
_01_02_ENCODE_TT (buf_out, ttHOOK_SET ); \
|
||||
_02_02_ENCODE_FLAGS (buf_out, tfCANONICAL ); \
|
||||
_02_04_ENCODE_SEQUENCE (buf_out, 0 ); \
|
||||
_02_26_ENCODE_FLS (buf_out, cls + 1 ); \
|
||||
_02_27_ENCODE_LLS (buf_out, cls + 5 ); \
|
||||
uint8_t* fee_ptr = buf_out; \
|
||||
_06_08_ENCODE_DROPS_FEE (buf_out, 0 ); \
|
||||
_07_03_ENCODE_SIGNING_PUBKEY_NULL (buf_out ); \
|
||||
_08_01_ENCODE_ACCOUNT_SRC (buf_out, acc ); \
|
||||
uint32_t remaining_size = (maxlen) - (buf_out - (buf_out_master)); \
|
||||
int64_t edlen = etxn_details((uint32_t)buf_out, remaining_size); \
|
||||
buf_out += edlen; \
|
||||
*buf_out++ = 0xFBU; /* hook array start */ \
|
||||
_0E_0E_ENCODE_HOOKOBJ (buf_out, h[0]); \
|
||||
_0E_0E_ENCODE_HOOKOBJ (buf_out, h[1]); \
|
||||
_0E_0E_ENCODE_HOOKOBJ (buf_out, h[2]); \
|
||||
_0E_0E_ENCODE_HOOKOBJ (buf_out, h[3]); \
|
||||
_0E_0E_ENCODE_HOOKOBJ (buf_out, h[4]); \
|
||||
_0E_0E_ENCODE_HOOKOBJ (buf_out, h[5]); \
|
||||
_0E_0E_ENCODE_HOOKOBJ (buf_out, h[6]); \
|
||||
_0E_0E_ENCODE_HOOKOBJ (buf_out, h[7]); \
|
||||
_0E_0E_ENCODE_HOOKOBJ (buf_out, h[8]); \
|
||||
_0E_0E_ENCODE_HOOKOBJ (buf_out, h[9]); \
|
||||
*buf_out++ = 0xF1U; /* hook array end */ \
|
||||
sizeout = (buf_out - (buf_out_master)); \
|
||||
int64_t fee = etxn_fee_base(buf_out_master, sizeout); \
|
||||
_06_08_ENCODE_DROPS_FEE (fee_ptr, fee ); \
|
||||
}
|
||||
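PREPARE_HOOKSET serializes a full ttHOOK_SET transaction into buf_out_master: it fills the common fields, reserves the fee field, appends the emit details, writes one hook object per entry of h (a null pointer leaves that position untouched, 0xFFFFFFFF requests deletion, anything else is treated as a 32-byte HookHash), then back-patches the fee. A sketch of emitting such a transaction from inside a hook, assuming etxn_reserve() and emit() from the hooks API and a hypothetical my_hook_hash buffer:

```c
uint8_t my_hook_hash[32];                 /* 32-byte hook definition hash, filled elsewhere */
uint8_t* hashes[10] = { my_hook_hash, 0, 0, 0, 0, 0, 0, 0, 0, 0 };

uint8_t  txn[1024];
uint32_t txn_len = 0;

etxn_reserve(1);                          /* must precede etxn_details / etxn_fee_base */
PREPARE_HOOKSET(txn, sizeof(txn), hashes, txn_len);

uint8_t emit_hash[32];
int64_t e = emit(SBUF(emit_hash), (uint32_t)txn, txn_len);
ASSERT(e >= 0);
```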
|
||||
#ifdef HAS_CALLBACK
|
||||
#define PREPARE_PAYMENT_SIMPLE_SIZE 270U
|
||||
#else
|
||||
#define PREPARE_PAYMENT_SIMPLE_SIZE 248U
|
||||
#endif
|
||||
|
||||
#define PREPARE_PAYMENT_SIMPLE(buf_out_master, drops_amount_raw, to_address, dest_tag_raw, src_tag_raw)\
|
||||
{\
|
||||
uint8_t* buf_out = buf_out_master;\
|
||||
uint8_t acc[20];\
|
||||
uint64_t drops_amount = (drops_amount_raw);\
|
||||
uint32_t dest_tag = (dest_tag_raw);\
|
||||
uint32_t src_tag = (src_tag_raw);\
|
||||
uint32_t cls = (uint32_t)ledger_seq();\
|
||||
hook_account(SBUF(acc));\
|
||||
_01_02_ENCODE_TT (buf_out, ttPAYMENT ); /* uint16 | size 3 */ \
|
||||
_02_02_ENCODE_FLAGS (buf_out, tfCANONICAL ); /* uint32 | size 5 */ \
|
||||
_02_03_ENCODE_TAG_SRC (buf_out, src_tag ); /* uint32 | size 5 */ \
|
||||
_02_04_ENCODE_SEQUENCE (buf_out, 0 ); /* uint32 | size 5 */ \
|
||||
_02_14_ENCODE_TAG_DST (buf_out, dest_tag ); /* uint32 | size 5 */ \
|
||||
_02_26_ENCODE_FLS (buf_out, cls + 1 ); /* uint32 | size 6 */ \
|
||||
_02_27_ENCODE_LLS (buf_out, cls + 5 ); /* uint32 | size 6 */ \
|
||||
_06_01_ENCODE_DROPS_AMOUNT (buf_out, drops_amount ); /* amount | size 9 */ \
|
||||
uint8_t* fee_ptr = buf_out;\
|
||||
_06_08_ENCODE_DROPS_FEE (buf_out, 0 ); /* amount | size 9 */ \
|
||||
_07_03_ENCODE_SIGNING_PUBKEY_NULL (buf_out ); /* pk | size 35 */ \
|
||||
_08_01_ENCODE_ACCOUNT_SRC (buf_out, acc ); /* account | size 22 */ \
|
||||
_08_03_ENCODE_ACCOUNT_DST (buf_out, to_address ); /* account | size 22 */ \
|
||||
int64_t edlen = etxn_details((uint32_t)buf_out, PREPARE_PAYMENT_SIMPLE_SIZE); /* emitdet | size 1?? */ \
|
||||
int64_t fee = etxn_fee_base(buf_out_master, PREPARE_PAYMENT_SIMPLE_SIZE); \
|
||||
_06_08_ENCODE_DROPS_FEE (fee_ptr, fee ); \
|
||||
}
|
||||
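PREPARE_PAYMENT_SIMPLE lays out a complete ttPAYMENT with a native (drops) Amount and then back-patches the fee from etxn_fee_base(). A sketch of emitting a 1 XAH (1,000,000 drop) payment from inside a hook, assuming a 20-byte destination AccountID obtained elsewhere:

```c
uint8_t dest_acc[20];                     /* destination AccountID, e.g. from a hook parameter */
uint8_t txn[PREPARE_PAYMENT_SIMPLE_SIZE];

etxn_reserve(1);
PREPARE_PAYMENT_SIMPLE(txn, 1000000 /* drops */, dest_acc, 0, 0);

uint8_t emit_hash[32];
if (emit(SBUF(emit_hash), SBUF(txn)) < 0)
    NOPE("example: emit failed");
```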
|
||||
#ifdef HAS_CALLBACK
|
||||
#define PREPARE_PAYMENT_SIMPLE_TRUSTLINE_SIZE 309
|
||||
#else
|
||||
#define PREPARE_PAYMENT_SIMPLE_TRUSTLINE_SIZE 287
|
||||
#endif
|
||||
#define PREPARE_PAYMENT_SIMPLE_TRUSTLINE(buf_out_master, tlamt, to_address, dest_tag_raw, src_tag_raw)\
|
||||
{\
|
||||
uint8_t* buf_out = buf_out_master;\
|
||||
uint8_t acc[20];\
|
||||
uint32_t dest_tag = (dest_tag_raw);\
|
||||
uint32_t src_tag = (src_tag_raw);\
|
||||
uint32_t cls = (uint32_t)ledger_seq();\
|
||||
hook_account(SBUF(acc));\
|
||||
_01_02_ENCODE_TT (buf_out, ttPAYMENT ); /* uint16 | size 3 */ \
|
||||
_02_02_ENCODE_FLAGS (buf_out, tfCANONICAL ); /* uint32 | size 5 */ \
|
||||
_02_03_ENCODE_TAG_SRC (buf_out, src_tag ); /* uint32 | size 5 */ \
|
||||
_02_04_ENCODE_SEQUENCE (buf_out, 0 ); /* uint32 | size 5 */ \
|
||||
_02_14_ENCODE_TAG_DST (buf_out, dest_tag ); /* uint32 | size 5 */ \
|
||||
_02_26_ENCODE_FLS (buf_out, cls + 1 ); /* uint32 | size 6 */ \
|
||||
_02_27_ENCODE_LLS (buf_out, cls + 5 ); /* uint32 | size 6 */ \
|
||||
_06_01_ENCODE_TL_AMOUNT (buf_out, tlamt ); /* amount | size 48 */ \
|
||||
uint8_t* fee_ptr = buf_out;\
|
||||
_06_08_ENCODE_DROPS_FEE (buf_out, 0 ); /* amount | size 9 */ \
|
||||
_07_03_ENCODE_SIGNING_PUBKEY_NULL (buf_out ); /* pk | size 35 */ \
|
||||
_08_01_ENCODE_ACCOUNT_SRC (buf_out, acc ); /* account | size 22 */ \
|
||||
_08_03_ENCODE_ACCOUNT_DST (buf_out, to_address ); /* account | size 22 */ \
|
||||
etxn_details((uint32_t)buf_out, PREPARE_PAYMENT_SIMPLE_TRUSTLINE_SIZE); /* emitdet | size 1?? */ \
|
||||
int64_t fee = etxn_fee_base(buf_out_master, PREPARE_PAYMENT_SIMPLE_TRUSTLINE_SIZE); \
|
||||
_06_08_ENCODE_DROPS_FEE (fee_ptr, fee ); \
|
||||
}
|
||||
|
||||
|
||||
|
||||
#endif
@@ -60,7 +60,10 @@
|
||||
#define sfBurnedNFTokens ((2U << 16U) + 44U)
|
||||
#define sfHookStateCount ((2U << 16U) + 45U)
|
||||
#define sfEmitGeneration ((2U << 16U) + 46U)
|
||||
#define sfLockCount ((2U << 16U) + 47U)
|
||||
#define sfLockCount ((2U << 16U) + 49U)
|
||||
#define sfFirstNFTokenSequence ((2U << 16U) + 50U)
|
||||
#define sfXahauActivationLgrSeq ((2U << 16U) + 96U)
|
||||
#define sfImportSequence ((2U << 16U) + 97U)
|
||||
#define sfRewardTime ((2U << 16U) + 98U)
|
||||
#define sfRewardLgrFirst ((2U << 16U) + 99U)
|
||||
#define sfRewardLgrLast ((2U << 16U) + 100U)
|
||||
@@ -80,12 +83,15 @@
|
||||
#define sfHookInstructionCount ((3U << 16U) + 17U)
|
||||
#define sfHookReturnCode ((3U << 16U) + 18U)
|
||||
#define sfReferenceCount ((3U << 16U) + 19U)
|
||||
#define sfTouchCount ((3U << 16U) + 97U)
|
||||
#define sfAccountIndex ((3U << 16U) + 98U)
|
||||
#define sfAccountCount ((3U << 16U) + 99U)
|
||||
#define sfRewardAccumulator ((3U << 16U) + 100U)
|
||||
#define sfEmailHash ((4U << 16U) + 1U)
|
||||
#define sfTakerPaysCurrency ((10U << 16U) + 1U)
|
||||
#define sfTakerPaysIssuer ((10U << 16U) + 2U)
|
||||
#define sfTakerGetsCurrency ((10U << 16U) + 3U)
|
||||
#define sfTakerGetsIssuer ((10U << 16U) + 4U)
|
||||
#define sfTakerPaysCurrency ((17U << 16U) + 1U)
|
||||
#define sfTakerPaysIssuer ((17U << 16U) + 2U)
|
||||
#define sfTakerGetsCurrency ((17U << 16U) + 3U)
|
||||
#define sfTakerGetsIssuer ((17U << 16U) + 4U)
|
||||
#define sfLedgerHash ((5U << 16U) + 1U)
|
||||
#define sfParentHash ((5U << 16U) + 2U)
|
||||
#define sfTransactionHash ((5U << 16U) + 3U)
|
||||
@@ -120,6 +126,9 @@
|
||||
#define sfOfferID ((5U << 16U) + 34U)
|
||||
#define sfEscrowID ((5U << 16U) + 35U)
|
||||
#define sfURITokenID ((5U << 16U) + 36U)
|
||||
#define sfGovernanceFlags ((5U << 16U) + 99U)
|
||||
#define sfGovernanceMarks ((5U << 16U) + 98U)
|
||||
#define sfEmittedTxnID ((5U << 16U) + 97U)
|
||||
#define sfAmount ((6U << 16U) + 1U)
|
||||
#define sfBalance ((6U << 16U) + 2U)
|
||||
#define sfLimitAmount ((6U << 16U) + 3U)
|
||||
@@ -136,6 +145,9 @@
|
||||
#define sfNFTokenBrokerFee ((6U << 16U) + 19U)
|
||||
#define sfHookCallbackFee ((6U << 16U) + 20U)
|
||||
#define sfLockedBalance ((6U << 16U) + 21U)
|
||||
#define sfBaseFeeDrops ((6U << 16U) + 22U)
|
||||
#define sfReserveBaseDrops ((6U << 16U) + 23U)
|
||||
#define sfReserveIncrementDrops ((6U << 16U) + 24U)
|
||||
#define sfPublicKey ((7U << 16U) + 1U)
|
||||
#define sfMessageKey ((7U << 16U) + 2U)
|
||||
#define sfSigningPubKey ((7U << 16U) + 3U)
|
||||
@@ -171,11 +183,13 @@
|
||||
#define sfNFTokenMinter ((8U << 16U) + 9U)
|
||||
#define sfEmitCallback ((8U << 16U) + 10U)
|
||||
#define sfHookAccount ((8U << 16U) + 16U)
|
||||
#define sfInform ((8U << 16U) + 99U)
|
||||
#define sfIndexes ((19U << 16U) + 1U)
|
||||
#define sfHashes ((19U << 16U) + 2U)
|
||||
#define sfAmendments ((19U << 16U) + 3U)
|
||||
#define sfNFTokenOffers ((19U << 16U) + 4U)
|
||||
#define sfHookNamespaces ((19U << 16U) + 5U)
|
||||
#define sfURITokenIDs ((19U << 16U) + 99U)
|
||||
#define sfPaths ((18U << 16U) + 1U)
|
||||
#define sfTransactionMetaData ((14U << 16U) + 2U)
|
||||
#define sfCreatedNode ((14U << 16U) + 3U)
|
||||
@@ -198,6 +212,12 @@
|
||||
#define sfHookDefinition ((14U << 16U) + 22U)
|
||||
#define sfHookParameter ((14U << 16U) + 23U)
|
||||
#define sfHookGrant ((14U << 16U) + 24U)
|
||||
#define sfGenesisMint ((14U << 16U) + 96U)
|
||||
#define sfActiveValidator ((14U << 16U) + 95U)
|
||||
#define sfImportVLKey ((14U << 16U) + 94U)
|
||||
#define sfHookEmission ((14U << 16U) + 93U)
|
||||
#define sfMintURIToken ((14U << 16U) + 92U)
|
||||
#define sfAmountEntry ((14U << 16U) + 91U)
|
||||
#define sfSigners ((15U << 16U) + 3U)
|
||||
#define sfSignerEntries ((15U << 16U) + 4U)
|
||||
#define sfTemplate ((15U << 16U) + 5U)
|
||||
@@ -212,4 +232,8 @@
|
||||
#define sfHookExecutions ((15U << 16U) + 18U)
|
||||
#define sfHookParameters ((15U << 16U) + 19U)
|
||||
#define sfHookGrants ((15U << 16U) + 20U)
|
||||
#define sfGenesisMints ((15U << 16U) + 96U)
|
||||
#define sfActiveValidators ((15U << 16U) + 95U)
|
||||
#define sfImportVLKeys ((15U << 16U) + 94U)
|
||||
#define sfHookEmissions ((15U << 16U) + 93U)
|
||||
#define sfAmounts ((15U << 16U) + 92U)
|
||||
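Each of these codes packs the serialized type in the high 16 bits and the field code in the low 16 bits, which is exactly the form the field-lookup APIs expect. For example sfAmount is (6U << 16U) + 1U = 393217, and a hook can read it from the originating transaction like this (a sketch assuming otxn_field() from the hooks API):

```c
uint8_t amount[48];                               /* 8 bytes for drops, 48 for an IOU amount */
int64_t amount_len = otxn_field(SBUF(amount), sfAmount);
TRACEVAR(amount_len);
```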
@@ -31,6 +31,7 @@
|
||||
#define ttURITOKEN_BUY 47
|
||||
#define ttURITOKEN_CREATE_SELL_OFFER 48
|
||||
#define ttURITOKEN_CANCEL_SELL_OFFER 49
|
||||
#define ttREMIT 95
|
||||
#define ttGENESIS_MINT 96
|
||||
#define ttIMPORT 97
|
||||
#define ttCLAIM_REWARD 98
|
||||
|
||||
(File diff suppressed because it is too large; not shown here.)
@@ -1,4 +1,9 @@
|
||||
#!/bin/bash
|
||||
#!/bin/bash -u
|
||||
# We use set -e and bash with -u to bail on first non zero exit code of any
|
||||
# processes launched or upon any unbound variable.
|
||||
# We use set -x to print commands before running them to help with
|
||||
# debugging.
|
||||
set -ex
|
||||
|
||||
echo "START BUILDING (HOST)"
|
||||
|
||||
@@ -12,10 +17,18 @@ if [[ "$GITHUB_REPOSITORY" == "" ]]; then
|
||||
BUILD_CORES=8
|
||||
fi
|
||||
|
||||
# Ensure still works outside of GH Actions by setting these to /dev/null
|
||||
# GA will run this script and then delete it at the end of the job
|
||||
JOB_CLEANUP_SCRIPT=${JOB_CLEANUP_SCRIPT:-/dev/null}
|
||||
NORMALIZED_WORKFLOW=$(echo "$GITHUB_WORKFLOW" | tr -c 'a-zA-Z0-9' '-')
|
||||
NORMALIZED_REF=$(echo "$GITHUB_REF" | tr -c 'a-zA-Z0-9' '-')
|
||||
CONTAINER_NAME="xahaud_cached_builder_${NORMALIZED_WORKFLOW}-${NORMALIZED_REF}"
|
||||
|
||||
echo "-- BUILD CORES: $BUILD_CORES"
|
||||
echo "-- GITHUB_REPOSITORY: $GITHUB_REPOSITORY"
|
||||
echo "-- GITHUB_SHA: $GITHUB_SHA"
|
||||
echo "-- GITHUB_RUN_NUMBER: $GITHUB_RUN_NUMBER"
|
||||
echo "-- CONTAINER_NAME: $CONTAINER_NAME"
|
||||
|
||||
which docker 2> /dev/null 2> /dev/null
|
||||
if [ "$?" -eq "1" ]
|
||||
@@ -31,13 +44,14 @@ then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
STATIC_CONTAINER=$(docker ps -a | grep xahaud_cached_builder |wc -l)
|
||||
STATIC_CONTAINER=$(docker ps -a | grep $CONTAINER_NAME |wc -l)
|
||||
|
||||
if [[ "$STATIC_CONTAINER" -gt "0" && "$GITHUB_REPOSITORY" != "" ]]; then
|
||||
# if [[ "$STATIC_CONTAINER" -gt "0" && "$GITHUB_REPOSITORY" != "" ]]; then
|
||||
if false; then
|
||||
echo "Static container, execute in static container to have max. cache"
|
||||
docker start xahaud_cached_builder
|
||||
docker exec -i xahaud_cached_builder /hbb_exe/activate-exec bash -x /io/build-core.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
|
||||
docker stop xahaud_cached_builder
|
||||
docker start $CONTAINER_NAME
|
||||
docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -x /io/build-core.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
|
||||
docker stop $CONTAINER_NAME
|
||||
else
|
||||
echo "No static container, build on temp container"
|
||||
rm -rf release-build;
|
||||
@@ -50,10 +64,12 @@ else
|
||||
else
|
||||
# GH Action, runner
|
||||
echo "GH Action, runner, clean & re-create create persistent container"
|
||||
docker rm -f xahaud_cached_builder
|
||||
docker run -di --user 0:$(id -g) --name xahaud_cached_builder -v /data/builds:/data/builds -v `pwd`:/io --network host ghcr.io/foobarwidget/holy-build-box-x64 /hbb_exe/activate-exec bash
|
||||
docker exec -i xahaud_cached_builder /hbb_exe/activate-exec bash -x /io/build-full.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
|
||||
docker stop xahaud_cached_builder
|
||||
docker rm -f $CONTAINER_NAME
|
||||
echo "echo 'Stopping container: $CONTAINER_NAME'" >> "$JOB_CLEANUP_SCRIPT"
|
||||
echo "docker stop --time=15 \"$CONTAINER_NAME\" || echo 'Failed to stop container or container not running'" >> "$JOB_CLEANUP_SCRIPT"
|
||||
docker run -di --user 0:$(id -g) --name $CONTAINER_NAME -v /data/builds:/data/builds -v `pwd`:/io --network host ghcr.io/foobarwidget/holy-build-box-x64 /hbb_exe/activate-exec bash
|
||||
docker exec -i $CONTAINER_NAME /hbb_exe/activate-exec bash -x /io/build-full.sh "$GITHUB_REPOSITORY" "$GITHUB_SHA" "$BUILD_CORES" "$GITHUB_RUN_NUMBER"
|
||||
docker stop $CONTAINER_NAME
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
48  src/ed25519-donna/CMakeLists.txt  (new file)
@@ -0,0 +1,48 @@
cmake_minimum_required(VERSION 3.11)
|
||||
|
||||
project(ed25519
|
||||
LANGUAGES C
|
||||
)
|
||||
|
||||
if(PROJECT_NAME STREQUAL CMAKE_PROJECT_NAME)
|
||||
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/output/$<CONFIG>/lib")
|
||||
endif()
|
||||
|
||||
if(NOT TARGET OpenSSL::SSL)
|
||||
find_package(OpenSSL)
|
||||
endif()
|
||||
|
||||
add_library(ed25519 STATIC
|
||||
ed25519.c
|
||||
)
|
||||
add_library(ed25519::ed25519 ALIAS ed25519)
|
||||
target_link_libraries(ed25519 PUBLIC OpenSSL::SSL)
|
||||
|
||||
include(GNUInstallDirs)
|
||||
|
||||
#[=========================================================[
|
||||
NOTE for macos:
|
||||
https://github.com/floodyberry/ed25519-donna/issues/29
|
||||
our source for ed25519-donna-portable.h has been
|
||||
patched to workaround this.
|
||||
#]=========================================================]
|
||||
target_include_directories(ed25519 PUBLIC
|
||||
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
|
||||
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>
|
||||
)
|
||||
|
||||
install(
|
||||
TARGETS ed25519
|
||||
EXPORT ${PROJECT_NAME}-exports
|
||||
ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
|
||||
)
|
||||
install(
|
||||
EXPORT ${PROJECT_NAME}-exports
|
||||
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}"
|
||||
FILE ${PROJECT_NAME}-targets.cmake
|
||||
NAMESPACE ${PROJECT_NAME}::
|
||||
)
|
||||
install(
|
||||
FILES ed25519.h
|
||||
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
|
||||
)
|
||||
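The install() rules above export an ed25519::ed25519 target and ship ed25519.h, so a consumer only needs find_package(ed25519) (or add_subdirectory) plus target_link_libraries on that target. A minimal sketch of calling the library, with the function signatures assumed from the upstream ed25519-donna header rather than shown in this diff:

```c
/* consumer.c: sign and verify a message with the ed25519 static library built above. */
#include <string.h>
#include "ed25519.h"

int main(void)
{
    ed25519_secret_key sk = {0};          /* in real use: 32 random secret bytes */
    ed25519_public_key pk;
    ed25519_signature  sig;
    const unsigned char msg[] = "xahau";

    ed25519_publickey(sk, pk);                       /* derive public key from secret key */
    ed25519_sign(msg, sizeof(msg) - 1, sk, pk, sig); /* detached 64-byte signature */

    /* ed25519_sign_open returns 0 when the signature verifies */
    return ed25519_sign_open(msg, sizeof(msg) - 1, pk, sig);
}
```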
@@ -5,11 +5,11 @@
|
||||
// | | | | (_| | (_| | | (__ | |____| | | | |_| | | | | | | | |____|_| |_|
|
||||
// |_| |_|\__,_|\__, |_|\___| |______|_| |_|\__,_|_| |_| |_| \_____|
|
||||
// __/ | https://github.com/Neargye/magic_enum
|
||||
// |___/ version 0.9.3
|
||||
// |___/ version 0.9.5
|
||||
//
|
||||
// Licensed under the MIT License <http://opensource.org/licenses/MIT>.
|
||||
// SPDX-License-Identifier: MIT
|
||||
// Copyright (c) 2019 - 2023 Daniil Goncharov <neargye@gmail.com>.
|
||||
// Copyright (c) 2019 - 2024 Daniil Goncharov <neargye@gmail.com>.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
@@ -34,7 +34,7 @@
|
||||
|
||||
#define MAGIC_ENUM_VERSION_MAJOR 0
|
||||
#define MAGIC_ENUM_VERSION_MINOR 9
|
||||
#define MAGIC_ENUM_VERSION_PATCH 3
|
||||
#define MAGIC_ENUM_VERSION_PATCH 5
|
||||
|
||||
#include <array>
|
||||
#include <cstddef>
|
||||
@@ -60,7 +60,7 @@
|
||||
|
||||
#if defined(MAGIC_ENUM_NO_ASSERT)
|
||||
# define MAGIC_ENUM_ASSERT(...) static_cast<void>(0)
|
||||
#else
|
||||
#elif !defined(MAGIC_ENUM_ASSERT)
|
||||
# include <cassert>
|
||||
# define MAGIC_ENUM_ASSERT(...) assert((__VA_ARGS__))
|
||||
#endif
|
||||
@@ -69,9 +69,11 @@
|
||||
# pragma clang diagnostic push
|
||||
# pragma clang diagnostic ignored "-Wunknown-warning-option"
|
||||
# pragma clang diagnostic ignored "-Wenum-constexpr-conversion"
|
||||
# pragma clang diagnostic ignored "-Wuseless-cast" // suppresses 'static_cast<char_type>('\0')' for char_type = char (common on Linux).
|
||||
#elif defined(__GNUC__)
|
||||
# pragma GCC diagnostic push
|
||||
# pragma GCC diagnostic ignored "-Wmaybe-uninitialized" // May be used uninitialized 'return {};'.
|
||||
# pragma GCC diagnostic ignored "-Wuseless-cast" // suppresses 'static_cast<char_type>('\0')' for char_type = char (common on Linux).
|
||||
#elif defined(_MSC_VER)
|
||||
# pragma warning(push)
|
||||
# pragma warning(disable : 26495) // Variable 'static_str<N>::chars_' is uninitialized.
|
||||
@@ -164,14 +166,11 @@ namespace customize {
|
||||
// If need another range for specific enum type, add specialization enum_range for necessary enum type.
|
||||
template <typename E>
|
||||
struct enum_range {
|
||||
static_assert(std::is_enum_v<E>, "magic_enum::customize::enum_range requires enum type.");
|
||||
static constexpr int min = MAGIC_ENUM_RANGE_MIN;
|
||||
static constexpr int max = MAGIC_ENUM_RANGE_MAX;
|
||||
static_assert(max > min, "magic_enum::customize::enum_range requires max > min.");
|
||||
};
|
||||
|
||||
static_assert(MAGIC_ENUM_RANGE_MAX > MAGIC_ENUM_RANGE_MIN, "MAGIC_ENUM_RANGE_MAX must be greater than MAGIC_ENUM_RANGE_MIN.");
|
||||
static_assert((MAGIC_ENUM_RANGE_MAX - MAGIC_ENUM_RANGE_MIN) < (std::numeric_limits<std::uint16_t>::max)(), "MAGIC_ENUM_RANGE must be less than UINT16_MAX.");
|
||||
|
||||
namespace detail {
|
||||
|
||||
@@ -216,9 +215,9 @@ namespace detail {
|
||||
template <typename T>
|
||||
struct supported
|
||||
#if defined(MAGIC_ENUM_SUPPORTED) && MAGIC_ENUM_SUPPORTED || defined(MAGIC_ENUM_NO_CHECK_SUPPORT)
|
||||
: std::true_type {};
|
||||
: std::true_type {};
|
||||
#else
|
||||
: std::false_type {};
|
||||
: std::false_type {};
|
||||
#endif
|
||||
|
||||
template <auto V, typename E = std::decay_t<decltype(V)>, std::enable_if_t<std::is_enum_v<E>, int> = 0>
|
||||
@@ -423,10 +422,20 @@ constexpr auto n() noexcept {
|
||||
constexpr auto name_ptr = MAGIC_ENUM_GET_TYPE_NAME_BUILTIN(E);
|
||||
constexpr auto name = name_ptr ? str_view{name_ptr, std::char_traits<char>::length(name_ptr)} : str_view{};
|
||||
#elif defined(__clang__)
|
||||
auto name = str_view{__PRETTY_FUNCTION__ + 34, sizeof(__PRETTY_FUNCTION__) - 36};
|
||||
str_view name;
|
||||
if constexpr (sizeof(__PRETTY_FUNCTION__) == sizeof(__FUNCTION__)) {
|
||||
static_assert(always_false_v<E>, "magic_enum::detail::n requires __PRETTY_FUNCTION__.");
|
||||
return str_view{};
|
||||
} else {
|
||||
name.size_ = sizeof(__PRETTY_FUNCTION__) - 36;
|
||||
name.str_ = __PRETTY_FUNCTION__ + 34;
|
||||
}
|
||||
#elif defined(__GNUC__)
|
||||
auto name = str_view{__PRETTY_FUNCTION__, sizeof(__PRETTY_FUNCTION__) - 1};
|
||||
if (name.str_[name.size_ - 1] == ']') {
|
||||
if constexpr (sizeof(__PRETTY_FUNCTION__) == sizeof(__FUNCTION__)) {
|
||||
static_assert(always_false_v<E>, "magic_enum::detail::n requires __PRETTY_FUNCTION__.");
|
||||
return str_view{};
|
||||
} else if (name.str_[name.size_ - 1] == ']') {
|
||||
name.size_ -= 50;
|
||||
name.str_ += 49;
|
||||
} else {
|
||||
@@ -489,7 +498,14 @@ constexpr auto n() noexcept {
|
||||
constexpr auto name_ptr = MAGIC_ENUM_GET_ENUM_NAME_BUILTIN(V);
|
||||
auto name = name_ptr ? str_view{name_ptr, std::char_traits<char>::length(name_ptr)} : str_view{};
|
||||
#elif defined(__clang__)
|
||||
auto name = str_view{__PRETTY_FUNCTION__ + 34, sizeof(__PRETTY_FUNCTION__) - 36};
|
||||
str_view name;
|
||||
if constexpr (sizeof(__PRETTY_FUNCTION__) == sizeof(__FUNCTION__)) {
|
||||
static_assert(always_false_v<decltype(V)>, "magic_enum::detail::n requires __PRETTY_FUNCTION__.");
|
||||
return str_view{};
|
||||
} else {
|
||||
name.size_ = sizeof(__PRETTY_FUNCTION__) - 36;
|
||||
name.str_ = __PRETTY_FUNCTION__ + 34;
|
||||
}
|
||||
if (name.size_ > 22 && name.str_[0] == '(' && name.str_[1] == 'a' && name.str_[10] == ' ' && name.str_[22] == ':') {
|
||||
name.size_ -= 23;
|
||||
name.str_ += 23;
|
||||
@@ -499,7 +515,10 @@ constexpr auto n() noexcept {
|
||||
}
|
||||
#elif defined(__GNUC__)
|
||||
auto name = str_view{__PRETTY_FUNCTION__, sizeof(__PRETTY_FUNCTION__) - 1};
|
||||
if (name.str_[name.size_ - 1] == ']') {
|
||||
if constexpr (sizeof(__PRETTY_FUNCTION__) == sizeof(__FUNCTION__)) {
|
||||
static_assert(always_false_v<decltype(V)>, "magic_enum::detail::n requires __PRETTY_FUNCTION__.");
|
||||
return str_view{};
|
||||
} else if (name.str_[name.size_ - 1] == ']') {
|
||||
name.size_ -= 55;
|
||||
name.str_ += 54;
|
||||
} else {
|
||||
@@ -698,7 +717,7 @@ constexpr void valid_count(bool* valid, std::size_t& count) noexcept {
|
||||
} \
|
||||
}
|
||||
|
||||
MAGIC_ENUM_FOR_EACH_256(MAGIC_ENUM_V);
|
||||
MAGIC_ENUM_FOR_EACH_256(MAGIC_ENUM_V)
|
||||
|
||||
if constexpr ((I + 256) < Size) {
|
||||
valid_count<E, S, Size, Min, I + 256>(valid, count);
|
||||
@@ -750,7 +769,6 @@ constexpr auto values() noexcept {
|
||||
constexpr auto max = reflected_max<E, S>();
|
||||
constexpr auto range_size = max - min + 1;
|
||||
static_assert(range_size > 0, "magic_enum::enum_range requires valid size.");
|
||||
static_assert(range_size < (std::numeric_limits<std::uint16_t>::max)(), "magic_enum::enum_range requires valid size.");
|
||||
|
||||
return values<E, S, range_size, min>();
|
||||
}
|
||||
@@ -807,7 +825,8 @@ inline constexpr auto max_v = (count_v<E, S> > 0) ? static_cast<U>(values_v<E, S
|
||||
|
||||
template <typename E, enum_subtype S, std::size_t... I>
|
||||
constexpr auto names(std::index_sequence<I...>) noexcept {
|
||||
return std::array<string_view, sizeof...(I)>{{enum_name_v<E, values_v<E, S>[I]>...}};
|
||||
constexpr auto names = std::array<string_view, sizeof...(I)>{{enum_name_v<E, values_v<E, S>[I]>...}};
|
||||
return names;
|
||||
}
|
||||
|
||||
template <typename E, enum_subtype S>
|
||||
@@ -818,7 +837,8 @@ using names_t = decltype((names_v<D, S>));
|
||||
|
||||
template <typename E, enum_subtype S, std::size_t... I>
|
||||
constexpr auto entries(std::index_sequence<I...>) noexcept {
|
||||
return std::array<std::pair<E, string_view>, sizeof...(I)>{{{values_v<E, S>[I], enum_name_v<E, values_v<E, S>[I]>}...}};
|
||||
constexpr auto entries = std::array<std::pair<E, string_view>, sizeof...(I)>{{{values_v<E, S>[I], enum_name_v<E, values_v<E, S>[I]>}...}};
|
||||
return entries;
|
||||
}
|
||||
|
||||
template <typename E, enum_subtype S>
|
||||
@@ -845,17 +865,16 @@ constexpr bool is_sparse() noexcept {
|
||||
template <typename E, enum_subtype S = subtype_v<E>>
|
||||
inline constexpr bool is_sparse_v = is_sparse<E, S>();
|
||||
|
||||
template <typename E, enum_subtype S, typename U = std::underlying_type_t<E>>
|
||||
constexpr U values_ors() noexcept {
|
||||
static_assert(S == enum_subtype::flags, "magic_enum::detail::values_ors requires valid subtype.");
|
||||
template <typename E, enum_subtype S>
|
||||
struct is_reflected
|
||||
#if defined(MAGIC_ENUM_NO_CHECK_REFLECTED_ENUM)
|
||||
: std::true_type {};
|
||||
#else
|
||||
: std::bool_constant<std::is_enum_v<E> && (count_v<E, S> != 0)> {};
|
||||
#endif
|
||||
|
||||
auto ors = U{0};
|
||||
for (std::size_t i = 0; i < count_v<E, S>; ++i) {
|
||||
ors |= static_cast<U>(values_v<E, S>[i]);
|
||||
}
|
||||
|
||||
return ors;
|
||||
}
|
||||
template <typename E, enum_subtype S>
|
||||
inline constexpr bool is_reflected_v = is_reflected<std::decay_t<E>, S>{};
|
||||
|
||||
template <bool, typename R>
|
||||
struct enable_if_enum {};
|
||||
@@ -1156,6 +1175,7 @@ template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
[[nodiscard]] constexpr auto enum_value(std::size_t index) noexcept -> detail::enable_if_t<E, std::decay_t<E>> {
|
||||
using D = std::decay_t<E>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
|
||||
if constexpr (detail::is_sparse_v<D, S>) {
|
||||
return MAGIC_ENUM_ASSERT(index < detail::count_v<D, S>), detail::values_v<D, S>[index];
|
||||
@@ -1170,6 +1190,7 @@ template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
template <typename E, std::size_t I, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
[[nodiscard]] constexpr auto enum_value() noexcept -> detail::enable_if_t<E, std::decay_t<E>> {
|
||||
using D = std::decay_t<E>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
static_assert(I < detail::count_v<D, S>, "magic_enum::enum_value out of range.");
|
||||
|
||||
return enum_value<D, S>(I);
|
||||
@@ -1178,7 +1199,10 @@ template <typename E, std::size_t I, detail::enum_subtype S = detail::subtype_v<
|
||||
// Returns std::array with enum values, sorted by enum value.
|
||||
template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
[[nodiscard]] constexpr auto enum_values() noexcept -> detail::enable_if_t<E, detail::values_t<E, S>> {
|
||||
return detail::values_v<std::decay_t<E>, S>;
|
||||
using D = std::decay_t<E>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
|
||||
return detail::values_v<D, S>;
|
||||
}
|
||||
|
||||
// Returns integer value from enum value.
|
||||
@@ -1199,11 +1223,9 @@ template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
[[nodiscard]] constexpr auto enum_index(E value) noexcept -> detail::enable_if_t<E, optional<std::size_t>> {
|
||||
using D = std::decay_t<E>;
|
||||
using U = underlying_type_t<D>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
|
||||
if constexpr (detail::count_v<D, S> == 0) {
|
||||
static_cast<void>(value);
|
||||
return {}; // Empty enum.
|
||||
} else if constexpr (detail::is_sparse_v<D, S> || (S == detail::enum_subtype::flags)) {
|
||||
if constexpr (detail::is_sparse_v<D, S> || (S == detail::enum_subtype::flags)) {
|
||||
#if defined(MAGIC_ENUM_ENABLE_HASH)
|
||||
return detail::constexpr_switch<&detail::values_v<D, S>, detail::case_call_t::index>(
|
||||
[](std::size_t i) { return optional<std::size_t>{i}; },
|
||||
@@ -1231,14 +1253,17 @@ template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
template <detail::enum_subtype S, typename E>
|
||||
[[nodiscard]] constexpr auto enum_index(E value) noexcept -> detail::enable_if_t<E, optional<std::size_t>> {
|
||||
using D = std::decay_t<E>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
|
||||
return enum_index<D, S>(value);
|
||||
}
|
||||
|
||||
// Obtains index in enum values from static storage enum variable.
|
||||
template <auto V, detail::enum_subtype S = detail::subtype_v<std::decay_t<decltype(V)>>>
[[nodiscard]] constexpr auto enum_index() noexcept -> detail::enable_if_t<decltype(V), std::size_t> {
constexpr auto index = enum_index<std::decay_t<decltype(V)>, S>(V);
[[nodiscard]] constexpr auto enum_index() noexcept -> detail::enable_if_t<decltype(V), std::size_t> {
using D = std::decay_t<decltype(V)>;
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
constexpr auto index = enum_index<D, S>(V);
static_assert(index, "magic_enum::enum_index enum value does not have a index.");

return *index;
|
||||
@@ -1259,6 +1284,7 @@ template <auto V>
|
||||
template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
[[nodiscard]] constexpr auto enum_name(E value) noexcept -> detail::enable_if_t<E, string_view> {
|
||||
using D = std::decay_t<E>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
|
||||
if (const auto i = enum_index<D, S>(value)) {
|
||||
return detail::names_v<D, S>[*i];
|
||||
@@ -1271,6 +1297,7 @@ template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
template <detail::enum_subtype S, typename E>
|
||||
[[nodiscard]] constexpr auto enum_name(E value) -> detail::enable_if_t<E, string_view> {
|
||||
using D = std::decay_t<E>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
|
||||
return enum_name<D, S>(value);
|
||||
}
|
||||
@@ -1278,13 +1305,19 @@ template <detail::enum_subtype S, typename E>
|
||||
// Returns std::array with names, sorted by enum value.
|
||||
template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
[[nodiscard]] constexpr auto enum_names() noexcept -> detail::enable_if_t<E, detail::names_t<E, S>> {
|
||||
return detail::names_v<std::decay_t<E>, S>;
|
||||
using D = std::decay_t<E>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
|
||||
return detail::names_v<D, S>;
|
||||
}
|
||||
|
||||
// Returns std::array with pairs (value, name), sorted by enum value.
|
||||
template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
[[nodiscard]] constexpr auto enum_entries() noexcept -> detail::enable_if_t<E, detail::entries_t<E, S>> {
|
||||
return detail::entries_v<std::decay_t<E>, S>;
|
||||
using D = std::decay_t<E>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
|
||||
return detail::entries_v<D, S>;
|
||||
}
|
||||
|
||||
// Allows you to write magic_enum::enum_cast<foo>("bar", magic_enum::case_insensitive);
|
||||
@@ -1295,31 +1328,27 @@ inline constexpr auto case_insensitive = detail::case_insensitive<>{};
|
||||
template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
[[nodiscard]] constexpr auto enum_cast(underlying_type_t<E> value) noexcept -> detail::enable_if_t<E, optional<std::decay_t<E>>> {
|
||||
using D = std::decay_t<E>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
|
||||
if constexpr (detail::count_v<D, S> == 0) {
|
||||
static_cast<void>(value);
|
||||
return {}; // Empty enum.
|
||||
} else {
|
||||
if constexpr (detail::is_sparse_v<D, S> || (S == detail::enum_subtype::flags)) {
|
||||
if constexpr (detail::is_sparse_v<D, S> || (S == detail::enum_subtype::flags)) {
|
||||
#if defined(MAGIC_ENUM_ENABLE_HASH)
|
||||
return detail::constexpr_switch<&detail::values_v<D, S>, detail::case_call_t::value>(
|
||||
[](D v) { return optional<D>{v}; },
|
||||
static_cast<D>(value),
|
||||
detail::default_result_type_lambda<optional<D>>);
|
||||
return detail::constexpr_switch<&detail::values_v<D, S>, detail::case_call_t::value>(
|
||||
[](D v) { return optional<D>{v}; },
|
||||
static_cast<D>(value),
|
||||
detail::default_result_type_lambda<optional<D>>);
|
||||
#else
|
||||
for (std::size_t i = 0; i < detail::count_v<D, S>; ++i) {
|
||||
if (value == static_cast<underlying_type_t<D>>(enum_value<D, S>(i))) {
|
||||
return static_cast<D>(value);
|
||||
}
|
||||
}
|
||||
return {}; // Invalid value or out of range.
|
||||
#endif
|
||||
} else {
|
||||
if (value >= detail::min_v<D, S> && value <= detail::max_v<D, S>) {
|
||||
for (std::size_t i = 0; i < detail::count_v<D, S>; ++i) {
|
||||
if (value == static_cast<underlying_type_t<D>>(enum_value<D, S>(i))) {
|
||||
return static_cast<D>(value);
|
||||
}
|
||||
return {}; // Invalid value or out of range.
|
||||
}
|
||||
return {}; // Invalid value or out of range.
|
||||
#endif
|
||||
} else {
|
||||
if (value >= detail::min_v<D, S> && value <= detail::max_v<D, S>) {
|
||||
return static_cast<D>(value);
|
||||
}
|
||||
return {}; // Invalid value or out of range.
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1328,26 +1357,23 @@ template <typename E, detail::enum_subtype S = detail::subtype_v<E>>
|
||||
template <typename E, detail::enum_subtype S = detail::subtype_v<E>, typename BinaryPredicate = std::equal_to<>>
|
||||
[[nodiscard]] constexpr auto enum_cast(string_view value, [[maybe_unused]] BinaryPredicate p = {}) noexcept(detail::is_nothrow_invocable<BinaryPredicate>()) -> detail::enable_if_t<E, optional<std::decay_t<E>>, BinaryPredicate> {
|
||||
using D = std::decay_t<E>;
|
||||
static_assert(detail::is_reflected_v<D, S>, "magic_enum requires enum implementation and valid max and min.");
|
||||
|
||||
if constexpr (detail::count_v<D, S> == 0) {
|
||||
static_cast<void>(value);
|
||||
return {}; // Empty enum.
|
||||
#if defined(MAGIC_ENUM_ENABLE_HASH)
|
||||
} else if constexpr (detail::is_default_predicate<BinaryPredicate>()) {
|
||||
return detail::constexpr_switch<&detail::names_v<D, S>, detail::case_call_t::index>(
|
||||
[](std::size_t i) { return optional<D>{detail::values_v<D, S>[i]}; },
|
||||
value,
|
||||
detail::default_result_type_lambda<optional<D>>,
|
||||
[&p](string_view lhs, string_view rhs) { return detail::cmp_equal(lhs, rhs, p); });
|
||||
#endif
|
||||
} else {
|
||||
for (std::size_t i = 0; i < detail::count_v<D, S>; ++i) {
|
||||
if (detail::cmp_equal(value, detail::names_v<D, S>[i], p)) {
|
||||
return enum_value<D, S>(i);
|
||||
}
|
||||
}
|
||||
return {}; // Invalid value or out of range.
|
||||
if constexpr (detail::is_default_predicate<BinaryPredicate>()) {
|
||||
return detail::constexpr_switch<&detail::names_v<D, S>, detail::case_call_t::index>(
|
||||
[](std::size_t i) { return optional<D>{detail::values_v<D, S>[i]}; },
|
||||
value,
|
||||
detail::default_result_type_lambda<optional<D>>,
|
||||
[&p](string_view lhs, string_view rhs) { return detail::cmp_equal(lhs, rhs, p); });
|
||||
}
|
||||
#endif
|
||||
for (std::size_t i = 0; i < detail::count_v<D, S>; ++i) {
|
||||
if (detail::cmp_equal(value, detail::names_v<D, S>[i], p)) {
|
||||
return enum_value<D, S>(i);
|
||||
}
|
||||
}
|
||||
return {}; // Invalid value or out of range.
|
||||
}
|
||||
|
||||
// Checks whether enum contains value with such value.
|
||||
|
||||
@@ -134,8 +134,12 @@ RCLConsensus::Adaptor::acquireLedger(LedgerHash const& hash)
acquiringLedger_ = hash;

app_.getJobQueue().addJob(
jtADVANCE, "getConsensusLedger", [id = hash, &app = app_]() {
app.getInboundLedgers().acquire(
jtADVANCE,
"getConsensusLedger1",
[id = hash, &app = app_, this]() {
JLOG(j_.debug())
<< "JOB advanceLedger getConsensusLedger1 started";
app.getInboundLedgers().acquireAsync(
id, 0, InboundLedger::Reason::CONSENSUS);
});
}
|
||||
@@ -182,7 +186,7 @@ RCLConsensus::Adaptor::share(RCLCxTx const& tx)
|
||||
if (app_.getHashRouter().shouldRelay(tx.id()))
|
||||
{
|
||||
JLOG(j_.debug()) << "Relaying disputed tx " << tx.id();
|
||||
auto const slice = tx.tx_.slice();
|
||||
auto const slice = tx.tx_->slice();
|
||||
protocol::TMTransaction msg;
|
||||
msg.set_rawtransaction(slice.data(), slice.size());
|
||||
msg.set_status(protocol::tsNEW);
|
||||
@@ -326,7 +330,7 @@ RCLConsensus::Adaptor::onClose(
|
||||
tx.first->add(s);
|
||||
initialSet->addItem(
|
||||
SHAMapNodeType::tnTRANSACTION_NM,
|
||||
SHAMapItem(tx.first->getTransactionID(), s.slice()));
|
||||
make_shamapitem(tx.first->getTransactionID(), s.slice()));
|
||||
}
|
||||
|
||||
// Add pseudo-transactions to the set
|
||||
@@ -370,7 +374,8 @@ RCLConsensus::Adaptor::onClose(
|
||||
RCLCensorshipDetector<TxID, LedgerIndex>::TxIDSeqVec proposed;
|
||||
|
||||
initialSet->visitLeaves(
|
||||
[&proposed, seq](std::shared_ptr<SHAMapItem const> const& item) {
|
||||
[&proposed,
|
||||
seq](boost::intrusive_ptr<SHAMapItem const> const& item) {
|
||||
proposed.emplace_back(item->key(), seq);
|
||||
});
|
||||
|
||||
@@ -493,15 +498,11 @@ RCLConsensus::Adaptor::doAccept(
|
||||
|
||||
for (auto const& item : *result.txns.map_)
|
||||
{
|
||||
#ifndef DEBUG
|
||||
try
|
||||
{
|
||||
#endif
|
||||
retriableTxs.insert(
|
||||
std::make_shared<STTx const>(SerialIter{item.slice()}));
|
||||
JLOG(j_.debug()) << " Tx: " << item.key();
|
||||
|
||||
#ifndef DEBUG
|
||||
}
|
||||
catch (std::exception const& ex)
|
||||
{
|
||||
@@ -509,7 +510,6 @@ RCLConsensus::Adaptor::doAccept(
|
||||
JLOG(j_.warn())
|
||||
<< " Tx: " << item.key() << " throws: " << ex.what();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
auto built = buildLCL(
|
||||
@@ -535,7 +535,7 @@ RCLConsensus::Adaptor::doAccept(
|
||||
std::vector<TxID> accepted;
|
||||
|
||||
result.txns.map_->visitLeaves(
|
||||
[&accepted](std::shared_ptr<SHAMapItem const> const& item) {
|
||||
[&accepted](boost::intrusive_ptr<SHAMapItem const> const& item) {
|
||||
accepted.push_back(item->key());
|
||||
});
|
||||
|
||||
@@ -610,7 +610,7 @@ RCLConsensus::Adaptor::doAccept(
|
||||
<< "Test applying disputed transaction that did"
|
||||
<< " not get in " << dispute.tx().id();
|
||||
|
||||
SerialIter sit(dispute.tx().tx_.slice());
|
||||
SerialIter sit(dispute.tx().tx_->slice());
|
||||
auto txn = std::make_shared<STTx const>(sit);
|
||||
|
||||
// Disputed pseudo-transactions that were not accepted
|
||||
|
||||
@@ -42,7 +42,7 @@ public:
|
||||
|
||||
@param txn The transaction to wrap
|
||||
*/
|
||||
RCLCxTx(SHAMapItem const& txn) : tx_{txn}
|
||||
RCLCxTx(boost::intrusive_ptr<SHAMapItem const> txn) : tx_(std::move(txn))
|
||||
{
|
||||
}
|
||||
|
||||
@@ -50,11 +50,11 @@ public:
|
||||
ID const&
|
||||
id() const
|
||||
{
|
||||
return tx_.key();
|
||||
return tx_->key();
|
||||
}
|
||||
|
||||
//! The SHAMapItem that represents the transaction.
|
||||
SHAMapItem const tx_;
|
||||
boost::intrusive_ptr<SHAMapItem const> tx_;
|
||||
};
|
||||
|
||||
/** Represents a set of transactions in RCLConsensus.
|
||||
@@ -90,8 +90,7 @@ public:
|
||||
bool
|
||||
insert(Tx const& t)
|
||||
{
|
||||
return map_->addItem(
|
||||
SHAMapNodeType::tnTRANSACTION_NM, SHAMapItem{t.tx_});
|
||||
return map_->addItem(SHAMapNodeType::tnTRANSACTION_NM, t.tx_);
|
||||
}
|
||||
|
||||
/** Remove a transaction from the set.
|
||||
@@ -145,7 +144,7 @@ public:
|
||||
code uses the shared_ptr semantics to know whether the find
|
||||
was successful and properly creates a Tx as needed.
|
||||
*/
|
||||
std::shared_ptr<const SHAMapItem> const&
|
||||
boost::intrusive_ptr<SHAMapItem const> const&
|
||||
find(Tx::ID const& entry) const
|
||||
{
|
||||
return map_->peekItem(entry);
|
||||
|
||||
@@ -135,8 +135,10 @@ RCLValidationsAdaptor::acquire(LedgerHash const& hash)
|
||||
Application* pApp = &app_;
|
||||
|
||||
app_.getJobQueue().addJob(
|
||||
jtADVANCE, "getConsensusLedger", [pApp, hash]() {
|
||||
pApp->getInboundLedgers().acquire(
|
||||
jtADVANCE, "getConsensusLedger2", [pApp, hash, this]() {
|
||||
JLOG(j_.debug())
|
||||
<< "JOB advanceLedger getConsensusLedger2 started";
|
||||
pApp->getInboundLedgers().acquireAsync(
|
||||
hash, 0, InboundLedger::Reason::CONSENSUS);
|
||||
});
|
||||
return std::nullopt;
|
||||
@@ -152,7 +154,9 @@ void
|
||||
handleNewValidation(
|
||||
Application& app,
|
||||
std::shared_ptr<STValidation> const& val,
|
||||
std::string const& source)
|
||||
std::string const& source,
|
||||
BypassAccept const bypassAccept,
|
||||
std::optional<beast::Journal> j)
|
||||
{
|
||||
auto const& signingKey = val->getSignerPublic();
|
||||
auto const& hash = val->getLedgerHash();
|
||||
@@ -177,7 +181,23 @@ handleNewValidation(
|
||||
if (outcome == ValStatus::current)
|
||||
{
|
||||
if (val->isTrusted())
|
||||
app.getLedgerMaster().checkAccept(hash, seq);
|
||||
{
|
||||
// Was: app.getLedgerMaster().checkAccept(hash, seq);
|
||||
// https://github.com/XRPLF/rippled/commit/fbbea9e6e25795a8a6bd1bf64b780771933a9579
|
||||
if (bypassAccept == BypassAccept::yes)
|
||||
{
|
||||
assert(j.has_value());
|
||||
if (j.has_value())
|
||||
{
|
||||
JLOG(j->trace()) << "Bypassing checkAccept for validation "
|
||||
<< val->getLedgerHash();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
app.getLedgerMaster().checkAccept(hash, seq);
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -25,12 +25,16 @@
|
||||
#include <ripple/protocol/Protocol.h>
|
||||
#include <ripple/protocol/RippleLedgerHash.h>
|
||||
#include <ripple/protocol/STValidation.h>
|
||||
#include <optional>
|
||||
#include <set>
|
||||
#include <vector>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
class Application;
|
||||
|
||||
enum class BypassAccept : bool { no = false, yes };
|
||||
|
||||
/** Wrapper over STValidation for generic Validation code
|
||||
|
||||
Wraps an STValidation for compatibility with the generic validation code.
|
||||
@@ -248,7 +252,9 @@ void
handleNewValidation(
Application& app,
std::shared_ptr<STValidation> const& val,
std::string const& source);
std::string const& source,
BypassAccept const bypassAccept = BypassAccept::no,
std::optional<beast::Journal> j = std::nullopt);

} // namespace ripple
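For orientation, a hedged sketch of how the widened signature above might be called; the call sites, source strings, and journal name below are illustrative only and are not part of this diff:

// Illustrative only. Existing callers keep the old behaviour via the defaults:
handleNewValidation(app, val, "peer");

// A caller that intentionally skips checkAccept() (e.g. while bulk-loading
// already-validated ledgers) opts in explicitly and supplies a journal so the
// bypass is still logged:
handleNewValidation(
    app, val, "catalogue-load", BypassAccept::yes, app.journal("Validations"));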
|
||||
|
||||
|
||||
@@ -66,6 +66,13 @@ maxNamespaces(void)
return 256;
}

// maximum number of entries in a namespace to delete with ns delete
inline uint32_t
maxNamespaceDelete(void)
{
return 256;
}

enum TSHFlags : uint8_t {
tshNONE = 0b000,
tshROLLBACK = 0b001,
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
#include <iostream>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <optional>
|
||||
#include <ostream>
|
||||
#include <stack>
|
||||
#include <string>
|
||||
@@ -271,7 +272,19 @@ check_guard(
|
||||
int guard_func_idx,
|
||||
int last_import_idx,
|
||||
GuardLog guardLog,
|
||||
std::string guardLogAccStr)
|
||||
std::string guardLogAccStr,
|
||||
/* RH NOTE:
|
||||
* rules version is a bit field, so rule update 1 is 0x01, update 2 is 0x02
|
||||
* and update 3 is 0x04 ideally at rule version 3 all bits so far are set
|
||||
* (0b111) so the ruleVersion = 7, however if a specific rule update must be
|
||||
* rolled back due to unforeseen behaviour then this may no longer be the
|
||||
* case. using a bit field here leaves us flexible to rollback changes that
|
||||
* might have unforeseen consequences, without also rolling back further
|
||||
* changes that are fine.
|
||||
*/
|
||||
uint64_t rulesVersion = 0
|
||||
|
||||
)
|
||||
{
|
||||
#define MAX_GUARD_CALLS 1024
|
||||
uint32_t guard_count = 0;
|
||||
@@ -621,11 +634,17 @@ check_guard(
|
||||
}
|
||||
else if (fc_type == 10) // memory.copy
|
||||
{
|
||||
if (rulesVersion & 0x02U)
|
||||
GUARD_ERROR("Memory.copy instruction is not allowed.");
|
||||
|
||||
REQUIRE(2);
|
||||
ADVANCE(2);
|
||||
}
|
||||
else if (fc_type == 11) // memory.fill
|
||||
{
|
||||
if (rulesVersion & 0x02U)
|
||||
GUARD_ERROR("Memory.fill instruction is not allowed.");
|
||||
|
||||
ADVANCE(1);
|
||||
}
|
||||
else if (fc_type <= 7) // numeric instructions
|
||||
@@ -807,6 +826,15 @@ validateGuards(
|
||||
std::vector<uint8_t> const& wasm,
|
||||
GuardLog guardLog,
|
||||
std::string guardLogAccStr,
|
||||
/* RH NOTE:
|
||||
* rules version is a bit field, so rule update 1 is 0x01, update 2 is 0x02
|
||||
* and update 3 is 0x04 ideally at rule version 3 all bits so far are set
|
||||
* (0b111) so the ruleVersion = 7, however if a specific rule update must be
|
||||
* rolled back due to unforeseen behaviour then this may no longer be the
|
||||
* case. using a bit field here leaves us flexible to rollback changes that
|
||||
* might have unforeseen consequences, without also rolling back further
|
||||
* changes that are fine.
|
||||
*/
|
||||
uint64_t rulesVersion = 0)
|
||||
{
|
||||
uint64_t byteCount = wasm.size();
|
||||
@@ -1477,7 +1505,8 @@ validateGuards(
guard_import_number,
last_import_number,
guardLog,
guardLogAccStr);
guardLogAccStr,
rulesVersion);

if (!valid)
return {};
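To make the bit-field convention from the RH NOTE above concrete, here is a small hedged sketch; the named constants and helper are hypothetical and only restate what the note and the 0x02 checks in check_guard already imply:

// Hypothetical constants, illustrative only: one bit per guard-rule update so
// individual updates can be enabled or rolled back independently.
constexpr uint64_t GUARD_RULES_V1 = 0x01;  // rule update 1
constexpr uint64_t GUARD_RULES_V2 = 0x02;  // rule update 2 (rejects memory.copy / memory.fill)
constexpr uint64_t GUARD_RULES_V3 = 0x04;  // rule update 3

inline bool
guardRuleEnabled(uint64_t rulesVersion, uint64_t bit)
{
    return (rulesVersion & bit) != 0;
}

// e.g. the stand-alone checker's main() shown later in this diff passes 3
// (V1 | V2); a caller with every update so far enabled would pass 7 (V1 | V2 | V3).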
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
* were then used.
|
||||
*/
|
||||
|
||||
#define LPAREN (
|
||||
#define LPAREN (
|
||||
#define RPAREN )
|
||||
#define COMMA ,
|
||||
|
||||
@@ -428,6 +428,12 @@ namespace hook {
|
||||
bool
|
||||
canHook(ripple::TxType txType, ripple::uint256 hookOn);
|
||||
|
||||
bool
|
||||
canEmit(ripple::TxType txType, ripple::uint256 hookCanEmit);
|
||||
|
||||
ripple::uint256
|
||||
getHookCanEmit(ripple::STObject const& hookObj, SLE::pointer const& hookDef);
|
||||
|
||||
struct HookResult;
|
||||
|
||||
HookResult
|
||||
@@ -436,6 +442,7 @@ apply(
|
||||
used for caching (one day) */
|
||||
ripple::uint256 const&
|
||||
hookHash, /* hash of the actual hook byte code, used for metadata */
|
||||
ripple::uint256 const& hookCanEmit,
|
||||
ripple::uint256 const& hookNamespace,
|
||||
ripple::Blob const& wasm,
|
||||
std::map<
|
||||
@@ -472,6 +479,7 @@ struct HookResult
|
||||
{
|
||||
ripple::uint256 const hookSetTxnID;
|
||||
ripple::uint256 const hookHash;
|
||||
ripple::uint256 const hookCanEmit;
|
||||
ripple::Keylet const accountKeylet;
|
||||
ripple::Keylet const ownerDirKeylet;
|
||||
ripple::Keylet const hookKeylet;
|
||||
|
||||
@@ -79,7 +79,7 @@ main(int argc, char** argv)

close(fd);

auto result = validateGuards(hook, std::cout, "", 1);
auto result = validateGuards(hook, std::cout, "", 3);

if (!result)
{
|
||||
|
||||
@@ -1,12 +1,17 @@
|
||||
#include <ripple/app/hook/applyHook.h>
|
||||
#include <ripple/app/ledger/OpenLedger.h>
|
||||
#include <ripple/app/ledger/TransactionMaster.h>
|
||||
#include <ripple/app/misc/HashRouter.h>
|
||||
#include <ripple/app/misc/NetworkOPs.h>
|
||||
#include <ripple/app/misc/Transaction.h>
|
||||
#include <ripple/app/misc/TxQ.h>
|
||||
#include <ripple/app/tx/impl/Import.h>
|
||||
#include <ripple/app/tx/impl/details/NFTokenUtils.h>
|
||||
#include <ripple/basics/Log.h>
|
||||
#include <ripple/basics/Slice.h>
|
||||
#include <ripple/protocol/ErrorCodes.h>
|
||||
#include <ripple/protocol/TxFlags.h>
|
||||
#include <ripple/protocol/st.h>
|
||||
#include <ripple/protocol/tokens.h>
|
||||
#include <boost/multiprecision/cpp_dec_float.hpp>
|
||||
#include <any>
|
||||
@@ -70,6 +75,45 @@ getTransactionalStakeHolders(STTx const& tx, ReadView const& rv)
|
||||
|
||||
switch (tt)
|
||||
{
|
||||
case ttREMIT: {
|
||||
if (destAcc)
|
||||
ADD_TSH(*destAcc, tshSTRONG);
|
||||
|
||||
if (tx.isFieldPresent(sfInform))
|
||||
{
|
||||
auto const inform = tx.getAccountID(sfInform);
|
||||
if (*otxnAcc != inform && *destAcc != inform)
|
||||
ADD_TSH(inform, tshWEAK);
|
||||
}
|
||||
|
||||
if (tx.isFieldPresent(sfURITokenIDs))
|
||||
{
|
||||
STVector256 tokenIds = tx.getFieldV256(sfURITokenIDs);
|
||||
for (uint256 const klRaw : tokenIds)
|
||||
{
|
||||
Keylet const id{ltURI_TOKEN, klRaw};
|
||||
if (!rv.exists(id))
|
||||
continue;
|
||||
|
||||
auto const ut = rv.read(id);
|
||||
if (!ut ||
|
||||
ut->getFieldU16(sfLedgerEntryType) != ltURI_TOKEN)
|
||||
continue;
|
||||
|
||||
auto const owner = ut->getAccountID(sfOwner);
|
||||
auto const issuer = ut->getAccountID(sfIssuer);
|
||||
if (issuer != owner && issuer != *destAcc)
|
||||
{
|
||||
ADD_TSH(
|
||||
issuer,
|
||||
(ut->getFlags() & lsfBurnable) ? tshSTRONG
|
||||
: tshWEAK);
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case ttIMPORT: {
|
||||
if (tx.isFieldPresent(sfIssuer))
|
||||
ADD_TSH(tx.getAccountID(sfIssuer), fixV2 ? tshWEAK : tshSTRONG);
|
||||
@@ -256,14 +300,14 @@ getTransactionalStakeHolders(STTx const& tx, ReadView const& rv)
|
||||
{
|
||||
ADD_TSH(bo->getAccountID(sfOwner), tshSTRONG);
|
||||
if (bo->isFieldPresent(sfDestination))
|
||||
ADD_TSH(bo->getAccountID(sfDestination), tshWEAK);
|
||||
ADD_TSH(bo->getAccountID(sfDestination), tshSTRONG);
|
||||
}
|
||||
|
||||
if (so)
|
||||
{
|
||||
ADD_TSH(so->getAccountID(sfOwner), tshSTRONG);
|
||||
if (so->isFieldPresent(sfDestination))
|
||||
ADD_TSH(so->getAccountID(sfDestination), tshWEAK);
|
||||
ADD_TSH(so->getAccountID(sfDestination), tshSTRONG);
|
||||
}
|
||||
|
||||
break;
|
||||
@@ -279,7 +323,7 @@ getTransactionalStakeHolders(STTx const& tx, ReadView const& rv)
|
||||
auto const offer = getNFTOffer(offerID, rv);
|
||||
if (offer)
|
||||
{
|
||||
ADD_TSH(offer->getAccountID(sfOwner), tshSTRONG);
|
||||
ADD_TSH(offer->getAccountID(sfOwner), tshWEAK);
|
||||
if (offer->isFieldPresent(sfDestination))
|
||||
ADD_TSH(offer->getAccountID(sfDestination), tshWEAK);
|
||||
|
||||
@@ -984,6 +1028,29 @@ hook::canHook(ripple::TxType txType, ripple::uint256 hookOn)
return (hookOn & UINT256_BIT[txType]) != beast::zero;
}

bool
hook::canEmit(ripple::TxType txType, ripple::uint256 hookCanEmit)
{
return hook::canHook(txType, hookCanEmit);
}

ripple::uint256
hook::getHookCanEmit(
ripple::STObject const& hookObj,
SLE::pointer const& hookDef)
{
// default allows all transaction types
uint256 defaultHookCanEmit = UINT256_BIT[ttHOOK_SET];

uint256 hookCanEmit =
(hookObj.isFieldPresent(sfHookCanEmit)
? hookObj.getFieldH256(sfHookCanEmit)
: hookDef->isFieldPresent(sfHookCanEmit)
? hookDef->getFieldH256(sfHookCanEmit)
: defaultHookCanEmit);
return hookCanEmit;
}
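Since the nested conditional above is easy to misread, an equivalent spelled-out form (illustrative only, same precedence and result):

// The per-hook object's sfHookCanEmit wins, then the hook definition's value,
// then the default:
uint256 hookCanEmit = defaultHookCanEmit;
if (hookObj.isFieldPresent(sfHookCanEmit))
    hookCanEmit = hookObj.getFieldH256(sfHookCanEmit);
else if (hookDef->isFieldPresent(sfHookCanEmit))
    hookCanEmit = hookDef->getFieldH256(sfHookCanEmit);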
|
||||
|
||||
// Update HookState ledger objects for the hook... only called after accept()
|
||||
// assumes the specified acc has already been checked for authorization (hook
|
||||
// grants)
|
||||
@@ -1135,6 +1202,7 @@ hook::apply(
|
||||
used for caching (one day) */
|
||||
ripple::uint256 const&
|
||||
hookHash, /* hash of the actual hook byte code, used for metadata */
|
||||
ripple::uint256 const& hookCanEmit,
|
||||
ripple::uint256 const& hookNamespace,
|
||||
ripple::Blob const& wasm,
|
||||
std::map<
|
||||
@@ -1162,6 +1230,7 @@ hook::apply(
|
||||
.result =
|
||||
{.hookSetTxnID = hookSetTxnID,
|
||||
.hookHash = hookHash,
|
||||
.hookCanEmit = hookCanEmit,
|
||||
.accountKeylet = keylet::account(account),
|
||||
.ownerDirKeylet = keylet::ownerDir(account),
|
||||
.hookKeylet = keylet::hook(account),
|
||||
@@ -1173,9 +1242,10 @@ hook::apply(
|
||||
.hookParamOverrides = hookParamOverrides,
|
||||
.hookParams = hookParams,
|
||||
.hookSkips = {},
|
||||
.exitType =
|
||||
hook_api::ExitType::ROLLBACK, // default is to rollback unless
|
||||
// hook calls accept()
|
||||
.exitType = applyCtx.view().rules().enabled(fixXahauV3)
|
||||
? hook_api::ExitType::UNSET
|
||||
: hook_api::ExitType::ROLLBACK, // default is to rollback
|
||||
// unless hook calls accept()
|
||||
.exitReason = std::string(""),
|
||||
.exitCode = -1,
|
||||
.hasCallback = hasCallback,
|
||||
@@ -1759,7 +1829,7 @@ hook::finalizeHookState(
|
||||
|
||||
TER result = setHookState(applyCtx, acc, ns, key, slice);
|
||||
|
||||
if (result != tesSUCCESS)
|
||||
if (!isTesSuccess(result))
|
||||
{
|
||||
JLOG(j.warn())
|
||||
<< "HookError[TX:" << txnID
|
||||
@@ -3225,6 +3295,16 @@ DEFINE_HOOK_FUNCTION(
|
||||
return EMISSION_FAILURE;
|
||||
}
|
||||
|
||||
ripple::TxType txType = stpTrans->getTxnType();
|
||||
|
||||
ripple::uint256 const& hookCanEmit = hookCtx.result.hookCanEmit;
|
||||
if (!hook::canEmit(txType, hookCanEmit))
|
||||
{
|
||||
JLOG(j.trace()) << "HookEmit[" << HC_ACC()
|
||||
<< "]: Hook cannot emit this txn.";
|
||||
return EMISSION_FAILURE;
|
||||
}
|
||||
|
||||
// check the emitted txn is valid
|
||||
/* Emitted TXN rules
|
||||
* 0. Account must match the hook account
|
||||
@@ -3488,7 +3568,7 @@ DEFINE_HOOK_FUNCTION(
|
||||
ripple::ApplyFlags::tapPREFLIGHT_EMIT,
|
||||
j);
|
||||
|
||||
if (preflightResult.ter != tesSUCCESS)
|
||||
if (!isTesSuccess(preflightResult.ter))
|
||||
{
|
||||
JLOG(j.trace()) << "HookEmit[" << HC_ACC()
|
||||
<< "]: Transaction preflight failure: "
|
||||
@@ -4573,6 +4653,8 @@ DEFINE_HOOK_FUNCTION(
|
||||
}
|
||||
catch (std::exception& e)
|
||||
{
|
||||
JLOG(j.trace()) << "HookInfo[" << HC_ACC()
|
||||
<< "]: etxn_fee_base exception: " << e.what();
|
||||
return INVALID_TXN;
|
||||
}
|
||||
|
||||
@@ -4744,7 +4826,7 @@ DEFINE_HOOK_FUNCTION(
|
||||
|
||||
if (float1 == 0)
|
||||
{
|
||||
j.trace() << "HookTrace[" << HC_ACC() << "]:"
|
||||
j.trace() << "HookTrace[" << HC_ACC() << "]: "
|
||||
<< (read_len == 0
|
||||
? ""
|
||||
: std::string_view(
|
||||
@@ -5358,7 +5440,7 @@ DEFINE_HOOK_FUNCTION(
|
||||
const int64_t float_one_internal = make_float(1000000000000000ull, -15, false);
|
||||
|
||||
inline int64_t
|
||||
float_divide_internal(int64_t float1, int64_t float2)
|
||||
float_divide_internal(int64_t float1, int64_t float2, bool hasFix)
|
||||
{
|
||||
RETURN_IF_INVALID_FLOAT(float1);
|
||||
RETURN_IF_INVALID_FLOAT(float2);
|
||||
@@ -5411,8 +5493,16 @@ float_divide_internal(int64_t float1, int64_t float2)
|
||||
while (man2 > 0)
|
||||
{
|
||||
int i = 0;
|
||||
for (; man1 > man2; man1 -= man2, ++i)
|
||||
;
|
||||
if (hasFix)
|
||||
{
|
||||
for (; man1 >= man2; man1 -= man2, ++i)
|
||||
;
|
||||
}
|
||||
else
|
||||
{
|
||||
for (; man1 > man2; man1 -= man2, ++i)
|
||||
;
|
||||
}
|
||||
|
||||
man3 *= 10;
|
||||
man3 += i;
|
||||
@@ -5432,7 +5522,8 @@ DEFINE_HOOK_FUNCTION(int64_t, float_divide, int64_t float1, int64_t float2)
HOOK_SETUP(); // populates memory_ctx, memory, memory_length, applyCtx,
// hookCtx on current stack

return float_divide_internal(float1, float2);
bool const hasFix = view.rules().enabled(fixFloatDivide);
return float_divide_internal(float1, float2, hasFix);

HOOK_TEARDOWN();
}
@@ -5451,7 +5542,9 @@ DEFINE_HOOK_FUNCTION(int64_t, float_invert, int64_t float1)
return DIVISION_BY_ZERO;
if (float1 == float_one_internal)
return float_one_internal;
return float_divide_internal(float_one_internal, float1);

bool const fixV3 = view.rules().enabled(fixFloatDivide);
return float_divide_internal(float_one_internal, float1, fixV3);

HOOK_TEARDOWN();
}
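The behavioural change gated by fixFloatDivide above comes down to one comparison in the mantissa long-division loop. A self-contained sketch, with a hypothetical helper and test values, of why '>' versus '>=' matters:

#include <cassert>
#include <cstdint>

// Minimal model of one digit step of the division loop in float_divide_internal.
static int
digitStep(int64_t& man1, int64_t man2, bool hasFix)
{
    int i = 0;
    if (hasFix)
        for (; man1 >= man2; man1 -= man2, ++i)
            ;
    else
        for (; man1 > man2; man1 -= man2, ++i)
            ;
    return i;
}

int
main()
{
    int64_t a = 10;
    assert(digitStep(a, 10, /*hasFix=*/true) == 1 && a == 0);   // exact quotients now produce the digit
    int64_t b = 10;
    assert(digitStep(b, 10, /*hasFix=*/false) == 0 && b == 10); // old loop stalled on equal mantissas
    return 0;
}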
|
||||
|
||||
@@ -39,7 +39,7 @@ BookListeners::removeSubscriber(std::uint64_t seq)
|
||||
|
||||
void
|
||||
BookListeners::publish(
|
||||
Json::Value const& jvObj,
|
||||
MultiApiJson const& jvObj,
|
||||
hash_set<std::uint64_t>& havePublished)
|
||||
{
|
||||
std::lock_guard sl(mLock);
|
||||
@@ -54,7 +54,8 @@ BookListeners::publish(
|
||||
// Only publish jvObj if this is the first occurrence
|
||||
if (havePublished.emplace(p->getSeq()).second)
|
||||
{
|
||||
p->send(jvObj, true);
|
||||
p->send(
|
||||
jvObj.select(apiVersionSelector(p->getApiVersion())), true);
|
||||
}
|
||||
++it;
|
||||
}
|
||||
|
||||
@@ -20,7 +20,9 @@
|
||||
#ifndef RIPPLE_APP_LEDGER_BOOKLISTENERS_H_INCLUDED
|
||||
#define RIPPLE_APP_LEDGER_BOOKLISTENERS_H_INCLUDED
|
||||
|
||||
#include <ripple/json/MultivarJson.h>
|
||||
#include <ripple/net/InfoSub.h>
|
||||
|
||||
#include <memory>
|
||||
#include <mutex>
|
||||
|
||||
@@ -58,7 +60,7 @@ public:
|
||||
|
||||
*/
|
||||
void
|
||||
publish(Json::Value const& jvObj, hash_set<std::uint64_t>& havePublished);
|
||||
publish(MultiApiJson const& jvObj, hash_set<std::uint64_t>& havePublished);
|
||||
|
||||
private:
|
||||
std::recursive_mutex mLock;
|
||||
|
||||
@@ -197,14 +197,6 @@ private:
|
||||
std::unique_ptr<PeerSet> mPeerSet;
|
||||
};
|
||||
|
||||
/** Deserialize a ledger header from a byte array. */
|
||||
LedgerInfo
|
||||
deserializeHeader(Slice data, bool hasHash = false);
|
||||
|
||||
/** Deserialize a ledger header (prefixed with 4 bytes) from a byte array. */
|
||||
LedgerInfo
|
||||
deserializePrefixedHeader(Slice data, bool hasHash = false);
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
#endif
|
||||
|
||||
@@ -38,10 +38,21 @@ public:
virtual ~InboundLedgers() = default;

// VFALCO TODO Should this be called findOrAdd ?
// Callers should use this if they possibly need an authoritative
// response immediately.
//
virtual std::shared_ptr<Ledger const>
acquire(uint256 const& hash, std::uint32_t seq, InboundLedger::Reason) = 0;

// Callers should use this if they are known to be executing on the Job
// Queue. TODO review whether all callers of acquire() can use this
// instead. Inbound ledger acquisition is asynchronous anyway.
virtual void
acquireAsync(
uint256 const& hash,
std::uint32_t seq,
InboundLedger::Reason reason) = 0;

virtual std::shared_ptr<InboundLedger>
find(LedgerHash const& hash) = 0;
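A hedged sketch of the caller-side split the comments above describe, mirroring the consensus and validations changes earlier in this diff (local variable names and capture lists are illustrative):

// Off the job queue, when an authoritative answer may be needed right away:
auto ledger = app.getInboundLedgers().acquire(hash, seq, InboundLedger::Reason::CONSENSUS);

// Already on the job queue: fire and forget, acquisition completes asynchronously.
app.getJobQueue().addJob(jtADVANCE, "getConsensusLedger1", [&app, hash]() {
    app.getInboundLedgers().acquireAsync(hash, 0, InboundLedger::Reason::CONSENSUS);
});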
|
||||
|
||||
@@ -83,6 +94,9 @@ public:
|
||||
|
||||
virtual void
|
||||
stop() = 0;
|
||||
|
||||
virtual std::size_t
|
||||
cacheSize() = 0;
|
||||
};
|
||||
|
||||
std::unique_ptr<InboundLedgers>
|
||||
|
||||
@@ -119,9 +119,8 @@ public:
|
||||
sles_type::value_type
|
||||
dereference() const override
|
||||
{
|
||||
auto const item = *iter_;
|
||||
SerialIter sit(item.slice());
|
||||
return std::make_shared<SLE const>(sit, item.key());
|
||||
SerialIter sit(iter_->slice());
|
||||
return std::make_shared<SLE const>(sit, iter_->key());
|
||||
}
|
||||
};
|
||||
|
||||
@@ -168,7 +167,7 @@ public:
|
||||
txs_type::value_type
|
||||
dereference() const override
|
||||
{
|
||||
auto const item = *iter_;
|
||||
auto const& item = *iter_;
|
||||
if (metadata_)
|
||||
return deserializeTxPlusMeta(item);
|
||||
return {deserializeTx(item), nullptr};
|
||||
@@ -183,8 +182,8 @@ Ledger::Ledger(
|
||||
std::vector<uint256> const& amendments,
|
||||
Family& family)
|
||||
: mImmutable(false)
|
||||
, txMap_(std::make_shared<SHAMap>(SHAMapType::TRANSACTION, family))
|
||||
, stateMap_(std::make_shared<SHAMap>(SHAMapType::STATE, family))
|
||||
, txMap_(SHAMapType::TRANSACTION, family)
|
||||
, stateMap_(SHAMapType::STATE, family)
|
||||
, rules_{config.features}
|
||||
, j_(beast::Journal(beast::Journal::getNullSink()))
|
||||
{
|
||||
@@ -247,7 +246,7 @@ Ledger::Ledger(
|
||||
rawInsert(sle);
|
||||
}
|
||||
|
||||
stateMap_->flushDirty(hotACCOUNT_NODE);
|
||||
stateMap_.flushDirty(hotACCOUNT_NODE);
|
||||
setImmutable();
|
||||
}
|
||||
|
||||
@@ -259,12 +258,8 @@ Ledger::Ledger(
|
||||
Family& family,
|
||||
beast::Journal j)
|
||||
: mImmutable(true)
|
||||
, txMap_(std::make_shared<SHAMap>(
|
||||
SHAMapType::TRANSACTION,
|
||||
info.txHash,
|
||||
family))
|
||||
, stateMap_(
|
||||
std::make_shared<SHAMap>(SHAMapType::STATE, info.accountHash, family))
|
||||
, txMap_(SHAMapType::TRANSACTION, info.txHash, family)
|
||||
, stateMap_(SHAMapType::STATE, info.accountHash, family)
|
||||
, rules_(config.features)
|
||||
, info_(info)
|
||||
, j_(j)
|
||||
@@ -272,7 +267,7 @@ Ledger::Ledger(
|
||||
loaded = true;
|
||||
|
||||
if (info_.txHash.isNonZero() &&
|
||||
!txMap_->fetchRoot(SHAMapHash{info_.txHash}, nullptr))
|
||||
!txMap_.fetchRoot(SHAMapHash{info_.txHash}, nullptr))
|
||||
{
|
||||
if (config.reporting())
|
||||
{
|
||||
@@ -284,7 +279,7 @@ Ledger::Ledger(
|
||||
}
|
||||
|
||||
if (info_.accountHash.isNonZero() &&
|
||||
!stateMap_->fetchRoot(SHAMapHash{info_.accountHash}, nullptr))
|
||||
!stateMap_.fetchRoot(SHAMapHash{info_.accountHash}, nullptr))
|
||||
{
|
||||
if (config.reporting())
|
||||
{
|
||||
@@ -295,8 +290,8 @@ Ledger::Ledger(
|
||||
JLOG(j.warn()) << "Don't have state data root for ledger" << info_.seq;
|
||||
}
|
||||
|
||||
txMap_->setImmutable();
|
||||
stateMap_->setImmutable();
|
||||
txMap_.setImmutable();
|
||||
stateMap_.setImmutable();
|
||||
|
||||
defaultFees(config);
|
||||
if (!setup())
|
||||
@@ -310,13 +305,25 @@ Ledger::Ledger(
|
||||
}
|
||||
}
|
||||
|
||||
Ledger::Ledger(
|
||||
LedgerInfo& info,
|
||||
Config const& config,
|
||||
Family& family,
|
||||
SHAMap const& baseState)
|
||||
: mImmutable(false)
|
||||
, txMap_(SHAMapType::TRANSACTION, family)
|
||||
, stateMap_(baseState, true)
|
||||
, rules_{config.features}
|
||||
, info_(info)
|
||||
, j_(beast::Journal(beast::Journal::getNullSink()))
|
||||
{
|
||||
}
|
||||
|
||||
// Create a new ledger that follows this one
|
||||
Ledger::Ledger(Ledger const& prevLedger, NetClock::time_point closeTime)
|
||||
: mImmutable(false)
|
||||
, txMap_(std::make_shared<SHAMap>(
|
||||
SHAMapType::TRANSACTION,
|
||||
prevLedger.stateMap_->family()))
|
||||
, stateMap_(prevLedger.stateMap_->snapShot(true))
|
||||
, txMap_(SHAMapType::TRANSACTION, prevLedger.txMap_.family())
|
||||
, stateMap_(prevLedger.stateMap_, true)
|
||||
, fees_(prevLedger.fees_)
|
||||
, rules_(prevLedger.rules_)
|
||||
, j_(beast::Journal(beast::Journal::getNullSink()))
|
||||
@@ -345,12 +352,8 @@ Ledger::Ledger(Ledger const& prevLedger, NetClock::time_point closeTime)
|
||||
|
||||
Ledger::Ledger(LedgerInfo const& info, Config const& config, Family& family)
|
||||
: mImmutable(true)
|
||||
, txMap_(std::make_shared<SHAMap>(
|
||||
SHAMapType::TRANSACTION,
|
||||
info.txHash,
|
||||
family))
|
||||
, stateMap_(
|
||||
std::make_shared<SHAMap>(SHAMapType::STATE, info.accountHash, family))
|
||||
, txMap_(SHAMapType::TRANSACTION, info.txHash, family)
|
||||
, stateMap_(SHAMapType::STATE, info.accountHash, family)
|
||||
, rules_{config.features}
|
||||
, info_(info)
|
||||
, j_(beast::Journal(beast::Journal::getNullSink()))
|
||||
@@ -364,8 +367,8 @@ Ledger::Ledger(
|
||||
Config const& config,
|
||||
Family& family)
|
||||
: mImmutable(false)
|
||||
, txMap_(std::make_shared<SHAMap>(SHAMapType::TRANSACTION, family))
|
||||
, stateMap_(std::make_shared<SHAMap>(SHAMapType::STATE, family))
|
||||
, txMap_(SHAMapType::TRANSACTION, family)
|
||||
, stateMap_(SHAMapType::STATE, family)
|
||||
, rules_{config.features}
|
||||
, j_(beast::Journal(beast::Journal::getNullSink()))
|
||||
{
|
||||
@@ -383,19 +386,32 @@ Ledger::setImmutable(bool rehash)
|
||||
// place the hash transitions to valid
|
||||
if (!mImmutable && rehash)
|
||||
{
|
||||
info_.txHash = txMap_->getHash().as_uint256();
|
||||
info_.accountHash = stateMap_->getHash().as_uint256();
|
||||
info_.txHash = txMap_.getHash().as_uint256();
|
||||
info_.accountHash = stateMap_.getHash().as_uint256();
|
||||
}
|
||||
|
||||
if (rehash)
|
||||
info_.hash = calculateLedgerHash(info_);
|
||||
|
||||
mImmutable = true;
|
||||
txMap_->setImmutable();
|
||||
stateMap_->setImmutable();
|
||||
txMap_.setImmutable();
|
||||
stateMap_.setImmutable();
|
||||
setup();
|
||||
}
|
||||
|
||||
// raw setters for catalogue
|
||||
void
|
||||
Ledger::setCloseFlags(int closeFlags)
|
||||
{
|
||||
info_.closeFlags = closeFlags;
|
||||
}
|
||||
|
||||
void
|
||||
Ledger::setDrops(uint64_t drops)
|
||||
{
|
||||
info_.drops = drops;
|
||||
}
|
||||
|
||||
void
|
||||
Ledger::setAccepted(
|
||||
NetClock::time_point closeTime,
|
||||
@@ -415,8 +431,8 @@ bool
|
||||
Ledger::addSLE(SLE const& sle)
|
||||
{
|
||||
auto const s = sle.getSerializer();
|
||||
SHAMapItem item(sle.key(), s.slice());
|
||||
return stateMap_->addItem(SHAMapNodeType::tnACCOUNT_STATE, std::move(item));
|
||||
return stateMap_.addItem(
|
||||
SHAMapNodeType::tnACCOUNT_STATE, make_shamapitem(sle.key(), s.slice()));
|
||||
}
|
||||
|
||||
//------------------------------------------------------------------------------
|
||||
@@ -451,20 +467,20 @@ bool
|
||||
Ledger::exists(Keylet const& k) const
|
||||
{
|
||||
// VFALCO NOTE Perhaps check the type for debug builds?
|
||||
return stateMap_->hasItem(k.key);
|
||||
return stateMap_.hasItem(k.key);
|
||||
}
|
||||
|
||||
bool
|
||||
Ledger::exists(uint256 const& key) const
|
||||
{
|
||||
return stateMap_->hasItem(key);
|
||||
return stateMap_.hasItem(key);
|
||||
}
|
||||
|
||||
std::optional<uint256>
|
||||
Ledger::succ(uint256 const& key, std::optional<uint256> const& last) const
|
||||
{
|
||||
auto item = stateMap_->upper_bound(key);
|
||||
if (item == stateMap_->end())
|
||||
auto item = stateMap_.upper_bound(key);
|
||||
if (item == stateMap_.end())
|
||||
return std::nullopt;
|
||||
if (last && item->key() >= last)
|
||||
return std::nullopt;
|
||||
@@ -479,7 +495,7 @@ Ledger::read(Keylet const& k) const
|
||||
assert(false);
|
||||
return nullptr;
|
||||
}
|
||||
auto const& item = stateMap_->peekItem(k.key);
|
||||
auto const& item = stateMap_.peekItem(k.key);
|
||||
if (!item)
|
||||
return nullptr;
|
||||
auto sle = std::make_shared<SLE>(SerialIter{item->slice()}, item->key());
|
||||
@@ -493,45 +509,44 @@ Ledger::read(Keylet const& k) const
|
||||
auto
|
||||
Ledger::slesBegin() const -> std::unique_ptr<sles_type::iter_base>
|
||||
{
|
||||
return std::make_unique<sles_iter_impl>(stateMap_->begin());
|
||||
return std::make_unique<sles_iter_impl>(stateMap_.begin());
|
||||
}
|
||||
|
||||
auto
|
||||
Ledger::slesEnd() const -> std::unique_ptr<sles_type::iter_base>
|
||||
{
|
||||
return std::make_unique<sles_iter_impl>(stateMap_->end());
|
||||
return std::make_unique<sles_iter_impl>(stateMap_.end());
|
||||
}
|
||||
|
||||
auto
|
||||
Ledger::slesUpperBound(uint256 const& key) const
|
||||
-> std::unique_ptr<sles_type::iter_base>
|
||||
{
|
||||
return std::make_unique<sles_iter_impl>(stateMap_->upper_bound(key));
|
||||
return std::make_unique<sles_iter_impl>(stateMap_.upper_bound(key));
|
||||
}
|
||||
|
||||
auto
|
||||
Ledger::txsBegin() const -> std::unique_ptr<txs_type::iter_base>
|
||||
{
|
||||
return std::make_unique<txs_iter_impl>(!open(), txMap_->begin());
|
||||
return std::make_unique<txs_iter_impl>(!open(), txMap_.begin());
|
||||
}
|
||||
|
||||
auto
|
||||
Ledger::txsEnd() const -> std::unique_ptr<txs_type::iter_base>
|
||||
{
|
||||
return std::make_unique<txs_iter_impl>(!open(), txMap_->end());
|
||||
return std::make_unique<txs_iter_impl>(!open(), txMap_.end());
|
||||
}
|
||||
|
||||
bool
|
||||
Ledger::txExists(uint256 const& key) const
|
||||
{
|
||||
return txMap_->hasItem(key);
|
||||
return txMap_.hasItem(key);
|
||||
}
|
||||
|
||||
auto
|
||||
Ledger::txRead(key_type const& key) const -> tx_type
|
||||
{
|
||||
assert(txMap_);
|
||||
auto const& item = txMap_->peekItem(key);
|
||||
auto const& item = txMap_.peekItem(key);
|
||||
if (!item)
|
||||
return {};
|
||||
if (!open())
|
||||
@@ -548,7 +563,7 @@ Ledger::digest(key_type const& key) const -> std::optional<digest_type>
|
||||
SHAMapHash digest;
|
||||
// VFALCO Unfortunately this loads the item
|
||||
// from the NodeStore needlessly.
|
||||
if (!stateMap_->peekItem(key, digest))
|
||||
if (!stateMap_.peekItem(key, digest))
|
||||
return std::nullopt;
|
||||
return digest.as_uint256();
|
||||
}
|
||||
@@ -558,14 +573,14 @@ Ledger::digest(key_type const& key) const -> std::optional<digest_type>
|
||||
void
|
||||
Ledger::rawErase(std::shared_ptr<SLE> const& sle)
|
||||
{
|
||||
if (!stateMap_->delItem(sle->key()))
|
||||
if (!stateMap_.delItem(sle->key()))
|
||||
LogicError("Ledger::rawErase: key not found");
|
||||
}
|
||||
|
||||
void
|
||||
Ledger::rawErase(uint256 const& key)
|
||||
{
|
||||
if (!stateMap_->delItem(key))
|
||||
if (!stateMap_.delItem(key))
|
||||
LogicError("Ledger::rawErase: key not found");
|
||||
}
|
||||
|
||||
@@ -574,9 +589,9 @@ Ledger::rawInsert(std::shared_ptr<SLE> const& sle)
|
||||
{
|
||||
Serializer ss;
|
||||
sle->add(ss);
|
||||
if (!stateMap_->addGiveItem(
|
||||
if (!stateMap_.addGiveItem(
|
||||
SHAMapNodeType::tnACCOUNT_STATE,
|
||||
std::make_shared<SHAMapItem const>(sle->key(), ss.slice())))
|
||||
make_shamapitem(sle->key(), ss.slice())))
|
||||
LogicError("Ledger::rawInsert: key already exists");
|
||||
}
|
||||
|
||||
@@ -585,9 +600,9 @@ Ledger::rawReplace(std::shared_ptr<SLE> const& sle)
|
||||
{
|
||||
Serializer ss;
|
||||
sle->add(ss);
|
||||
if (!stateMap_->updateGiveItem(
|
||||
if (!stateMap_.updateGiveItem(
|
||||
SHAMapNodeType::tnACCOUNT_STATE,
|
||||
std::make_shared<SHAMapItem const>(sle->key(), ss.slice())))
|
||||
make_shamapitem(sle->key(), ss.slice())))
|
||||
LogicError("Ledger::rawReplace: key not found");
|
||||
}
|
||||
|
||||
@@ -603,9 +618,8 @@ Ledger::rawTxInsert(
|
||||
Serializer s(txn->getDataLength() + metaData->getDataLength() + 16);
|
||||
s.addVL(txn->peekData());
|
||||
s.addVL(metaData->peekData());
|
||||
if (!txMap().addGiveItem(
|
||||
SHAMapNodeType::tnTRANSACTION_MD,
|
||||
std::make_shared<SHAMapItem const>(key, s.slice())))
|
||||
if (!txMap_.addGiveItem(
|
||||
SHAMapNodeType::tnTRANSACTION_MD, make_shamapitem(key, s.slice())))
|
||||
LogicError("duplicate_tx: " + to_string(key));
|
||||
}
|
||||
|
||||
@@ -621,9 +635,9 @@ Ledger::rawTxInsertWithHash(
|
||||
Serializer s(txn->getDataLength() + metaData->getDataLength() + 16);
|
||||
s.addVL(txn->peekData());
|
||||
s.addVL(metaData->peekData());
|
||||
auto item = std::make_shared<SHAMapItem const>(key, s.slice());
|
||||
auto item = make_shamapitem(key, s.slice());
|
||||
auto hash = sha512Half(HashPrefix::txNode, item->slice(), item->key());
|
||||
if (!txMap().addGiveItem(SHAMapNodeType::tnTRANSACTION_MD, std::move(item)))
|
||||
if (!txMap_.addGiveItem(SHAMapNodeType::tnTRANSACTION_MD, std::move(item)))
|
||||
LogicError("duplicate_tx: " + to_string(key));
|
||||
|
||||
return hash;
|
||||
@@ -723,7 +737,7 @@ Ledger::defaultFees(Config const& config)
|
||||
std::shared_ptr<SLE>
|
||||
Ledger::peek(Keylet const& k) const
|
||||
{
|
||||
auto const& value = stateMap_->peekItem(k.key);
|
||||
auto const& value = stateMap_.peekItem(k.key);
|
||||
if (!value)
|
||||
return nullptr;
|
||||
auto sle = std::make_shared<SLE>(SerialIter{value->slice()}, value->key());
|
||||
@@ -845,8 +859,8 @@ Ledger::walkLedger(beast::Journal j, bool parallel) const
|
||||
std::vector<SHAMapMissingNode> missingNodes1;
|
||||
std::vector<SHAMapMissingNode> missingNodes2;
|
||||
|
||||
if (stateMap_->getHash().isZero() && !info_.accountHash.isZero() &&
|
||||
!stateMap_->fetchRoot(SHAMapHash{info_.accountHash}, nullptr))
|
||||
if (stateMap_.getHash().isZero() && !info_.accountHash.isZero() &&
|
||||
!stateMap_.fetchRoot(SHAMapHash{info_.accountHash}, nullptr))
|
||||
{
|
||||
missingNodes1.emplace_back(
|
||||
SHAMapType::STATE, SHAMapHash{info_.accountHash});
|
||||
@@ -854,9 +868,9 @@ Ledger::walkLedger(beast::Journal j, bool parallel) const
|
||||
else
|
||||
{
|
||||
if (parallel)
|
||||
return stateMap_->walkMapParallel(missingNodes1, 32);
|
||||
return stateMap_.walkMapParallel(missingNodes1, 32);
|
||||
else
|
||||
stateMap_->walkMap(missingNodes1, 32);
|
||||
stateMap_.walkMap(missingNodes1, 32);
|
||||
}
|
||||
|
||||
if (!missingNodes1.empty())
|
||||
@@ -868,15 +882,15 @@ Ledger::walkLedger(beast::Journal j, bool parallel) const
|
||||
}
|
||||
}
|
||||
|
||||
if (txMap_->getHash().isZero() && info_.txHash.isNonZero() &&
|
||||
!txMap_->fetchRoot(SHAMapHash{info_.txHash}, nullptr))
|
||||
if (txMap_.getHash().isZero() && info_.txHash.isNonZero() &&
|
||||
!txMap_.fetchRoot(SHAMapHash{info_.txHash}, nullptr))
|
||||
{
|
||||
missingNodes2.emplace_back(
|
||||
SHAMapType::TRANSACTION, SHAMapHash{info_.txHash});
|
||||
}
|
||||
else
|
||||
{
|
||||
txMap_->walkMap(missingNodes2, 32);
|
||||
txMap_.walkMap(missingNodes2, 32);
|
||||
}
|
||||
|
||||
if (!missingNodes2.empty())
|
||||
@@ -893,9 +907,9 @@ Ledger::walkLedger(beast::Journal j, bool parallel) const
|
||||
bool
|
||||
Ledger::assertSensible(beast::Journal ledgerJ) const
|
||||
{
|
||||
if (info_.hash.isNonZero() && info_.accountHash.isNonZero() && stateMap_ &&
|
||||
txMap_ && (info_.accountHash == stateMap_->getHash().as_uint256()) &&
|
||||
(info_.txHash == txMap_->getHash().as_uint256()))
|
||||
if (info_.hash.isNonZero() && info_.accountHash.isNonZero() &&
|
||||
(info_.accountHash == stateMap_.getHash().as_uint256()) &&
|
||||
(info_.txHash == txMap_.getHash().as_uint256()))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
@@ -1057,15 +1071,14 @@ pendSaveValidated(
|
||||
return true;
|
||||
}
|
||||
|
||||
JobType const jobType{isCurrent ? jtPUBLEDGER : jtPUBOLDLEDGER};
|
||||
char const* const jobName{
|
||||
isCurrent ? "Ledger::pendSave" : "Ledger::pendOldSave"};
|
||||
|
||||
// See if we can use the JobQueue.
|
||||
if (!isSynchronous &&
|
||||
app.getJobQueue().addJob(jobType, jobName, [&app, ledger, isCurrent]() {
|
||||
saveValidatedLedger(app, ledger, isCurrent);
|
||||
}))
|
||||
app.getJobQueue().addJob(
|
||||
isCurrent ? jtPUBLEDGER : jtPUBOLDLEDGER,
|
||||
std::to_string(ledger->seq()),
|
||||
[&app, ledger, isCurrent]() {
|
||||
saveValidatedLedger(app, ledger, isCurrent);
|
||||
}))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
@@ -1077,15 +1090,15 @@ pendSaveValidated(
|
||||
void
|
||||
Ledger::unshare() const
|
||||
{
|
||||
stateMap_->unshare();
|
||||
txMap_->unshare();
|
||||
stateMap_.unshare();
|
||||
txMap_.unshare();
|
||||
}
|
||||
|
||||
void
|
||||
Ledger::invariants() const
|
||||
{
|
||||
stateMap_->invariants();
|
||||
txMap_->invariants();
|
||||
stateMap_.invariants();
|
||||
txMap_.invariants();
|
||||
}
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
|
||||
@@ -83,6 +83,10 @@ public:
|
||||
Ledger&
|
||||
operator=(Ledger const&) = delete;
|
||||
|
||||
Ledger(Ledger&&) = delete;
|
||||
Ledger&
|
||||
operator=(Ledger&&) = delete;
|
||||
|
||||
/** Create the Genesis ledger.
|
||||
|
||||
The Genesis ledger contains a single account whose
|
||||
@@ -117,6 +121,13 @@ public:
|
||||
Family& family,
|
||||
beast::Journal j);
|
||||
|
||||
// used when loading ledgers from catalogue files
|
||||
Ledger(
|
||||
LedgerInfo& info,
|
||||
Config const& config,
|
||||
Family& family,
|
||||
SHAMap const& baseState);
|
||||
|
||||
/** Create a new ledger following a previous ledger
|
||||
|
||||
The ledger will have the sequence number that
|
||||
@@ -271,6 +282,12 @@ public:
|
||||
void
|
||||
setImmutable(bool rehash = true);
|
||||
|
||||
void
|
||||
setCloseFlags(int closeFlags);
|
||||
|
||||
void
|
||||
setDrops(uint64_t drops);
|
||||
|
||||
bool
|
||||
isImmutable() const
|
||||
{
|
||||
@@ -290,10 +307,10 @@ public:
|
||||
void
|
||||
setFull() const
|
||||
{
|
||||
txMap_->setFull();
|
||||
stateMap_->setFull();
|
||||
txMap_->setLedgerSeq(info_.seq);
|
||||
stateMap_->setLedgerSeq(info_.seq);
|
||||
txMap_.setFull();
|
||||
txMap_.setLedgerSeq(info_.seq);
|
||||
stateMap_.setFull();
|
||||
stateMap_.setLedgerSeq(info_.seq);
|
||||
}
|
||||
|
||||
void
|
||||
@@ -305,25 +322,25 @@ public:
|
||||
SHAMap const&
|
||||
stateMap() const
|
||||
{
|
||||
return *stateMap_;
|
||||
return stateMap_;
|
||||
}
|
||||
|
||||
SHAMap&
|
||||
stateMap()
|
||||
{
|
||||
return *stateMap_;
|
||||
return stateMap_;
|
||||
}
|
||||
|
||||
SHAMap const&
|
||||
txMap() const
|
||||
{
|
||||
return *txMap_;
|
||||
return txMap_;
|
||||
}
|
||||
|
||||
SHAMap&
|
||||
txMap()
|
||||
{
|
||||
return *txMap_;
|
||||
return txMap_;
|
||||
}
|
||||
|
||||
// returns false on error
|
||||
@@ -401,8 +418,11 @@ private:
|
||||
|
||||
bool mImmutable;
|
||||
|
||||
std::shared_ptr<SHAMap> txMap_;
|
||||
std::shared_ptr<SHAMap> stateMap_;
|
||||
// A SHAMap containing the transactions associated with this ledger.
|
||||
SHAMap mutable txMap_;
|
||||
|
||||
// A SHAMap containing the state objects for this ledger.
|
||||
SHAMap mutable stateMap_;
|
||||
|
||||
// Protects fee variables
|
||||
std::mutex mutable mutex_;
|
||||
|
||||
@@ -51,7 +51,9 @@ LedgerHistory::LedgerHistory(
|
||||
}
|
||||
|
||||
bool
|
||||
LedgerHistory::insert(std::shared_ptr<Ledger const> ledger, bool validated)
|
||||
LedgerHistory::insert(
|
||||
std::shared_ptr<Ledger const> const& ledger,
|
||||
bool validated)
|
||||
{
|
||||
if (!ledger->isImmutable())
|
||||
LogicError("mutable Ledger in insert");
|
||||
@@ -72,12 +74,9 @@ LedgerHash
|
||||
LedgerHistory::getLedgerHash(LedgerIndex index)
|
||||
{
|
||||
std::unique_lock sl(m_ledgers_by_hash.peekMutex());
|
||||
auto it = mLedgersByIndex.find(index);
|
||||
|
||||
if (it != mLedgersByIndex.end())
|
||||
if (auto it = mLedgersByIndex.find(index); it != mLedgersByIndex.end())
|
||||
return it->second;
|
||||
|
||||
return uint256();
|
||||
return {};
|
||||
}
|
||||
|
||||
std::shared_ptr<Ledger const>
|
||||
@@ -167,19 +166,19 @@ log_metadata_difference(
|
||||
uint256 const& tx,
|
||||
beast::Journal j)
|
||||
{
|
||||
auto getMeta = [](ReadView const& ledger,
|
||||
uint256 const& txID) -> std::shared_ptr<TxMeta> {
|
||||
auto meta = ledger.txRead(txID).second;
|
||||
if (!meta)
|
||||
return {};
|
||||
return std::make_shared<TxMeta>(txID, ledger.seq(), *meta);
|
||||
auto getMeta = [](ReadView const& ledger, uint256 const& txID) {
|
||||
std::optional<TxMeta> ret;
|
||||
if (auto meta = ledger.txRead(txID).second)
|
||||
ret.emplace(txID, ledger.seq(), *meta);
|
||||
return ret;
|
||||
};
|
||||
|
||||
auto validMetaData = getMeta(validLedger, tx);
|
||||
auto builtMetaData = getMeta(builtLedger, tx);
|
||||
assert(validMetaData != nullptr || builtMetaData != nullptr);
|
||||
|
||||
if (validMetaData != nullptr && builtMetaData != nullptr)
|
||||
assert(validMetaData || builtMetaData);
|
||||
|
||||
if (validMetaData && builtMetaData)
|
||||
{
|
||||
auto const& validNodes = validMetaData->getNodes();
|
||||
auto const& builtNodes = builtMetaData->getNodes();
|
||||
@@ -280,17 +279,21 @@ log_metadata_difference(
|
||||
<< validNodes.getJson(JsonOptions::none);
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
else if (validMetaData != nullptr)
|
||||
|
||||
if (validMetaData)
|
||||
{
|
||||
JLOG(j.error()) << "MISMATCH on TX " << tx
|
||||
<< ": Metadata Difference (built has none)\n"
|
||||
<< ": Metadata Difference. Valid=\n"
|
||||
<< validMetaData->getJson(JsonOptions::none);
|
||||
}
|
||||
else // builtMetaData != nullptr
|
||||
|
||||
if (builtMetaData)
|
||||
{
|
||||
JLOG(j.error()) << "MISMATCH on TX " << tx
|
||||
<< ": Metadata Difference (valid has none)\n"
|
||||
<< ": Metadata Difference. Built=\n"
|
||||
<< builtMetaData->getJson(JsonOptions::none);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -44,7 +44,7 @@ public:
|
||||
@return `true` if the ledger was already tracked
|
||||
*/
|
||||
bool
|
||||
insert(std::shared_ptr<Ledger const> ledger, bool validated);
|
||||
insert(std::shared_ptr<Ledger const> const& ledger, bool validated);
|
||||
|
||||
/** Get the ledgers_by_hash cache hit rate
|
||||
@return the hit rate
|
||||
@@ -70,8 +70,6 @@ public:
|
||||
LedgerHash
|
||||
getLedgerHash(LedgerIndex ledgerIndex);
|
||||
|
||||
/** Remove stale cache entries
|
||||
*/
|
||||
void
|
||||
sweep()
|
||||
{
|
||||
@@ -95,7 +93,7 @@ public:
|
||||
/** Repair a hash to index mapping
|
||||
@param ledgerIndex The index whose mapping is to be repaired
|
||||
@param ledgerHash The hash it is to be mapped to
|
||||
@return `true` if the mapping was repaired
|
||||
@return `false` if the mapping was repaired
|
||||
*/
|
||||
bool
|
||||
fixIndex(LedgerIndex ledgerIndex, LedgerHash const& ledgerHash);
|
||||
|
||||
@@ -128,7 +128,7 @@ public:
|
||||
getEarliestFetch();
|
||||
|
||||
bool
|
||||
storeLedger(std::shared_ptr<Ledger const> ledger);
|
||||
storeLedger(std::shared_ptr<Ledger const> ledger, bool pin = false);
|
||||
|
||||
void
|
||||
setFullLedger(
|
||||
@@ -152,6 +152,15 @@ public:
|
||||
std::string
|
||||
getCompleteLedgers();
|
||||
|
||||
std::string
|
||||
getPinnedLedgers();
|
||||
|
||||
RangeSet<std::uint32_t>
|
||||
getCompleteLedgersRangeSet();
|
||||
|
||||
RangeSet<std::uint32_t>
|
||||
getPinnedLedgersRangeSet();
|
||||
|
||||
/** Apply held transactions to the open ledger
|
||||
This is normally called as we close the ledger.
|
||||
The open ledger remains open to handle new transactions
|
||||
@@ -197,7 +206,10 @@ public:
|
||||
getLedgerByHash(uint256 const& hash);
|
||||
|
||||
void
|
||||
setLedgerRangePresent(std::uint32_t minV, std::uint32_t maxV);
|
||||
setLedgerRangePresent(
|
||||
std::uint32_t minV,
|
||||
std::uint32_t maxV,
|
||||
bool pin = false /* if true, do not let these ledgers be removed */);
|
||||
|
||||
std::optional<NetClock::time_point>
|
||||
getCloseTimeBySeq(LedgerIndex ledgerIndex);
|
||||
@@ -215,6 +227,8 @@ public:
|
||||
void
|
||||
clearLedger(std::uint32_t seq);
|
||||
bool
|
||||
isValidated(ReadView const& ledger);
|
||||
bool
|
||||
getValidatedRange(std::uint32_t& minVal, std::uint32_t& maxVal);
|
||||
bool
|
||||
getFullValidatedRange(std::uint32_t& minVal, std::uint32_t& maxVal);
|
||||
@@ -370,6 +384,7 @@ private:
|
||||
|
||||
std::recursive_mutex mCompleteLock;
|
||||
RangeSet<std::uint32_t> mCompleteLedgers;
|
||||
RangeSet<std::uint32_t> mPinnedLedgers; // Track pinned ledger ranges
|
||||
|
||||
// Publish thread is running.
|
||||
bool mAdvanceThread{false};
|
||||
|
||||
@@ -105,7 +105,7 @@ public:
|
||||
void
|
||||
gotSkipList(
|
||||
LedgerInfo const& info,
|
||||
std::shared_ptr<SHAMapItem const> const& data);
|
||||
boost::intrusive_ptr<SHAMapItem const> const& data);
|
||||
|
||||
/**
|
||||
* Process a ledger delta (extracted from a TMReplayDeltaResponse message)
|
||||
@@ -125,6 +125,27 @@ public:
|
||||
void
|
||||
stop();
|
||||
|
||||
std::size_t
|
||||
tasksSize() const
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(mtx_);
|
||||
return tasks_.size();
|
||||
}
|
||||
|
||||
std::size_t
|
||||
deltasSize() const
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(mtx_);
|
||||
return deltas_.size();
|
||||
}
|
||||
|
||||
std::size_t
|
||||
skipListsSize() const
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(mtx_);
|
||||
return skipLists_.size();
|
||||
}
|
||||
|
||||
private:
|
||||
mutable std::mutex mtx_;
|
||||
std::vector<std::shared_ptr<LedgerReplayTask>> tasks_;
|
||||
|
||||
@@ -21,11 +21,14 @@
|
||||
#define RIPPLE_APP_LEDGER_LEDGERTOJSON_H_INCLUDED
|
||||
|
||||
#include <ripple/app/ledger/Ledger.h>
|
||||
#include <ripple/app/ledger/LedgerMaster.h>
|
||||
#include <ripple/app/misc/TxQ.h>
|
||||
#include <ripple/basics/StringUtilities.h>
|
||||
#include <ripple/basics/chrono.h>
|
||||
#include <ripple/json/Object.h>
|
||||
#include <ripple/protocol/STTx.h>
|
||||
#include <ripple/protocol/jss.h>
|
||||
#include <ripple/protocol/serialize.h>
|
||||
#include <ripple/rpc/Context.h>
|
||||
|
||||
namespace ripple {
|
||||
@@ -40,6 +43,8 @@ struct LedgerFill
|
||||
LedgerEntryType t = ltANY)
|
||||
: ledger(l), options(o), txQueue(std::move(q)), type(t), context(ctx)
|
||||
{
|
||||
if (context)
|
||||
closeTime = context->ledgerMaster.getCloseTimeBySeq(ledger.seq());
|
||||
}
|
||||
|
||||
enum Options {
|
||||
@@ -57,6 +62,7 @@ struct LedgerFill
|
||||
std::vector<TxQ::TxDetails> txQueue;
|
||||
LedgerEntryType type;
|
||||
RPC::Context* context;
|
||||
std::optional<NetClock::time_point> closeTime;
|
||||
};
|
||||
|
||||
/** Given a Ledger and options, fill a Json::Object or Json::Value with a
|
||||
@@ -70,22 +76,6 @@ addJson(Json::Value&, LedgerFill const&);
|
||||
Json::Value
|
||||
getJson(LedgerFill const&);
|
||||
|
||||
/** Serialize an object to a blob. */
|
||||
template <class Object>
|
||||
Blob
|
||||
serializeBlob(Object const& o)
|
||||
{
|
||||
Serializer s;
|
||||
o.add(s);
|
||||
return s.peekData();
|
||||
}
|
||||
|
||||
/** Serialize an object to a hex string. */
|
||||
inline std::string
|
||||
serializeHex(STObject const& o)
|
||||
{
|
||||
return strHex(serializeBlob(o));
|
||||
}
|
||||
} // namespace ripple
|
||||
|
||||
#endif
|
||||
|
||||
@@ -20,6 +20,7 @@
 #include <ripple/app/ledger/LedgerMaster.h>
 #include <ripple/app/ledger/OrderBookDB.h>
 #include <ripple/app/main/Application.h>
+#include <ripple/app/misc/AMMUtils.h>
 #include <ripple/app/misc/NetworkOPs.h>
 #include <ripple/basics/Log.h>
 #include <ripple/core/Config.h>
@@ -93,7 +94,7 @@ OrderBookDB::update(std::shared_ptr<ReadView const> const& ledger)

     JLOG(j_.debug()) << "Beginning update (" << ledger->seq() << ")";

-    // walk through the entire ledger looking for orderbook entries
+    // walk through the entire ledger looking for orderbook/AMM entries
     int cnt = 0;

     try
@@ -126,6 +127,21 @@ OrderBookDB::update(std::shared_ptr<ReadView const> const& ledger)

                 ++cnt;
             }
+            else if (sle->getType() == ltAMM)
+            {
+                auto const issue1 = (*sle)[sfAsset];
+                auto const issue2 = (*sle)[sfAsset2];
+                auto addBook = [&](Issue const& in, Issue const& out) {
+                    allBooks[in].insert(out);
+
+                    if (isXRP(out))
+                        xrpBooks.insert(in);
+
+                    ++cnt;
+                };
+                addBook(issue1, issue2);
+                addBook(issue2, issue1);
+            }
         }
     }
     catch (SHAMapMissingNode const& mn)
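
For every AMM entry, the new branch above registers a synthetic order book in both directions of the asset pair. A reduced sketch of that bookkeeping with Issue shrunk to a plain string (purely illustrative, not the codebase's types):

    #include <iostream>
    #include <map>
    #include <set>
    #include <string>

    using Issue = std::string;

    int main()
    {
        std::map<Issue, std::set<Issue>> allBooks;
        std::set<Issue> xrpBooks;
        int cnt = 0;

        auto const isXRP = [](Issue const& i) { return i == "XRP"; };

        Issue const issue1 = "XRP";
        Issue const issue2 = "USD/rGateway";

        // Register the pair in both directions, mirroring addBook() above.
        auto addBook = [&](Issue const& in, Issue const& out) {
            allBooks[in].insert(out);
            if (isXRP(out))
                xrpBooks.insert(in);
            ++cnt;
        };
        addBook(issue1, issue2);
        addBook(issue2, issue1);

        std::cout << "books registered: " << cnt << '\n';      // 2
        std::cout << "XRP books: " << xrpBooks.size() << '\n';  // 1
    }
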
@@ -234,7 +250,7 @@ void
 OrderBookDB::processTxn(
     std::shared_ptr<ReadView const> const& ledger,
     const AcceptedLedgerTx& alTx,
-    Json::Value const& jvObj)
+    MultiApiJson const& jvObj)
 {
     std::lock_guard sl(mLock);
@@ -23,6 +23,8 @@
 #include <ripple/app/ledger/AcceptedLedgerTx.h>
 #include <ripple/app/ledger/BookListeners.h>
 #include <ripple/app/main/Application.h>
+#include <ripple/json/MultivarJson.h>
+
 #include <mutex>

 namespace ripple {
@@ -63,7 +65,7 @@ public:
     processTxn(
         std::shared_ptr<ReadView const> const& ledger,
         const AcceptedLedgerTx& alTx,
-        Json::Value const& jvObj);
+        MultiApiJson const& jvObj);

 private:
     Application& app_;
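
processTxn() now takes a MultiApiJson instead of a single Json::Value: one pre-rendered payload per supported API version, so subscribers on different versions each receive the shape they expect. A rough, self-contained illustration of that idea only; the real ripple::MultivarJson interface is not reproduced here and the names below are invented:

    #include <array>
    #include <cstddef>
    #include <iostream>
    #include <string>

    // One payload slot per supported API version (hypothetical container).
    struct MultiApiText
    {
        static constexpr std::size_t versions = 2;  // e.g. API v1 and v2
        std::array<std::string, versions> payload;

        std::string const& select(unsigned apiVersion) const
        {
            return payload.at(apiVersion - 1);  // versions are 1-based
        }
    };

    int main()
    {
        MultiApiText msg;
        msg.payload[0] = R"({"transaction":{...}})";  // v1 field layout
        msg.payload[1] = R"({"tx_json":{...}})";      // v2 field layout

        std::cout << msg.select(2) << '\n';  // what a v2 subscriber gets
    }
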
@@ -68,7 +68,7 @@ public:

     std::shared_ptr<STTx const>
     fetch(
-        std::shared_ptr<SHAMapItem> const& item,
+        boost::intrusive_ptr<SHAMapItem> const& item,
         SHAMapNodeType type,
         std::uint32_t uCommitLedger);
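
Two hunks in this diff swap std::shared_ptr<SHAMapItem> handles for boost::intrusive_ptr. A minimal sketch of what that entails: the reference count lives inside the object itself and is driven through the two hook functions boost::intrusive_ptr finds by argument-dependent lookup (the Item type below is a stand-in, not SHAMapItem):

    #include <boost/intrusive_ptr.hpp>
    #include <atomic>
    #include <iostream>

    class Item
    {
    public:
        int value = 42;

    private:
        std::atomic<int> refs_{0};

        // Hooks required by boost::intrusive_ptr, found via ADL.
        friend void intrusive_ptr_add_ref(Item* p)
        {
            p->refs_.fetch_add(1, std::memory_order_relaxed);
        }
        friend void intrusive_ptr_release(Item* p)
        {
            if (p->refs_.fetch_sub(1, std::memory_order_acq_rel) == 1)
                delete p;
        }
    };

    int main()
    {
        boost::intrusive_ptr<Item> a(new Item);  // refcount == 1
        boost::intrusive_ptr<Item> b = a;        // refcount == 2
        std::cout << b->value << '\n';
    }   // both handles drop; the Item deletes itself
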
@@ -116,10 +116,8 @@ applyTransactions(
     {
         auto const txid = it->first.getTXID();

-#ifndef DEBUG
         try
         {
-#endif
             if (pass == 0 && built->txExists(txid))
             {
                 it = txns.erase(it);
@@ -142,7 +140,6 @@ applyTransactions(
                 case ApplyResult::Retry:
                     ++it;
             }
-#ifndef DEBUG
         }
         catch (std::exception const& ex)
         {
@@ -151,7 +148,6 @@ applyTransactions(
             failed.insert(txid);
             it = txns.erase(it);
         }
-#endif
     }

     JLOG(j.debug()) << (certainRetry ? "Pass: " : "Final pass: ") << pass
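
With the #ifndef DEBUG guards removed, the per-transaction try/catch in applyTransactions() is compiled into every build, so a throwing transaction is recorded as failed and erased instead of aborting a debug run. A compact sketch of that erase-or-record loop shape (toy data, not the real transaction set):

    #include <exception>
    #include <iostream>
    #include <map>
    #include <set>
    #include <stdexcept>

    int main()
    {
        // id -> "throws while applying?"
        std::map<int, bool> txns = {{1, false}, {2, true}, {3, false}};
        std::set<int> failed;

        for (auto it = txns.begin(); it != txns.end();)
        {
            auto const txid = it->first;
            try
            {
                if (it->second)
                    throw std::runtime_error("apply failed");
                it = txns.erase(it);  // applied: drop from the work set
            }
            catch (std::exception const&)
            {
                failed.insert(txid);
                it = txns.erase(it);  // failed: drop and remember the id
            }
        }

        std::cout << "failed: " << failed.size() << '\n';  // 1
    }
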
@@ -269,36 +269,6 @@ InboundLedger::neededStateHashes(int max, SHAMapSyncFilter* filter) const
         mLedger->info().accountHash, mLedger->stateMap(), max, filter);
 }

-LedgerInfo
-deserializeHeader(Slice data, bool hasHash)
-{
-    SerialIter sit(data.data(), data.size());
-
-    LedgerInfo info;
-
-    info.seq = sit.get32();
-    info.drops = sit.get64();
-    info.parentHash = sit.get256();
-    info.txHash = sit.get256();
-    info.accountHash = sit.get256();
-    info.parentCloseTime =
-        NetClock::time_point{NetClock::duration{sit.get32()}};
-    info.closeTime = NetClock::time_point{NetClock::duration{sit.get32()}};
-    info.closeTimeResolution = NetClock::duration{sit.get8()};
-    info.closeFlags = sit.get8();
-
-    if (hasHash)
-        info.hash = sit.get256();
-
-    return info;
-}
-
-LedgerInfo
-deserializePrefixedHeader(Slice data, bool hasHash)
-{
-    return deserializeHeader(data + 4, hasHash);
-}
-
 // See how much of the ledger data is stored locally
 // Data found in a fetch pack will be stored
 void
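
The deleted deserializeHeader()/deserializePrefixedHeader() read the ledger header fields in a fixed order; the arithmetic below merely restates what that field order implies about the serialized size (a sanity check derived from the removed code, not an independent specification):

    #include <cstddef>
    #include <iostream>

    int main()
    {
        std::size_t const seq = 4;              // get32
        std::size_t const drops = 8;            // get64
        std::size_t const hashes3 = 3 * 32;     // parentHash, txHash, accountHash
        std::size_t const parentClose = 4;      // get32
        std::size_t const close = 4;            // get32
        std::size_t const closeResolution = 1;  // get8
        std::size_t const closeFlags = 1;       // get8

        std::size_t const base = seq + drops + hashes3 + parentClose + close +
            closeResolution + closeFlags;

        std::cout << "header without hash: " << base << " bytes\n";       // 118
        std::cout << "header with hash:    " << base + 32 << " bytes\n";  // 150
        std::cout << "prefixed header:     " << base + 4
                  << " bytes (plus optional hash)\n";                     // 122
    }
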
@@ -560,12 +530,11 @@ InboundLedger::trigger(std::shared_ptr<Peer> const& peer, TriggerReason reason)
         return;
     }

-    if (auto stream = journal_.trace())
+    if (auto stream = journal_.debug())
     {
-        stream << "Trigger acquiring ledger " << hash_;
         if (peer)
-            stream << " from " << peer;
+            stream << "Trigger acquiring ledger " << hash_ << " from " << peer;
+        else
+            stream << "Trigger acquiring ledger " << hash_;

         if (complete_ || failed_)
             stream << "complete=" << complete_ << " failed=" << failed_;
@@ -28,6 +28,7 @@
 #include <ripple/core/JobQueue.h>
 #include <ripple/nodestore/DatabaseShard.h>
 #include <ripple/protocol/jss.h>
+#include <exception>
 #include <memory>
 #include <mutex>
 #include <vector>
@@ -141,6 +142,37 @@ public:
         return inbound->getLedger();
     }

+    void
+    acquireAsync(
+        uint256 const& hash,
+        std::uint32_t seq,
+        InboundLedger::Reason reason) override
+    {
+        std::unique_lock lock(acquiresMutex_);
+        try
+        {
+            if (pendingAcquires_.contains(hash))
+                return;
+            pendingAcquires_.insert(hash);
+            lock.unlock();
+            acquire(hash, seq, reason);
+        }
+        catch (std::exception const& e)
+        {
+            JLOG(j_.warn())
+                << "Exception thrown for acquiring new inbound ledger " << hash
+                << ": " << e.what();
+        }
+        catch (...)
+        {
+            JLOG(j_.warn())
+                << "Unknown exception thrown for acquiring new inbound ledger "
+                << hash;
+        }
+        lock.lock();
+        pendingAcquires_.erase(hash);
+    }
+
     std::shared_ptr<InboundLedger>
     find(uint256 const& hash) override
     {
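
acquireAsync() above deduplicates concurrent requests for the same ledger hash with a mutex-guarded pending set, unlocking around the potentially slow acquire call and always erasing the hash afterwards. A standalone sketch of that pattern with the acquire step reduced to a sleep (names and types here are illustrative, not the codebase's):

    #include <chrono>
    #include <exception>
    #include <iostream>
    #include <mutex>
    #include <set>
    #include <string>
    #include <thread>
    #include <vector>

    std::set<std::string> pendingAcquires;
    std::mutex acquiresMutex;

    void acquireAsync(std::string const& hash)
    {
        std::unique_lock lock(acquiresMutex);
        if (pendingAcquires.count(hash))
            return;                    // a duplicate request backs off early
        pendingAcquires.insert(hash);
        lock.unlock();                 // do the slow work unlocked

        try
        {
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            std::cout << "acquired " << hash << '\n';
        }
        catch (std::exception const& e)
        {
            std::cout << "acquire failed: " << e.what() << '\n';
        }

        lock.lock();                   // always clear the pending marker
        pendingAcquires.erase(hash);
    }

    int main()
    {
        std::vector<std::thread> threads;
        for (int i = 0; i < 4; ++i)
            threads.emplace_back(acquireAsync, "ABCD1234");  // same hash
        for (auto& t : threads)
            t.join();  // concurrent duplicates return without re-acquiring
    }
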
@@ -411,6 +443,13 @@ public:
         mRecentFailures.clear();
     }

+    std::size_t
+    cacheSize() override
+    {
+        ScopedLockType lock(mLock);
+        return mLedgers.size();
+    }
+
 private:
     clock_type& m_clock;

@@ -426,6 +465,9 @@ private:
     beast::insight::Counter mCounter;

     std::unique_ptr<PeerSetBuilder> mPeerSetBuilder;
+
+    std::set<uint256> pendingAcquires_;
+    std::mutex acquiresMutex_;
 };

 //------------------------------------------------------------------------------
@@ -219,7 +219,7 @@ private:
     run()
     {
         beast::setCurrentThreadName("LedgerCleaner");
-        JLOG(j_.debug()) << "Started";
+        JLOG(j_.debug()) << "Started ledger cleaner";

         while (true)
         {
@@ -392,7 +392,8 @@ private:

             if (app_.getFeeTrack().isLoadedLocal())
             {
-                JLOG(j_.debug()) << "Waiting for load to subside";
+                JLOG(j_.debug())
+                    << "Ledger Cleaner: Waiting for load to subside";
                 std::this_thread::sleep_for(std::chrono::seconds(5));
                 continue;
             }
@@ -415,13 +416,15 @@ private:
             bool fail = false;
             if (ledgerHash.isZero())
             {
-                JLOG(j_.info())
-                    << "Unable to get hash for ledger " << ledgerIndex;
+                JLOG(j_.warn())
+                    << "Ledger Cleaner: Unable to get hash for ledger "
+                    << ledgerIndex;
                 fail = true;
             }
             else if (!doLedger(ledgerIndex, ledgerHash, doNodes, doTxns))
             {
-                JLOG(j_.info()) << "Failed to process ledger " << ledgerIndex;
+                JLOG(j_.warn()) << "Ledger Cleaner: Failed to process ledger "
+                                << ledgerIndex;
                 fail = true;
             }
Some files were not shown because too many files have changed in this diff.