Mirror of https://github.com/Xahau/xahaud.git (synced 2026-01-21 07:05:16 +00:00)

Comparing sync-2.1.0...patch-tsh: 1 commit (2965d3d96c)
@@ -1,31 +0,0 @@
name: 'Configure ccache'
description: 'Sets up ccache with consistent configuration'

inputs:
  max_size:
    description: 'Maximum cache size'
    required: false
    default: '2G'
  hash_dir:
    description: 'Whether to include directory paths in hash'
    required: false
    default: 'true'
  compiler_check:
    description: 'How to check compiler for changes'
    required: false
    default: 'content'

runs:
  using: 'composite'
  steps:
    - name: Configure ccache
      shell: bash
      run: |
        mkdir -p ~/.ccache
        export CONF_PATH="${CCACHE_CONFIGPATH:-${CCACHE_DIR:-$HOME/.ccache}/ccache.conf}"
        mkdir -p $(dirname "$CONF_PATH")
        echo "max_size = ${{ inputs.max_size }}" > "$CONF_PATH"
        echo "hash_dir = ${{ inputs.hash_dir }}" >> "$CONF_PATH"
        echo "compiler_check = ${{ inputs.compiler_check }}" >> "$CONF_PATH"
        ccache -p # Print config for verification
        ccache -z # Zero statistics before the build
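For orientation, this composite action is consumed as an ordinary step by the workflows later in this diff. The action path, checkout step, and input values below are copied from the macOS workflow; only the surrounding layout is illustrative:

```
steps:
  - name: Checkout
    uses: actions/checkout@v4

  # Writes ~/.ccache/ccache.conf before any compile step runs
  - name: Configure ccache
    uses: ./.github/actions/xahau-configure-ccache
    with:
      max_size: 2G
      hash_dir: true
      compiler_check: content
```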
.github/actions/xahau-ga-build/action.yml
@@ -1,108 +0,0 @@
name: build
description: 'Builds the project with ccache integration'

inputs:
  generator:
    description: 'CMake generator to use'
    required: true
  configuration:
    description: 'Build configuration (Debug, Release, etc.)'
    required: true
  build_dir:
    description: 'Directory to build in'
    required: false
    default: '.build'
  cc:
    description: 'C compiler to use'
    required: false
    default: ''
  cxx:
    description: 'C++ compiler to use'
    required: false
    default: ''
  compiler-id:
    description: 'Unique identifier for compiler/version combination used for cache keys'
    required: false
    default: ''
  cache_version:
    description: 'Cache version for invalidation'
    required: false
    default: '1'
  ccache_enabled:
    description: 'Whether to use ccache'
    required: false
    default: 'true'
  main_branch:
    description: 'Main branch name for restore keys'
    required: false
    default: 'dev'

runs:
  using: 'composite'
  steps:
    - name: Generate safe branch name
      if: inputs.ccache_enabled == 'true'
      id: safe-branch
      shell: bash
      run: |
        SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
        echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT

    - name: Restore ccache directory
      if: inputs.ccache_enabled == 'true'
      id: ccache-restore
      uses: actions/cache/restore@v4
      with:
        path: ~/.ccache
        key: ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ steps.safe-branch.outputs.name }}
        restore-keys: |
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-${{ inputs.main_branch }}
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ inputs.configuration }}-
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
          ${{ runner.os }}-ccache-v${{ inputs.cache_version }}-

    - name: Configure project
      shell: bash
      run: |
        mkdir -p ${{ inputs.build_dir }}
        cd ${{ inputs.build_dir }}

        # Set compiler environment variables if provided
        if [ -n "${{ inputs.cc }}" ]; then
          export CC="${{ inputs.cc }}"
        fi

        if [ -n "${{ inputs.cxx }}" ]; then
          export CXX="${{ inputs.cxx }}"
        fi

        # Configure ccache launcher args
        CCACHE_ARGS=""
        if [ "${{ inputs.ccache_enabled }}" = "true" ]; then
          CCACHE_ARGS="-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
        fi

        # Run CMake configure
        cmake .. \
          -G "${{ inputs.generator }}" \
          $CCACHE_ARGS \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_BUILD_TYPE=${{ inputs.configuration }}

    - name: Build project
      shell: bash
      run: |
        cd ${{ inputs.build_dir }}
        cmake --build . --config ${{ inputs.configuration }} --parallel $(nproc)

    - name: Show ccache statistics
      if: inputs.ccache_enabled == 'true'
      shell: bash
      run: ccache -s

    - name: Save ccache directory
      if: inputs.ccache_enabled == 'true'
      uses: actions/cache/save@v4
      with:
        path: ~/.ccache
        key: ${{ steps.ccache-restore.outputs.cache-primary-key }}
.github/actions/xahau-ga-dependencies/action.yml
@@ -1,86 +0,0 @@
name: dependencies
description: 'Installs build dependencies with caching'

inputs:
  configuration:
    description: 'Build configuration (Debug, Release, etc.)'
    required: true
  build_dir:
    description: 'Directory to build dependencies in'
    required: false
    default: '.build'
  compiler-id:
    description: 'Unique identifier for compiler/version combination used for cache keys'
    required: false
    default: ''
  cache_version:
    description: 'Cache version for invalidation'
    required: false
    default: '1'
  cache_enabled:
    description: 'Whether to use caching'
    required: false
    default: 'true'
  main_branch:
    description: 'Main branch name for restore keys'
    required: false
    default: 'dev'

outputs:
  cache-hit:
    description: 'Whether there was a cache hit'
    value: ${{ steps.cache-restore-conan.outputs.cache-hit }}

runs:
  using: 'composite'
  steps:
    - name: Generate safe branch name
      if: inputs.cache_enabled == 'true'
      id: safe-branch
      shell: bash
      run: |
        SAFE_BRANCH=$(echo "${{ github.ref_name }}" | tr -c 'a-zA-Z0-9_.-' '-')
        echo "name=${SAFE_BRANCH}" >> $GITHUB_OUTPUT

    - name: Restore Conan cache
      if: inputs.cache_enabled == 'true'
      id: cache-restore-conan
      uses: actions/cache/restore@v4
      with:
        path: |
          ~/.conan
          ~/.conan2
        key: ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-${{ inputs.configuration }}
        restore-keys: |
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-${{ hashFiles('**/conanfile.txt', '**/conanfile.py') }}-
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-${{ inputs.compiler-id }}-
          ${{ runner.os }}-conan-v${{ inputs.cache_version }}-

    - name: Export custom recipes
      shell: bash
      run: |
        conan export external/snappy snappy/1.1.9@
        conan export external/soci soci/4.0.3@

    - name: Install dependencies
      shell: bash
      run: |
        # Create build directory
        mkdir -p ${{ inputs.build_dir }}
        cd ${{ inputs.build_dir }}

        # Install dependencies using conan
        conan install \
          --output-folder . \
          --build missing \
          --settings build_type=${{ inputs.configuration }} \
          ..

    - name: Save Conan cache
      if: inputs.cache_enabled == 'true' && steps.cache-restore-conan.outputs.cache-hit != 'true'
      uses: actions/cache/save@v4
      with:
        path: |
          ~/.conan
          ~/.conan2
        key: ${{ steps.cache-restore-conan.outputs.cache-primary-key }}
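The `cache-hit` output declared above is not consumed anywhere in this diff. A hedged sketch of how a calling workflow could use it (the step id `deps` and the gated step are illustrative, not part of the repository):

```
- name: Install dependencies
  id: deps
  uses: ./.github/actions/xahau-ga-dependencies
  with:
    configuration: Debug

# Illustrative follow-up: only runs when the Conan cache was not restored
- name: Report Conan cache status
  if: steps.deps.outputs.cache-hit != 'true'
  run: echo "Conan cache missed; dependencies were built from source"
```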
.github/pull_request_template.md
@@ -33,7 +33,6 @@ Please check [x] relevant options, delete irrelevant ones.
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Refactor (non-breaking change that only restructures code)
- [ ] Performance (increase or change in throughput and/or latency)
- [ ] Tests (you added tests for code that already exists, or your new feature included in this PR)
- [ ] Documentation update
- [ ] Chore (no impact to binary, e.g. `.gitignore`, formatting, dropping support for older tooling)
@@ -59,12 +58,6 @@ Please check [x] relevant options, delete irrelevant ones.
## Before / After
If relevant, use this section for an English description of the change at a technical level.
If this change affects an API, examples should be included here.

For performance-impacting changes, please provide these details:
1. Is this a new feature, bug fix, or improvement to existing functionality?
2. What behavior/functionality does the change impact?
3. In what processing can the impact be measured? Be as specific as possible - e.g. RPC client call, payment transaction that involves LOB, AMM, caching, DB operations, etc.
4. Does this change affect concurrent processing - e.g. does it involve acquiring locks, multi-threaded processing, or async processing?
-->

<!--
.github/workflows/checkpatterns.yml
@@ -1,20 +0,0 @@
name: checkpatterns

on: [push, pull_request]

jobs:
  checkpatterns:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check for suspicious patterns
        run: |
          if [ -f "suspicious_patterns.sh" ]; then
            bash suspicious_patterns.sh
          else
            echo "Warning: suspicious_patterns.sh not found, skipping check"
            # Still exit with success for compatibility with dependent jobs
            exit 0
          fi
.github/workflows/doxygen.yml (new file)
@@ -0,0 +1,25 @@
name: Build and publish Doxygen documentation
on:
  push:
    branches:
      - dev

jobs:
  job:
    runs-on: ubuntu-latest
    container:
      image: docker://rippleci/rippled-ci-builder:2944b78d22db
    steps:
      - name: checkout
        uses: actions/checkout@v2
      - name: build
        run: |
          mkdir build
          cd build
          cmake -DBoost_NO_BOOST_CMAKE=ON ..
          cmake --build . --target docs --parallel $(nproc)
      - name: publish
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: build/docs/html
.github/workflows/xahau-ga-macos.yml
@@ -1,116 +0,0 @@
name: MacOS - GA Runner

on:
  push:
    branches: ["dev", "candidate", "release"]
  pull_request:
    branches: ["dev", "candidate", "release"]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test:
    strategy:
      matrix:
        generator:
          - Ninja
        configuration:
          - Debug
    runs-on: macos-15
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.
      CACHE_VERSION: 1
      MAIN_BRANCH_NAME: dev
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Conan
        run: |
          brew install conan@1
          # Add Conan 1 to the PATH for this job
          echo "$(brew --prefix conan@1)/bin" >> $GITHUB_PATH

      - name: Install Coreutils
        run: |
          brew install coreutils
          echo "Num proc: $(nproc)"

      - name: Install Ninja
        if: matrix.generator == 'Ninja'
        run: brew install ninja

      - name: Install Python
        run: |
          if which python3 > /dev/null 2>&1; then
            echo "Python 3 executable exists"
            python3 --version
          else
            brew install python@3.12
          fi
          # Create 'python' symlink if it doesn't exist (for tools expecting 'python')
          if ! which python > /dev/null 2>&1; then
            sudo ln -sf $(which python3) /usr/local/bin/python
          fi

      - name: Install CMake
        run: |
          if which cmake > /dev/null 2>&1; then
            echo "cmake executable exists"
            cmake --version
          else
            brew install cmake
          fi

      - name: Install ccache
        run: brew install ccache

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content

      - name: Check environment
        run: |
          echo "PATH:"
          echo "${PATH}" | tr ':' '\n'
          which python && python --version || echo "Python not found"
          which conan && conan --version || echo "Conan not found"
          which cmake && cmake --version || echo "CMake not found"
          clang --version
          ccache --version
          echo "---- Full Environment ----"
          env

      - name: Configure Conan
        run: |
          conan profile new default --detect || true # Ignore error if profile exists
          conan profile update settings.compiler.cppstd=20 default

      - name: Install dependencies
        uses: ./.github/actions/xahau-ga-dependencies
        with:
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
        with:
          generator: ${{ matrix.generator }}
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: clang
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Test
        run: |
          ${{ env.build_dir }}/rippled --unittest --unittest-jobs $(nproc)
.github/workflows/xahau-ga-nix.yml
@@ -1,123 +0,0 @@
name: Nix - GA Runner

on:
  push:
    branches: ["dev", "candidate", "release"]
  pull_request:
    branches: ["dev", "candidate", "release"]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build-job:
    runs-on: ubuntu-latest
    outputs:
      artifact_name: ${{ steps.set-artifact-name.outputs.artifact_name }}
    strategy:
      fail-fast: false
      matrix:
        compiler: [gcc]
        configuration: [Debug]
        include:
          - compiler: gcc
            cc: gcc-11
            cxx: g++-11
            compiler_id: gcc-11
    env:
      build_dir: .build
      # Bump this number to invalidate all caches globally.
      CACHE_VERSION: 1
      MAIN_BRANCH_NAME: dev
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y ninja-build ${{ matrix.cc }} ${{ matrix.cxx }} ccache
          # Install specific Conan version needed
          pip install --upgrade "conan<2"

      - name: Configure ccache
        uses: ./.github/actions/xahau-configure-ccache
        with:
          max_size: 2G
          hash_dir: true
          compiler_check: content

      - name: Configure Conan
        run: |
          conan profile new default --detect || true # Ignore error if profile exists
          conan profile update settings.compiler.cppstd=20 default
          conan profile update settings.compiler=${{ matrix.compiler }} default
          conan profile update settings.compiler.libcxx=libstdc++11 default
          conan profile update env.CC=/usr/bin/${{ matrix.cc }} default
          conan profile update env.CXX=/usr/bin/${{ matrix.cxx }} default
          conan profile update conf.tools.build:compiler_executables='{"c": "/usr/bin/${{ matrix.cc }}", "cpp": "/usr/bin/${{ matrix.cxx }}"}' default

          # Set correct compiler version based on matrix.compiler
          if [ "${{ matrix.compiler }}" = "gcc" ]; then
            conan profile update settings.compiler.version=11 default
          elif [ "${{ matrix.compiler }}" = "clang" ]; then
            conan profile update settings.compiler.version=14 default
          fi
          # Display profile for verification
          conan profile show default

      - name: Check environment
        run: |
          echo "PATH:"
          echo "${PATH}" | tr ':' '\n'
          which conan && conan --version || echo "Conan not found"
          which cmake && cmake --version || echo "CMake not found"
          which ${{ matrix.cc }} && ${{ matrix.cc }} --version || echo "${{ matrix.cc }} not found"
          which ${{ matrix.cxx }} && ${{ matrix.cxx }} --version || echo "${{ matrix.cxx }} not found"
          which ccache && ccache --version || echo "ccache not found"
          echo "---- Full Environment ----"
          env

      - name: Install dependencies
        uses: ./.github/actions/xahau-ga-dependencies
        with:
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          compiler-id: ${{ matrix.compiler_id }}
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Build
        uses: ./.github/actions/xahau-ga-build
        with:
          generator: Ninja
          configuration: ${{ matrix.configuration }}
          build_dir: ${{ env.build_dir }}
          cc: ${{ matrix.cc }}
          cxx: ${{ matrix.cxx }}
          compiler-id: ${{ matrix.compiler_id }}
          cache_version: ${{ env.CACHE_VERSION }}
          main_branch: ${{ env.MAIN_BRANCH_NAME }}

      - name: Set artifact name
        id: set-artifact-name
        run: |
          ARTIFACT_NAME="build-output-nix-${{ github.run_id }}-${{ matrix.compiler }}-${{ matrix.configuration }}"
          echo "artifact_name=${ARTIFACT_NAME}" >> "$GITHUB_OUTPUT"
          echo "Using artifact name: ${ARTIFACT_NAME}"

      - name: Debug build directory
        run: |
          echo "Checking build directory contents: ${{ env.build_dir }}"
          ls -la ${{ env.build_dir }} || echo "Build directory not found or empty"

      - name: Run tests
        run: |
          # Ensure the binary exists before trying to run
          if [ -f "${{ env.build_dir }}/rippled" ]; then
            ${{ env.build_dir }}/rippled --unittest --unittest-jobs $(nproc)
          else
            echo "Error: rippled executable not found in ${{ env.build_dir }}"
            exit 1
          fi
.gitignore
@@ -116,7 +116,3 @@ CMakeUserPresets.json
bld.rippled/

generated
.vscode

# Suggested in-tree build directory
/.build/
BUILD.md
@@ -1,532 +0,0 @@
| :warning: **WARNING** :warning: |
|---|
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |

> These instructions also assume a basic familiarity with Conan and CMake.
> If you are unfamiliar with Conan,
> you can read our [crash course](./docs/build/conan.md)
> or the official [Getting Started][3] walkthrough.

## Branches

For a stable release, choose the `master` branch or one of the [tagged
releases](https://github.com/ripple/rippled/releases).

```
git checkout master
```

For the latest release candidate, choose the `release` branch.

```
git checkout release
```

For the latest set of untested features, or to contribute, choose the `develop`
branch.

```
git checkout develop
```

## Minimum Requirements

See [System Requirements](https://xrpl.org/system-requirements.html).

Building rippled generally requires git, Python, Conan, CMake, and a C++ compiler. Some guidance on setting up such a [C++ development environment can be found here](./docs/build/environment.md).

- [Python 3.7](https://www.python.org/downloads/)
- [Conan 1.55](https://conan.io/downloads.html)
- [CMake 3.16](https://cmake.org/download/)

`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:

| Compiler    | Version |
|-------------|---------|
| GCC         | 11      |
| Clang       | 13      |
| Apple Clang | 13.1.6  |
| MSVC        | 19.23   |

### Linux

The Ubuntu operating system has received the highest level of
quality assurance, testing, and support.

Here are [sample instructions for setting up a C++ development environment on Linux](./docs/build/environment.md#linux).

### Mac

Many rippled engineers use macOS for development.

Here are [sample instructions for setting up a C++ development environment on macOS](./docs/build/environment.md#macos).

### Windows

Windows is not recommended for production use at this time.

- Additionally, 32-bit Windows development is not supported.
- Visual Studio 2022 is not yet supported.
  - rippled generally requires [Boost][] 1.77, which Conan cannot build with VS 2022.
  - Until rippled is updated for compatibility with later versions of Boost, Windows developers may need to use Visual Studio 2019.

[Boost]: https://www.boost.org/

## Steps

### Set Up Conan

After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python, Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.

These instructions assume a basic familiarity with Conan and CMake.

If you are unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official [Getting Started][3] walkthrough.

You'll need at least one Conan profile:

```
conan profile new default --detect
```

Update the compiler settings:

```
conan profile update settings.compiler.cppstd=20 default
```

**Linux** developers will commonly have a default Conan [profile][] that compiles
with GCC and links with libstdc++.
If you are linking with libstdc++ (see profile setting `compiler.libcxx`),
then you will need to choose the `libstdc++11` ABI:

```
conan profile update settings.compiler.libcxx=libstdc++11 default
```

**Windows** developers may need to use the x64 native build tools.
An easy way to do that is to run the shortcut "x64 Native Tools Command
Prompt" for the version of Visual Studio that you have installed.

Windows developers must also build `rippled` and its dependencies for the x64
architecture:

```
conan profile update settings.arch=x86_64 default
```

### Multiple compilers

When `/usr/bin/g++` exists on a platform, it is the default C++ compiler. This
default works for some users.

However, if this compiler cannot build rippled or its dependencies, then you can
install another compiler and set Conan and CMake to use it.
Update the `conf.tools.build:compiler_executables` setting in order to set the correct variables (`CMAKE_<LANG>_COMPILER`) in the
generated CMake toolchain file.
For example, on Ubuntu 20, you may have gcc at `/usr/bin/gcc` and g++ at `/usr/bin/g++`; if that is the case, you can select those compilers with:

```
conan profile update 'conf.tools.build:compiler_executables={"c": "/usr/bin/gcc", "cpp": "/usr/bin/g++"}' default
```

Replace `/usr/bin/gcc` and `/usr/bin/g++` with paths to the desired compilers.

It should choose the compiler for dependencies as well,
but not all of them have a Conan recipe that respects this setting (yet).
For the rest, you can set these environment variables.
Replace `<path>` with paths to the desired compilers:

- `conan profile update env.CC=<path> default`
- `conan profile update env.CXX=<path> default`

Export our [Conan recipe for Snappy](./external/snappy).
It does not explicitly link the C++ standard library,
which allows you to statically link it with GCC, if you want.

```
conan export external/snappy snappy/1.1.9@
```

Export our [Conan recipe for SOCI](./external/soci).
It patches their CMake to correctly import its dependencies.

```
conan export external/soci soci/4.0.3@
```

### Build and Test

1. Create a build directory and move into it.

   ```
   mkdir .build
   cd .build
   ```

   You can use any directory name. Conan treats your working directory as an
   install folder and generates files with implementation details.
   You don't need to worry about these files, but make sure to change
   your working directory to your build directory before calling Conan.

   **Note:** You can specify a directory for the installation files by adding
   the `install-folder` or `-if` option to every `conan install` command
   in the next step.

2. Generate CMake files for every configuration you want to build.

   ```
   conan install .. --output-folder . --build missing --settings build_type=Release
   conan install .. --output-folder . --build missing --settings build_type=Debug
   ```

   For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
   you only need to run this command once.
   For a multi-configuration generator, e.g. `Visual Studio`, you may want to
   run it more than once.

   Each of these commands should also have a different `build_type` setting.
   A second command with the same `build_type` setting will overwrite the files
   generated by the first. You can pass the build type on the command line with
   `--settings build_type=$BUILD_TYPE` or in the profile itself,
   under the section `[settings]` with the key `build_type`.

   If you are using a Microsoft Visual C++ compiler,
   then you will need to ensure consistency between the `build_type` setting
   and the `compiler.runtime` setting.

   When `build_type` is `Release`, `compiler.runtime` should be `MT`.

   When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.

   ```
   conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
   conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
   ```

3. Configure CMake and pass the toolchain file generated by Conan, located at
   `$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.

   Single-config generators:

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
   ```

   Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
   and make sure it matches the `build_type` setting you chose in the previous
   step.

   Multi-config generators:

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
   ```

   **Note:** You can pass build options for `rippled` in this step.

4. Build `rippled`.

   For a single-configuration generator, it will build whatever configuration
   you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator,
   you must pass the option `--config` to select the build configuration.

   Single-config generators:

   ```
   cmake --build .
   ```

   Multi-config generators:

   ```
   cmake --build . --config Release
   cmake --build . --config Debug
   ```

5. Test rippled.

   Single-config generators:

   ```
   ./rippled --unittest
   ```

   Multi-config generators:

   ```
   ./Release/rippled --unittest
   ./Debug/rippled --unittest
   ```

   The location of `rippled` in your build directory depends on your CMake
   generator. Pass `--help` to see the rest of the command line options.

## Coverage report

The coverage report is intended for developers using compilers GCC
or Clang (including Apple Clang). It is generated by the build target `coverage`,
which is only enabled when the `coverage` option is set, e.g. with
`--options coverage=True` in `conan` or the `-Dcoverage=ON` variable in `cmake`.

Prerequisites for the coverage report:

- [gcovr tool][gcovr] (can be installed e.g. with [pip][python-pip])
- `gcov` for GCC (installed with the compiler by default) or
- `llvm-cov` for Clang (installed with the compiler by default)
- `Debug` build type

A coverage report is created when the following steps are completed, in order:

1. `rippled` binary built with instrumentation data, enabled by the `coverage`
   option mentioned above
2. completed run of unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
   to assemble both instrumentation data and the coverage capture data into a coverage report

The above steps are automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report, by setting
the `coverage_test` variable in `cmake`.

The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
by setting the `coverage_format` variable in `cmake`. It is also possible
to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.

By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.

Example use with some cmake variables set:

```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```

After the `coverage` target is completed, the generated coverage report will be
stored inside the build directory, as either of:

- file named `coverage.`_extension_, with a suitable extension for the report format, or
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.

## Options

| Option | Default Value | Description |
| --- | --- | --- |
| `assert` | OFF | Enable assertions. |
| `reporting` | OFF | Build the reporting mode feature. |
| `coverage` | OFF | Prepare the coverage report. |
| `tests` | ON | Build tests. |
| `unity` | ON | Configure a unity build. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |

[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
translation units. Non-unity builds may be faster for incremental builds,
and can be helpful for detecting `#include` omissions.

## Troubleshooting

### Conan

If you have trouble building dependencies after changing Conan settings,
try removing the Conan cache.

```
rm -rf ~/.conan/data
```

### no std::result_of

If your compiler version is recent enough to have removed `std::result_of` as
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
definition to your build.

```
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
```

### recompile with -fPIC

If you get a linker error suggesting that you recompile Boost with
position-independent code, such as:

```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
requires unsupported dynamic reloc 11; recompile with -fPIC
```

Conan most likely downloaded a bad binary distribution of the dependency.
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
for Linux. The solution is to build the dependency locally by passing
`--build boost` when calling `conan install`.

```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/dc8aedd23a0f0a773a5fcdcfe1ae3e89c4205978/lib/libboost_container.a(alloc_lib.o): requires unsupported dynamic reloc 11; recompile with -fPIC
```

## Add a Dependency

If you want to experiment with a new package, follow these steps (a sketch of steps 2 and 3 follows this list):

1. Search for the package on [Conan Center](https://conan.io/center/).
2. Modify [`conanfile.py`](./conanfile.py):
   - Add a version of the package to the `requires` property.
   - Change any default options for the package by adding them to the
     `default_options` property (with syntax `'$package:$option': $value`).
3. Modify [`CMakeLists.txt`](./CMakeLists.txt):
   - Add a call to `find_package($package REQUIRED)`.
   - Link a library from the package to the target `ripple_libs`
     (search for the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`).
4. Start coding! Don't forget to include whatever headers you need from the package.
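A hedged sketch of steps 2 and 3, using a hypothetical package `somepkg/1.2.3`; the package name, the option, and the `somepkg::somepkg` target below are placeholders, and the actual imported target name depends on the recipe:

```
# conanfile.py (illustrative): add the package to the requires property
requires = ['somepkg/1.2.3']                 # appended alongside the existing entries
default_options = {'somepkg:shared': False}  # only if you need to override a default option

# CMakeLists.txt (illustrative): import the package and link it into ripple_libs
find_package(somepkg REQUIRED)
target_link_libraries(ripple_libs INTERFACE somepkg::somepkg)
```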
## A crash course in CMake and Conan

To better understand how to use Conan,
we should first understand _why_ we use Conan,
and to understand that,
we need to understand how we use CMake.

### CMake

Technically, you don't need CMake to build this project.
You could manually compile every translation unit into an object file,
using the right compiler options,
and then manually link all those objects together,
using the right linker options.
However, that is very tedious and error-prone,
which is why we lean on tools like CMake.

We have written CMake configuration files
([`CMakeLists.txt`](./CMakeLists.txt) and friends)
for this project so that CMake can be used to correctly compile and link
all of the translation units in it.
Or rather, CMake will generate files for a separate build system
(e.g. Make, Ninja, Visual Studio, Xcode, etc.)
that compile and link all of the translation units.
Even then, CMake has parameters, some of which are platform-specific.
In CMake's parlance, parameters are specially-named **variables** like
[`CMAKE_BUILD_TYPE`][build_type] or
[`CMAKE_MSVC_RUNTIME_LIBRARY`][runtime].
Parameters include:

- what build system to generate files for
- where to find the compiler and linker
- where to find dependencies, e.g. libraries and headers
- how to link dependencies, e.g. any special compiler or linker flags that
  need to be used with them, including preprocessor definitions
- how to compile translation units, e.g. with optimizations, debug symbols,
  position-independent code, etc.
- on Windows, which runtime library to link with

For some of these parameters, like the build system and compiler,
CMake goes through a complicated search process to choose default values.
For others, like the dependencies,
_we_ had written in the CMake configuration files of this project
our own complicated process to choose defaults.
For most developers, things "just worked"... until they didn't, and then
you were left trying to debug one of these complicated processes, instead of
choosing and manually passing the parameter values yourself.

You can pass every parameter to CMake on the command line,
but writing out these parameters every time we want to configure CMake is
a pain.
Most humans prefer to put them into a configuration file, once, that
CMake can read every time it is configured.
For CMake, that file is a [toolchain file][toolchain].

### Conan

These next few paragraphs on Conan are going to read much like the ones above
for CMake.

Technically, you don't need Conan to build this project.
You could manually download, configure, build, and install all of the
dependencies yourself, and then pass all of the parameters necessary for
CMake to link to those dependencies.
To guarantee ABI compatibility, you must be sure to use the same set of
compiler and linker options for all dependencies _and_ this project.
However, that is very tedious and error-prone, which is why we lean on tools
like Conan.

We have written a Conan configuration file ([`conanfile.py`](./conanfile.py))
so that Conan can be used to correctly download, configure, build, and install
all of the dependencies for this project,
using a single set of compiler and linker options for all of them.
It generates files that contain almost all of the parameters that CMake
expects.
Those files include:

- A single toolchain file.
- For every dependency, a CMake [package configuration file][pcf],
  [package version file][pvf], and for every build type, a package
  targets file.
  Together, these files implement version checking and define `IMPORTED`
  targets for the dependencies.

The toolchain file itself amends the search path
([`CMAKE_PREFIX_PATH`][prefix_path]) so that [`find_package()`][find_package]
will [discover][search] the generated package configuration files.

**Nearly all we must do to properly configure CMake is pass the toolchain
file.**
What CMake parameters are left out?
You'll still need to pick a build system generator,
and if you choose a single-configuration generator,
you'll need to pass the `CMAKE_BUILD_TYPE`,
which should match the `build_type` setting you gave to Conan.

Even then, Conan has parameters, some of which are platform-specific.
In Conan's parlance, parameters are either settings or options.
**Settings** are shared by all packages, e.g. the build type.
**Options** are specific to a given package, e.g. whether to build and link
OpenSSL as a shared library.

For settings, Conan goes through a complicated search process to choose
defaults.
For options, each package recipe defines its own defaults.

You can pass every parameter to Conan on the command line,
but it is more convenient to put them in a [profile][profile].
**All we must do to properly configure Conan is edit and pass the profile.**
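For illustration only, here is roughly what such a default profile might look like after the updates described in this document. The values are assembled from the commands above; your detected settings will differ, and the `openssl:shared` option is just the example named earlier:

```
[settings]
os=Linux
arch=x86_64
build_type=Release
compiler=gcc
compiler.version=11
compiler.cppstd=20
compiler.libcxx=libstdc++11

[options]
openssl:shared=False

[env]
CC=/usr/bin/gcc
CXX=/usr/bin/g++
```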
[1]: https://github.com/conan-io/conan-center-index/issues/13168
[5]: https://en.wikipedia.org/wiki/Unity_build
[6]: https://github.com/boostorg/beast/issues/2648
[7]: https://github.com/boostorg/beast/issues/2661
[gcovr]: https://gcovr.com/en/stable/getting-started.html
[python-pip]: https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/
[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
[runtime]: https://cmake.org/cmake/help/latest/variable/CMAKE_MSVC_RUNTIME_LIBRARY.html
[toolchain]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html
[pcf]: https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#package-configuration-file
[pvf]: https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#package-version-file
[find_package]: https://cmake.org/cmake/help/latest/command/find_package.html
[search]: https://cmake.org/cmake/help/latest/command/find_package.html#search-procedure
[prefix_path]: https://cmake.org/cmake/help/latest/variable/CMAKE_PREFIX_PATH.html
[profile]: https://docs.conan.io/en/latest/reference/profiles.html
@@ -1,440 +0,0 @@
# Copyright (c) 2012 - 2017, Lars Bilke
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
#    list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
#    may be used to endorse or promote products derived from this software without
#    specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# CHANGES:
#
# 2012-01-31, Lars Bilke
# - Enable Code Coverage
#
# 2013-09-17, Joakim Söderberg
# - Added support for Clang.
# - Some additional usage instructions.
#
# 2016-02-03, Lars Bilke
# - Refactored functions to use named parameters
#
# 2017-06-02, Lars Bilke
# - Merged with modified version from github.com/ufz/ogs
#
# 2019-05-06, Anatolii Kurotych
# - Remove unnecessary --coverage flag
#
# 2019-12-13, FeRD (Frank Dana)
# - Deprecate COVERAGE_LCOVR_EXCLUDES and COVERAGE_GCOVR_EXCLUDES lists in favor
#   of tool-agnostic COVERAGE_EXCLUDES variable, or EXCLUDE setup arguments.
# - CMake 3.4+: All excludes can be specified relative to BASE_DIRECTORY
# - All setup functions: accept BASE_DIRECTORY, EXCLUDE list
# - Set lcov basedir with -b argument
# - Add automatic --demangle-cpp in lcovr, if 'c++filt' is available (can be
#   overridden with NO_DEMANGLE option in setup_target_for_coverage_lcovr().)
# - Delete output dir, .info file on 'make clean'
# - Remove Python detection, since version mismatches will break gcovr
# - Minor cleanup (lowercase function names, update examples...)
#
# 2019-12-19, FeRD (Frank Dana)
# - Rename Lcov outputs, make filtered file canonical, fix cleanup for targets
#
# 2020-01-19, Bob Apthorpe
# - Added gfortran support
#
# 2020-02-17, FeRD (Frank Dana)
# - Make all add_custom_target()s VERBATIM to auto-escape wildcard characters
#   in EXCLUDEs, and remove manual escaping from gcovr targets
#
# 2021-01-19, Robin Mueller
# - Add CODE_COVERAGE_VERBOSE option which will allow to print out commands which are run
# - Added the option for users to set the GCOVR_ADDITIONAL_ARGS variable to supply additional
#   flags to the gcovr command
#
# 2020-05-04, Mihchael Davis
# - Add -fprofile-abs-path to make gcno files contain absolute paths
# - Fix BASE_DIRECTORY not working when defined
# - Change BYPRODUCT from folder to index.html to stop ninja from complaining about double defines
#
# 2021-05-10, Martin Stump
# - Check if the generator is multi-config before warning about non-Debug builds
#
# 2022-02-22, Marko Wehle
# - Change gcovr output from -o <filename> for --xml <filename> and --html <filename> output respectively.
#   This will allow for Multiple Output Formats at the same time by making use of GCOVR_ADDITIONAL_ARGS, e.g. GCOVR_ADDITIONAL_ARGS "--txt".
#
# 2022-09-28, Sebastian Mueller
# - fix append_coverage_compiler_flags_to_target to correctly add flags
# - replace "-fprofile-arcs -ftest-coverage" with "--coverage" (equivalent)
#
# 2024-01-04, Bronek Kozicki
# - remove setup_target_for_coverage_lcov (slow) and setup_target_for_coverage_fastcov (no support for Clang)
# - fix Clang support by adding find_program( ... llvm-cov )
# - add Apple Clang support by adding execute_process( COMMAND xcrun -f llvm-cov ... )
# - add CODE_COVERAGE_GCOV_TOOL to explicitly select gcov tool and disable find_program
# - replace both functions setup_target_for_coverage_gcovr_* with a single setup_target_for_coverage_gcovr
# - add support for all gcovr output formats
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
#
# 2. Add the following line to your CMakeLists.txt (best inside an if-condition
#    using a CMake option() to enable it just optionally):
#      include(CodeCoverage)
#
# 3. Append necessary compiler flags for all supported source files:
#      append_coverage_compiler_flags()
#    Or for specific target:
#      append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
#
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
#
# 4. If you need to exclude additional directories from the report, specify them
#    using full paths in the COVERAGE_EXCLUDES variable before calling
#    setup_target_for_coverage_*().
#    Example:
#      set(COVERAGE_EXCLUDES
#          '${PROJECT_SOURCE_DIR}/src/dir1/*'
#          '/path/to/my/src/dir2/*')
#    Or, use the EXCLUDE argument to setup_target_for_coverage_*().
#    Example:
#      setup_target_for_coverage_gcovr(
#          NAME coverage
#          EXECUTABLE testrunner
#          EXCLUDE "${PROJECT_SOURCE_DIR}/src/dir1/*" "/path/to/my/src/dir2/*")
#
# 4.a NOTE: With CMake 3.4+, COVERAGE_EXCLUDES or EXCLUDE can also be set
#     relative to the BASE_DIRECTORY (default: PROJECT_SOURCE_DIR)
#     Example:
#       set(COVERAGE_EXCLUDES "dir1/*")
#       setup_target_for_coverage_gcovr(
#           NAME coverage
#           EXECUTABLE testrunner
#           FORMAT html-details
#           BASE_DIRECTORY "${PROJECT_SOURCE_DIR}/src"
#           EXCLUDE "dir2/*")
#
# 4.b If you need to pass specific options to gcovr, specify them in
#     GCOVR_ADDITIONAL_ARGS variable.
#     Example:
#       set (GCOVR_ADDITIONAL_ARGS --exclude-throw-branches --exclude-noncode-lines -s)
#       setup_target_for_coverage_gcovr(
#           NAME coverage
#           EXECUTABLE testrunner
#           EXCLUDE "src/dir1" "src/dir2")
#
# 5. Use the functions described below to create a custom make target which
#    runs your test executable and produces a code coverage report.
#
# 6. Build a Debug build:
#      cmake -DCMAKE_BUILD_TYPE=Debug ..
#      make
#      make my_coverage_target

include(CMakeParseArguments)

option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)

# Check prereqs
find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)

if(DEFINED CODE_COVERAGE_GCOV_TOOL)
    set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
    set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
    if(APPLE)
        execute_process( COMMAND xcrun -f llvm-cov
            OUTPUT_VARIABLE LLVMCOV_PATH
            OUTPUT_STRIP_TRAILING_WHITESPACE
        )
    else()
        find_program( LLVMCOV_PATH llvm-cov )
    endif()
    if(LLVMCOV_PATH)
        set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
    endif()
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
    find_program( GCOV_PATH gcov )
    set(GCOV_TOOL "${GCOV_PATH}")
endif()

# Check supported compiler (Clang, GNU and Flang)
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
foreach(LANG ${LANGUAGES})
    if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
        if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
            message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
        endif()
    elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
           AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang")
        message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
    endif()
endforeach()

set(COVERAGE_COMPILER_FLAGS "-g --coverage"
    CACHE INTERNAL "")
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
    include(CheckCXXCompilerFlag)
    check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
    if(HAVE_cxx_fprofile_abs_path)
        set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
    endif()
    include(CheckCCompilerFlag)
    check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
    if(HAVE_c_fprofile_abs_path)
        set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
    endif()
endif()

set(CMAKE_Fortran_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the Fortran compiler during coverage builds."
    FORCE )
set(CMAKE_CXX_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the C++ compiler during coverage builds."
    FORCE )
set(CMAKE_C_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the C compiler during coverage builds."
    FORCE )
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
    ""
    CACHE STRING "Flags used for linking binaries during coverage builds."
    FORCE )
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
    ""
    CACHE STRING "Flags used by the shared libraries linker during coverage builds."
    FORCE )
mark_as_advanced(
    CMAKE_Fortran_FLAGS_COVERAGE
    CMAKE_CXX_FLAGS_COVERAGE
    CMAKE_C_FLAGS_COVERAGE
    CMAKE_EXE_LINKER_FLAGS_COVERAGE
    CMAKE_SHARED_LINKER_FLAGS_COVERAGE )

get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
    message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)

if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
    link_libraries(gcov)
endif()

# Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
# the coverage generation will not complete.
#
# setup_target_for_coverage_gcovr(
#     NAME ctest_coverage                    # New target name
#     EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
#     DEPENDENCIES executable_target         # Dependencies to build first
#     BASE_DIRECTORY "../"                   # Base directory for report
#                                            #  (defaults to PROJECT_SOURCE_DIR)
#     FORMAT "cobertura"                     # Output format, one of:
#                                            #  xml cobertura sonarqube json-summary
#                                            #  json-details coveralls csv txt
#                                            #  html-single html-nested html-details
#                                            #  (xml is an alias to cobertura;
#                                            #  if no format is set, defaults to xml)
#     EXCLUDE "src/dir1/*" "src/dir2/*"      # Patterns to exclude (can be relative
#                                            #  to BASE_DIRECTORY, with CMake 3.4+)
# )
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the
# GCVOR command.
function(setup_target_for_coverage_gcovr)
    set(options NONE)
    set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
    set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
    cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})

    if(NOT GCOV_TOOL)
        message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...")
    endif()

    if(NOT GCOVR_PATH)
        message(FATAL_ERROR "Could not find gcovr tool! Aborting...")
    endif()

    # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
    if(DEFINED Coverage_BASE_DIRECTORY)
        get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
    else()
        set(BASEDIR ${PROJECT_SOURCE_DIR})
    endif()

    if(NOT DEFINED Coverage_FORMAT)
        set(Coverage_FORMAT xml)
    endif()

    if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
        message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
    else()
        if((Coverage_FORMAT STREQUAL "html-details")
           OR (Coverage_FORMAT STREQUAL "html-nested"))
            set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
            set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
        elseif(Coverage_FORMAT STREQUAL "html-single")
            set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
        elseif((Coverage_FORMAT STREQUAL "json-summary")
               OR (Coverage_FORMAT STREQUAL "json-details")
               OR (Coverage_FORMAT STREQUAL "coveralls"))
            set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
        elseif(Coverage_FORMAT STREQUAL "txt")
            set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt)
        elseif(Coverage_FORMAT STREQUAL "csv")
            set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv)
        else()
            set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml)
        endif()
    endif()

    if((Coverage_FORMAT STREQUAL "cobertura")
       OR (Coverage_FORMAT STREQUAL "xml"))
        list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}" )
        list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty )
        set(Coverage_FORMAT cobertura) # overwrite xml
    elseif(Coverage_FORMAT STREQUAL "sonarqube")
        list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}" )
    elseif(Coverage_FORMAT STREQUAL "json-summary")
        list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}" )
        list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty)
    elseif(Coverage_FORMAT STREQUAL "json-details")
        list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}" )
        list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty)
    elseif(Coverage_FORMAT STREQUAL "coveralls")
        list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}" )
        list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty)
    elseif(Coverage_FORMAT STREQUAL "csv")
        list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}" )
    elseif(Coverage_FORMAT STREQUAL "txt")
        list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}" )
    elseif(Coverage_FORMAT STREQUAL "html-single")
        list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}" )
        list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained)
    elseif(Coverage_FORMAT STREQUAL "html-nested")
        list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}" )
    elseif(Coverage_FORMAT STREQUAL "html-details")
        list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}" )
    else()
        message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...")
    endif()

    # Collect excludes (CMake 3.4+: Also compute absolute paths)
    set(GCOVR_EXCLUDES "")
    foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
        if(CMAKE_VERSION VERSION_GREATER 3.4)
            get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
        endif()
        list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
    endforeach()
    list(REMOVE_DUPLICATES GCOVR_EXCLUDES)

    # Combine excludes to several -e arguments
    set(GCOVR_EXCLUDE_ARGS "")
    foreach(EXCLUDE ${GCOVR_EXCLUDES})
        list(APPEND GCOVR_EXCLUDE_ARGS "-e")
        list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
    endforeach()

    # Set up commands which will be run to generate coverage data
|
||||
# Run tests
|
||||
set(GCOVR_EXEC_TESTS_CMD
|
||||
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
|
||||
)
|
||||
|
||||
# Create folder
|
||||
if(DEFINED GCOVR_CREATE_FOLDER)
|
||||
set(GCOVR_FOLDER_CMD
|
||||
${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
|
||||
else()
|
||||
set(GCOVR_FOLDER_CMD echo) # dummy
|
||||
endif()
|
||||
|
||||
# Running gcovr
|
||||
set(GCOVR_CMD
|
||||
${GCOVR_PATH}
|
||||
--gcov-executable ${GCOV_TOOL}
|
||||
--gcov-ignore-parse-errors=negative_hits.warn_once_per_file
|
||||
-r ${BASEDIR}
|
||||
${GCOVR_ADDITIONAL_ARGS}
|
||||
${GCOVR_EXCLUDE_ARGS}
|
||||
--object-directory=${PROJECT_BINARY_DIR}
|
||||
)
|
||||
|
||||
if(CODE_COVERAGE_VERBOSE)
|
||||
message(STATUS "Executed command report")
|
||||
|
||||
message(STATUS "Command to run tests: ")
|
||||
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
|
||||
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
|
||||
|
||||
if(NOT GCOVR_FOLDER_CMD STREQUAL "echo")
|
||||
message(STATUS "Command to create a folder: ")
|
||||
string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
|
||||
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
|
||||
endif()
|
||||
|
||||
message(STATUS "Command to generate gcovr coverage data: ")
|
||||
string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}")
|
||||
message(STATUS "${GCOVR_CMD_SPACED}")
|
||||
endif()
|
||||
|
||||
add_custom_target(${Coverage_NAME}
|
||||
COMMAND ${GCOVR_EXEC_TESTS_CMD}
|
||||
COMMAND ${GCOVR_FOLDER_CMD}
|
||||
COMMAND ${GCOVR_CMD}
|
||||
|
||||
BYPRODUCTS ${GCOVR_OUTPUT_FILE}
|
||||
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
|
||||
DEPENDS ${Coverage_DEPENDENCIES}
|
||||
VERBATIM # Protect arguments to commands
|
||||
COMMENT "Running gcovr to produce code coverage report."
|
||||
)
|
||||
|
||||
# Show info where to find the report
|
||||
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
|
||||
COMMAND ;
|
||||
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
|
||||
)
|
||||
endfunction() # setup_target_for_coverage_gcovr
|
||||
|
||||
function(append_coverage_compiler_flags)
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
||||
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
||||
message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
|
||||
endfunction() # append_coverage_compiler_flags
|
||||
|
||||
# Setup coverage for specific library
|
||||
function(append_coverage_compiler_flags_to_target name)
|
||||
separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
|
||||
target_compile_options(${name} PRIVATE ${_flag_list})
|
||||
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
|
||||
target_link_libraries(${name} PRIVATE gcov)
|
||||
endif()
|
||||
endfunction()
|
||||
@@ -130,16 +130,7 @@ else ()
|
||||
>)
|
||||
endif ()
|
||||
|
||||
if (use_mold)
|
||||
# use mold linker if available
|
||||
execute_process (
|
||||
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version
|
||||
ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
|
||||
if ("${LD_VERSION}" MATCHES "mold")
|
||||
target_link_libraries (common INTERFACE -fuse-ld=mold)
|
||||
endif ()
|
||||
unset (LD_VERSION)
|
||||
elseif (use_gold AND is_gcc)
|
||||
if (use_gold AND is_gcc)
|
||||
# use gold linker if available
|
||||
execute_process (
|
||||
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version
|
||||
@@ -171,7 +162,9 @@ elseif (use_gold AND is_gcc)
|
||||
$<$<NOT:$<BOOL:${static}>>:-Wl,--disable-new-dtags>)
|
||||
endif ()
|
||||
unset (LD_VERSION)
|
||||
elseif (use_lld)
|
||||
endif ()
|
||||
|
||||
if (use_lld)
|
||||
# use lld linker if available
|
||||
execute_process (
|
||||
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version
|
||||
@@ -182,7 +175,6 @@ elseif (use_lld)
|
||||
unset (LD_VERSION)
|
||||
endif()
|
||||
|
||||
|
||||
if (assert)
|
||||
foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE)
|
||||
STRING (REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}")
|
||||
|
||||
@@ -23,11 +23,6 @@ else()
|
||||
message(STATUS "ACL not found, continuing without ACL support")
|
||||
endif()
|
||||
|
||||
add_library(libxrpl INTERFACE)
|
||||
target_link_libraries(libxrpl INTERFACE xrpl_core)
|
||||
add_library(xrpl::libxrpl ALIAS libxrpl)
|
||||
|
||||
|
||||
#[===============================[
|
||||
beast/legacy FILES:
|
||||
TODO: review these sources for removal or replacement
|
||||
@@ -84,7 +79,6 @@ target_sources (xrpl_core PRIVATE
|
||||
subdir: protocol
|
||||
#]===============================]
|
||||
src/ripple/protocol/impl/AccountID.cpp
|
||||
src/ripple/protocol/impl/AMMCore.cpp
|
||||
src/ripple/protocol/impl/Book.cpp
|
||||
src/ripple/protocol/impl/BuildInfo.cpp
|
||||
src/ripple/protocol/impl/ErrorCodes.cpp
|
||||
@@ -92,13 +86,10 @@ target_sources (xrpl_core PRIVATE
|
||||
src/ripple/protocol/impl/Indexes.cpp
|
||||
src/ripple/protocol/impl/InnerObjectFormats.cpp
|
||||
src/ripple/protocol/impl/Issue.cpp
|
||||
src/ripple/protocol/impl/STIssue.cpp
|
||||
src/ripple/protocol/impl/Keylet.cpp
|
||||
src/ripple/protocol/impl/LedgerFormats.cpp
|
||||
src/ripple/protocol/impl/LedgerHeader.cpp
|
||||
src/ripple/protocol/impl/PublicKey.cpp
|
||||
src/ripple/protocol/impl/Quality.cpp
|
||||
src/ripple/protocol/impl/QualityFunction.cpp
|
||||
src/ripple/protocol/impl/Rate2.cpp
|
||||
src/ripple/protocol/impl/Rules.cpp
|
||||
src/ripple/protocol/impl/SField.cpp
|
||||
@@ -113,9 +104,7 @@ target_sources (xrpl_core PRIVATE
|
||||
src/ripple/protocol/impl/STObject.cpp
|
||||
src/ripple/protocol/impl/STParsedJSON.cpp
|
||||
src/ripple/protocol/impl/STPathSet.cpp
|
||||
src/ripple/protocol/impl/STXChainBridge.cpp
|
||||
src/ripple/protocol/impl/STTx.cpp
|
||||
src/ripple/protocol/impl/XChainAttestations.cpp
|
||||
src/ripple/protocol/impl/STValidation.cpp
|
||||
src/ripple/protocol/impl/STVar.cpp
|
||||
src/ripple/protocol/impl/STVector256.cpp
|
||||
@@ -129,9 +118,6 @@ target_sources (xrpl_core PRIVATE
|
||||
src/ripple/protocol/impl/UintTypes.cpp
|
||||
src/ripple/protocol/impl/digest.cpp
|
||||
src/ripple/protocol/impl/tokens.cpp
|
||||
src/ripple/protocol/impl/NFTSyntheticSerializer.cpp
|
||||
src/ripple/protocol/impl/NFTokenID.cpp
|
||||
src/ripple/protocol/impl/NFTokenOfferID.cpp
|
||||
#[===============================[
|
||||
main sources:
|
||||
subdir: crypto
|
||||
@@ -158,10 +144,10 @@ target_link_libraries (xrpl_core
|
||||
PUBLIC
|
||||
OpenSSL::Crypto
|
||||
Ripple::boost
|
||||
wasmedge::wasmedge
|
||||
NIH::WasmEdge
|
||||
Ripple::syslibs
|
||||
secp256k1::secp256k1
|
||||
ed25519::ed25519
|
||||
NIH::secp256k1
|
||||
NIH::ed25519-donna
|
||||
date::date
|
||||
Ripple::opts)
|
||||
#[=================================[
|
||||
@@ -169,54 +155,31 @@ target_link_libraries (xrpl_core
|
||||
#]=================================]
|
||||
install (
|
||||
FILES
|
||||
src/ripple/basics/algorithm.h
|
||||
src/ripple/basics/Archive.h
|
||||
src/ripple/basics/base64.h
|
||||
src/ripple/basics/base_uint.h
|
||||
src/ripple/basics/BasicConfig.h
|
||||
src/ripple/basics/Blob.h
|
||||
src/ripple/basics/Buffer.h
|
||||
src/ripple/basics/ByteUtilities.h
|
||||
src/ripple/basics/chrono.h
|
||||
src/ripple/basics/comparators.h
|
||||
src/ripple/basics/CompressionAlgorithms.h
|
||||
src/ripple/basics/contract.h
|
||||
src/ripple/basics/CountedObject.h
|
||||
src/ripple/basics/DecayingSample.h
|
||||
src/ripple/basics/Expected.h
|
||||
src/ripple/basics/FeeUnits.h
|
||||
src/ripple/basics/FileUtilities.h
|
||||
src/ripple/basics/hardened_hash.h
|
||||
src/ripple/basics/IOUAmount.h
|
||||
src/ripple/basics/join.h
|
||||
src/ripple/basics/KeyCache.h
|
||||
src/ripple/basics/LocalValue.h
|
||||
src/ripple/basics/Log.h
|
||||
src/ripple/basics/make_SSLContext.h
|
||||
src/ripple/basics/MathUtilities.h
|
||||
src/ripple/basics/mulDiv.h
|
||||
src/ripple/basics/Number.h
|
||||
src/ripple/basics/partitioned_unordered_map.h
|
||||
src/ripple/basics/PerfLog.h
|
||||
src/ripple/basics/random.h
|
||||
src/ripple/basics/RangeSet.h
|
||||
src/ripple/basics/README.md
|
||||
src/ripple/basics/ResolverAsio.h
|
||||
src/ripple/basics/Resolver.h
|
||||
src/ripple/basics/safe_cast.h
|
||||
src/ripple/basics/scope.h
|
||||
src/ripple/basics/SHAMapHash.h
|
||||
src/ripple/basics/Slice.h
|
||||
src/ripple/basics/spinlock.h
|
||||
src/ripple/basics/strHex.h
|
||||
src/ripple/basics/StringUtilities.h
|
||||
src/ripple/basics/TaggedCache.h
|
||||
src/ripple/basics/tagged_integer.h
|
||||
src/ripple/basics/ThreadSafetyAnalysis.h
|
||||
src/ripple/basics/ToString.h
|
||||
src/ripple/basics/UnorderedContainers.h
|
||||
src/ripple/basics/UptimeClock.h
|
||||
src/ripple/basics/XRPAmount.h
|
||||
src/ripple/basics/algorithm.h
|
||||
src/ripple/basics/base_uint.h
|
||||
src/ripple/basics/chrono.h
|
||||
src/ripple/basics/contract.h
|
||||
src/ripple/basics/FeeUnits.h
|
||||
src/ripple/basics/hardened_hash.h
|
||||
src/ripple/basics/strHex.h
|
||||
DESTINATION include/ripple/basics)
|
||||
install (
|
||||
FILES
|
||||
@@ -227,7 +190,6 @@ install (
|
||||
install (
|
||||
FILES
|
||||
src/ripple/json/JsonPropertyStream.h
|
||||
src/ripple/json/MultivarJson.h
|
||||
src/ripple/json/Object.h
|
||||
src/ripple/json/Output.h
|
||||
src/ripple/json/Writer.h
|
||||
@@ -241,45 +203,31 @@ install (
|
||||
FILES
|
||||
src/ripple/json/impl/json_assert.h
|
||||
DESTINATION include/ripple/json/impl)
|
||||
|
||||
install (
|
||||
FILES
|
||||
src/ripple/net/RPCErr.h
|
||||
DESTINATION include/ripple/net)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/protocol/AccountID.h
|
||||
src/ripple/protocol/AMMCore.h
|
||||
src/ripple/protocol/AmountConversions.h
|
||||
src/ripple/protocol/Book.h
|
||||
src/ripple/protocol/BuildInfo.h
|
||||
src/ripple/protocol/ErrorCodes.h
|
||||
src/ripple/protocol/Feature.h
|
||||
src/ripple/protocol/Fees.h
|
||||
src/ripple/protocol/HashPrefix.h
|
||||
src/ripple/protocol/Indexes.h
|
||||
src/ripple/protocol/InnerObjectFormats.h
|
||||
src/ripple/protocol/Issue.h
|
||||
src/ripple/protocol/json_get_or_throw.h
|
||||
src/ripple/protocol/KeyType.h
|
||||
src/ripple/protocol/Keylet.h
|
||||
src/ripple/protocol/KnownFormats.h
|
||||
src/ripple/protocol/LedgerFormats.h
|
||||
src/ripple/protocol/LedgerHeader.h
|
||||
src/ripple/protocol/NFTSyntheticSerializer.h
|
||||
src/ripple/protocol/NFTokenID.h
|
||||
src/ripple/protocol/NFTokenOfferID.h
|
||||
src/ripple/protocol/Protocol.h
|
||||
src/ripple/protocol/PublicKey.h
|
||||
src/ripple/protocol/Quality.h
|
||||
src/ripple/protocol/QualityFunction.h
|
||||
src/ripple/protocol/Rate.h
|
||||
src/ripple/protocol/Rules.h
|
||||
src/ripple/protocol/SField.h
|
||||
src/ripple/protocol/SOTemplate.h
|
||||
src/ripple/protocol/STAccount.h
|
||||
src/ripple/protocol/STAmount.h
|
||||
src/ripple/protocol/STIssue.h
|
||||
src/ripple/protocol/STArray.h
|
||||
src/ripple/protocol/STBase.h
|
||||
src/ripple/protocol/STBitString.h
|
||||
@@ -291,8 +239,6 @@ install (
|
||||
src/ripple/protocol/STParsedJSON.h
|
||||
src/ripple/protocol/STPathSet.h
|
||||
src/ripple/protocol/STTx.h
|
||||
src/ripple/protocol/XChainAttestations.h
|
||||
src/ripple/protocol/STXChainBridge.h
|
||||
src/ripple/protocol/STValidation.h
|
||||
src/ripple/protocol/STVector256.h
|
||||
src/ripple/protocol/SecretKey.h
|
||||
@@ -308,9 +254,6 @@ install (
|
||||
src/ripple/protocol/UintTypes.h
|
||||
src/ripple/protocol/digest.h
|
||||
src/ripple/protocol/jss.h
|
||||
src/ripple/protocol/serialize.h
|
||||
src/ripple/protocol/nft.h
|
||||
src/ripple/protocol/nftPageMask.h
|
||||
src/ripple/protocol/tokens.h
|
||||
DESTINATION include/ripple/protocol)
|
||||
install (
|
||||
@@ -318,35 +261,7 @@ install (
|
||||
src/ripple/protocol/impl/STVar.h
|
||||
src/ripple/protocol/impl/secp256k1.h
|
||||
DESTINATION include/ripple/protocol/impl)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/resource/Fees.h
|
||||
src/ripple/resource/Charge.h
|
||||
DESTINATION include/ripple/resource)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/server/Port.h
|
||||
src/ripple/server/Server.h
|
||||
src/ripple/server/Session.h
|
||||
src/ripple/server/SimpleWriter.h
|
||||
src/ripple/server/Writer.h
|
||||
src/ripple/server/WSSession.h
|
||||
src/ripple/server/Handoff.h
|
||||
DESTINATION include/ripple/server)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/server/impl/ServerImpl.h
|
||||
src/ripple/server/impl/io_list.h
|
||||
src/ripple/server/impl/Door.h
|
||||
src/ripple/server/impl/PlainHTTPPeer.h
|
||||
src/ripple/server/impl/PlainWSPeer.h
|
||||
src/ripple/server/impl/BaseHTTPPeer.h
|
||||
src/ripple/server/impl/BaseWSPeer.h
|
||||
src/ripple/server/impl/BasePeer.h
|
||||
src/ripple/server/impl/LowestLayer.h
|
||||
src/ripple/server/impl/SSLHTTPPeer.h
|
||||
src/ripple/server/impl/SSLWSPeer.h
|
||||
DESTINATION include/ripple/server/impl)
|
||||
|
||||
#[===================================[
|
||||
beast/legacy headers installation
|
||||
#]===================================]
|
||||
@@ -358,7 +273,6 @@ install (
|
||||
DESTINATION include/ripple/beast/clock)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/core/CurrentThreadName.h
|
||||
src/ripple/beast/core/LexicalCast.h
|
||||
src/ripple/beast/core/List.h
|
||||
src/ripple/beast/core/SemanticVersion.h
|
||||
@@ -372,14 +286,6 @@ install (
|
||||
install (
|
||||
FILES src/ripple/beast/hash/impl/xxhash.h
|
||||
DESTINATION include/ripple/beast/hash/impl)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/net/IPAddress.h
|
||||
src/ripple/beast/net/IPAddressConversion.h
|
||||
src/ripple/beast/net/IPAddressV4.h
|
||||
src/ripple/beast/net/IPAddressV6.h
|
||||
src/ripple/beast/net/IPEndpoint.h
|
||||
DESTINATION include/ripple/beast/net)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/rfc2616.h
|
||||
@@ -387,33 +293,12 @@ install (
|
||||
src/ripple/beast/unit_test.h
|
||||
src/ripple/beast/xor_shift_engine.h
|
||||
DESTINATION include/ripple/beast)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/unit_test/amount.hpp
|
||||
src/ripple/beast/unit_test/dstream.hpp
|
||||
src/ripple/beast/unit_test/global_suites.hpp
|
||||
src/ripple/beast/unit_test/main.cpp
|
||||
src/ripple/beast/unit_test/match.hpp
|
||||
src/ripple/beast/unit_test/recorder.hpp
|
||||
src/ripple/beast/unit_test/reporter.hpp
|
||||
src/ripple/beast/unit_test/results.hpp
|
||||
src/ripple/beast/unit_test/runner.hpp
|
||||
src/ripple/beast/unit_test/suite.hpp
|
||||
src/ripple/beast/unit_test/suite_info.hpp
|
||||
src/ripple/beast/unit_test/suite_list.hpp
|
||||
src/ripple/beast/unit_test/thread.hpp
|
||||
DESTINATION include/ripple/beast/unit_test)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/unit_test/detail/const_container.hpp
|
||||
DESTINATION include/ripple/beast/unit_test/detail)
|
||||
install (
|
||||
FILES
|
||||
src/ripple/beast/utility/Journal.h
|
||||
src/ripple/beast/utility/PropertyStream.h
|
||||
src/ripple/beast/utility/Zero.h
|
||||
src/ripple/beast/utility/rngfill.h
|
||||
src/ripple/beast/utility/WrappedSink.h
|
||||
DESTINATION include/ripple/beast/utility)
|
||||
# WARNING!! -- horrible levelization ahead
|
||||
# (these files should be isolated or moved...but
|
||||
@@ -501,8 +386,6 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/reporting/ReportingETL.cpp
|
||||
src/ripple/app/reporting/ETLSource.cpp
|
||||
src/ripple/app/reporting/P2pProxy.cpp
|
||||
src/ripple/app/misc/impl/AMMHelpers.cpp
|
||||
src/ripple/app/misc/impl/AMMUtils.cpp
|
||||
src/ripple/app/misc/CanonicalTXSet.cpp
|
||||
src/ripple/app/misc/FeeVoteImpl.cpp
|
||||
src/ripple/app/misc/HashRouter.cpp
|
||||
@@ -513,7 +396,6 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/misc/detail/impl/WorkSSL.cpp
|
||||
src/ripple/app/misc/impl/AccountTxPaging.cpp
|
||||
src/ripple/app/misc/impl/AmendmentTable.cpp
|
||||
src/ripple/app/misc/impl/DeliverMax.cpp
|
||||
src/ripple/app/misc/impl/LoadFeeTrack.cpp
|
||||
src/ripple/app/misc/impl/Manifest.cpp
|
||||
src/ripple/app/misc/impl/Transaction.cpp
|
||||
@@ -530,8 +412,6 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/paths/RippleCalc.cpp
|
||||
src/ripple/app/paths/RippleLineCache.cpp
|
||||
src/ripple/app/paths/TrustLine.cpp
|
||||
src/ripple/app/paths/impl/AMMLiquidity.cpp
|
||||
src/ripple/app/paths/impl/AMMOffer.cpp
|
||||
src/ripple/app/paths/impl/BookStep.cpp
|
||||
src/ripple/app/paths/impl/DirectStep.cpp
|
||||
src/ripple/app/paths/impl/PaySteps.cpp
|
||||
@@ -548,12 +428,6 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/rdb/impl/UnitaryShard.cpp
|
||||
src/ripple/app/rdb/impl/Vacuum.cpp
|
||||
src/ripple/app/rdb/impl/Wallet.cpp
|
||||
src/ripple/app/tx/impl/AMMBid.cpp
|
||||
src/ripple/app/tx/impl/AMMCreate.cpp
|
||||
src/ripple/app/tx/impl/AMMDelete.cpp
|
||||
src/ripple/app/tx/impl/AMMDeposit.cpp
|
||||
src/ripple/app/tx/impl/AMMVote.cpp
|
||||
src/ripple/app/tx/impl/AMMWithdraw.cpp
|
||||
src/ripple/app/tx/impl/ApplyContext.cpp
|
||||
src/ripple/app/tx/impl/BookTip.cpp
|
||||
src/ripple/app/tx/impl/CancelCheck.cpp
|
||||
@@ -561,13 +435,11 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/tx/impl/CashCheck.cpp
|
||||
src/ripple/app/tx/impl/Change.cpp
|
||||
src/ripple/app/tx/impl/ClaimReward.cpp
|
||||
src/ripple/app/tx/impl/Clawback.cpp
|
||||
src/ripple/app/tx/impl/CreateCheck.cpp
|
||||
src/ripple/app/tx/impl/CreateOffer.cpp
|
||||
src/ripple/app/tx/impl/CreateTicket.cpp
|
||||
src/ripple/app/tx/impl/DeleteAccount.cpp
|
||||
src/ripple/app/tx/impl/DepositPreauth.cpp
|
||||
src/ripple/app/tx/impl/DID.cpp
|
||||
src/ripple/app/tx/impl/Escrow.cpp
|
||||
src/ripple/app/tx/impl/GenesisMint.cpp
|
||||
src/ripple/app/tx/impl/Import.cpp
|
||||
@@ -588,7 +460,6 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/app/tx/impl/SetRegularKey.cpp
|
||||
src/ripple/app/tx/impl/SetSignerList.cpp
|
||||
src/ripple/app/tx/impl/SetTrust.cpp
|
||||
src/ripple/app/tx/impl/XChainBridge.cpp
|
||||
src/ripple/app/tx/impl/SignerEntries.cpp
|
||||
src/ripple/app/tx/impl/Taker.cpp
|
||||
src/ripple/app/tx/impl/Transactor.cpp
|
||||
@@ -625,7 +496,9 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/core/impl/JobQueue.cpp
|
||||
src/ripple/core/impl/LoadEvent.cpp
|
||||
src/ripple/core/impl/LoadMonitor.cpp
|
||||
src/ripple/core/impl/SNTPClock.cpp
|
||||
src/ripple/core/impl/SociDB.cpp
|
||||
src/ripple/core/impl/TimeKeeper.cpp
|
||||
src/ripple/core/impl/Workers.cpp
|
||||
src/ripple/core/Pg.cpp
|
||||
#[===============================[
|
||||
@@ -731,7 +604,6 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/rpc/handlers/AccountOffers.cpp
|
||||
src/ripple/rpc/handlers/AccountNamespace.cpp
|
||||
src/ripple/rpc/handlers/AccountTx.cpp
|
||||
src/ripple/rpc/handlers/AMMInfo.cpp
|
||||
src/ripple/rpc/handlers/BlackList.cpp
|
||||
src/ripple/rpc/handlers/BookOffers.cpp
|
||||
src/ripple/rpc/handlers/CanDelete.cpp
|
||||
@@ -798,11 +670,14 @@ target_sources (rippled PRIVATE
|
||||
src/ripple/rpc/impl/RPCHandler.cpp
|
||||
src/ripple/rpc/impl/RPCHelpers.cpp
|
||||
src/ripple/rpc/impl/Role.cpp
|
||||
src/ripple/rpc/impl/ServerHandler.cpp
|
||||
src/ripple/rpc/impl/ServerHandlerImp.cpp
|
||||
src/ripple/rpc/impl/ShardArchiveHandler.cpp
|
||||
src/ripple/rpc/impl/ShardVerificationScheduler.cpp
|
||||
src/ripple/rpc/impl/Status.cpp
|
||||
src/ripple/rpc/impl/TransactionSign.cpp
|
||||
src/ripple/rpc/impl/NFTokenID.cpp
|
||||
src/ripple/rpc/impl/NFTokenOfferID.cpp
|
||||
src/ripple/rpc/impl/NFTSyntheticSerializer.cpp
|
||||
#[===============================[
|
||||
main sources:
|
||||
subdir: perflog
|
||||
@@ -838,18 +713,13 @@ if (tests)
|
||||
src/test/app/AccountDelete_test.cpp
|
||||
src/test/app/AccountTxPaging_test.cpp
|
||||
src/test/app/AmendmentTable_test.cpp
|
||||
src/test/app/AMM_test.cpp
|
||||
src/test/app/AMMCalc_test.cpp
|
||||
src/test/app/AMMExtended_test.cpp
|
||||
src/test/app/BaseFee_test.cpp
|
||||
src/test/app/Check_test.cpp
|
||||
src/test/app/ClaimReward_test.cpp
|
||||
src/test/app/Clawback_test.cpp
|
||||
src/test/app/CrossingLimits_test.cpp
|
||||
src/test/app/DeliverMin_test.cpp
|
||||
src/test/app/DepositAuth_test.cpp
|
||||
src/test/app/Discrepancy_test.cpp
|
||||
src/test/app/DID_test.cpp
|
||||
src/test/app/DNS_test.cpp
|
||||
src/test/app/Escrow_test.cpp
|
||||
src/test/app/FeeVote_test.cpp
|
||||
@@ -879,11 +749,9 @@ if (tests)
|
||||
src/test/app/PseudoTx_test.cpp
|
||||
src/test/app/RCLCensorshipDetector_test.cpp
|
||||
src/test/app/RCLValidations_test.cpp
|
||||
src/test/app/ReducedOffer_test.cpp
|
||||
src/test/app/Regression_test.cpp
|
||||
src/test/app/Remit_test.cpp
|
||||
src/test/app/SHAMapStore_test.cpp
|
||||
src/test/app/XChain_test.cpp
|
||||
src/test/app/SetAuth_test.cpp
|
||||
src/test/app/SetHook_test.cpp
|
||||
src/test/app/SetHookTSH_test.cpp
|
||||
@@ -993,7 +861,6 @@ if (tests)
|
||||
src/test/json/Output_test.cpp
|
||||
src/test/json/Writer_test.cpp
|
||||
src/test/json/json_value_test.cpp
|
||||
src/test/json/MultivarJson_test.cpp
|
||||
#[===============================[
|
||||
test sources:
|
||||
subdir: jtx
|
||||
@@ -1001,23 +868,20 @@ if (tests)
|
||||
src/test/jtx/Env_test.cpp
|
||||
src/test/jtx/WSClient_test.cpp
|
||||
src/test/jtx/impl/Account.cpp
|
||||
src/test/jtx/impl/AMM.cpp
|
||||
src/test/jtx/impl/AMMTest.cpp
|
||||
src/test/jtx/impl/Env.cpp
|
||||
src/test/jtx/impl/JSONRPCClient.cpp
|
||||
src/test/jtx/impl/TestHelpers.cpp
|
||||
src/test/jtx/impl/ManualTimeKeeper.cpp
|
||||
src/test/jtx/impl/WSClient.cpp
|
||||
src/test/jtx/impl/hook.cpp
|
||||
src/test/jtx/impl/acctdelete.cpp
|
||||
src/test/jtx/impl/account_txn_id.cpp
|
||||
src/test/jtx/impl/amount.cpp
|
||||
src/test/jtx/impl/attester.cpp
|
||||
src/test/jtx/impl/balance.cpp
|
||||
src/test/jtx/impl/check.cpp
|
||||
src/test/jtx/impl/delivermin.cpp
|
||||
src/test/jtx/impl/deposit.cpp
|
||||
src/test/jtx/impl/did.cpp
|
||||
src/test/jtx/impl/envconfig.cpp
|
||||
src/test/jtx/impl/escrow.cpp
|
||||
src/test/jtx/impl/fee.cpp
|
||||
src/test/jtx/impl/flags.cpp
|
||||
src/test/jtx/impl/genesis.cpp
|
||||
@@ -1042,7 +906,6 @@ if (tests)
|
||||
src/test/jtx/impl/remit.cpp
|
||||
src/test/jtx/impl/sendmax.cpp
|
||||
src/test/jtx/impl/seq.cpp
|
||||
src/test/jtx/impl/xchain_bridge.cpp
|
||||
src/test/jtx/impl/sig.cpp
|
||||
src/test/jtx/impl/tag.cpp
|
||||
src/test/jtx/impl/ticket.cpp
|
||||
@@ -1136,7 +999,6 @@ if (tests)
|
||||
src/test/rpc/AccountSet_test.cpp
|
||||
src/test/rpc/AccountTx_test.cpp
|
||||
src/test/rpc/AmendmentBlocked_test.cpp
|
||||
src/test/rpc/AMMInfo_test.cpp
|
||||
src/test/rpc/Book_test.cpp
|
||||
src/test/rpc/Catalogue_test.cpp
|
||||
src/test/rpc/DepositAuthorized_test.cpp
|
||||
@@ -1148,7 +1010,6 @@ if (tests)
|
||||
src/test/rpc/KeyGeneration_test.cpp
|
||||
src/test/rpc/LedgerClosed_test.cpp
|
||||
src/test/rpc/LedgerData_test.cpp
|
||||
src/test/rpc/LedgerHeader_test.cpp
|
||||
src/test/rpc/LedgerRPC_test.cpp
|
||||
src/test/rpc/LedgerRequestRPC_test.cpp
|
||||
src/test/rpc/ManifestRPC_test.cpp
|
||||
@@ -1173,7 +1034,6 @@ if (tests)
|
||||
src/test/rpc/ValidatorInfo_test.cpp
|
||||
src/test/rpc/ValidatorRPC_test.cpp
|
||||
src/test/rpc/Version_test.cpp
|
||||
src/test/rpc/Handler_test.cpp
|
||||
#[===============================[
|
||||
test sources:
|
||||
subdir: server
|
||||
|
||||
@@ -2,37 +2,97 @@
|
||||
coverage report target
|
||||
#]===================================================================]
|
||||
|
||||
if(NOT coverage)
|
||||
message(FATAL_ERROR "Code coverage not enabled! Aborting ...")
|
||||
endif()
|
||||
if (coverage)
|
||||
if (is_clang)
|
||||
if (APPLE)
|
||||
execute_process (COMMAND xcrun -f llvm-profdata
|
||||
OUTPUT_VARIABLE LLVM_PROFDATA
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
else ()
|
||||
find_program (LLVM_PROFDATA llvm-profdata)
|
||||
endif ()
|
||||
if (NOT LLVM_PROFDATA)
|
||||
message (WARNING "unable to find llvm-profdata - skipping coverage_report target")
|
||||
endif ()
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
|
||||
message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag")
|
||||
return()
|
||||
endif()
|
||||
if (APPLE)
|
||||
execute_process (COMMAND xcrun -f llvm-cov
|
||||
OUTPUT_VARIABLE LLVM_COV
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
else ()
|
||||
find_program (LLVM_COV llvm-cov)
|
||||
endif ()
|
||||
if (NOT LLVM_COV)
|
||||
message (WARNING "unable to find llvm-cov - skipping coverage_report target")
|
||||
endif ()
|
||||
|
||||
include(CodeCoverage)
|
||||
set (extract_pattern "")
|
||||
if (coverage_core_only)
|
||||
set (extract_pattern "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/")
|
||||
endif ()
|
||||
|
||||
# The instructions for these commands come from the `CodeCoverage` module,
|
||||
# which was copied from https://github.com/bilke/cmake-modules, commit fb7d2a3,
|
||||
# then locally changed (see CHANGES: section in `CodeCoverage.cmake`)
|
||||
if (LLVM_COV AND LLVM_PROFDATA)
|
||||
add_custom_target (coverage_report
|
||||
USES_TERMINAL
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage - results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests."
|
||||
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
|
||||
COMMAND ${LLVM_PROFDATA}
|
||||
merge -sparse default.profraw -o rip.profdata
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
|
||||
COMMAND ${LLVM_COV}
|
||||
report -instr-profile=rip.profdata
|
||||
$<TARGET_FILE:rippled> ${extract_pattern}
|
||||
# generate html report
|
||||
COMMAND ${LLVM_COV}
|
||||
show -format=html -output-dir=${CMAKE_BINARY_DIR}/coverage
|
||||
-instr-profile=rip.profdata
|
||||
$<TARGET_FILE:rippled> ${extract_pattern}
|
||||
BYPRODUCTS coverage/index.html)
|
||||
endif ()
|
||||
elseif (is_gcc)
|
||||
find_program (LCOV lcov)
|
||||
if (NOT LCOV)
|
||||
message (WARNING "unable to find lcov - skipping coverage_report target")
|
||||
endif ()
|
||||
|
||||
set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args})
|
||||
if(NOT GCOVR_ADDITIONAL_ARGS STREQUAL "")
|
||||
separate_arguments(GCOVR_ADDITIONAL_ARGS)
|
||||
endif()
|
||||
find_program (GENHTML genhtml)
|
||||
if (NOT GENHTML)
|
||||
message (WARNING "unable to find genhtml - skipping coverage_report target")
|
||||
endif ()
|
||||
|
||||
list(APPEND GCOVR_ADDITIONAL_ARGS
|
||||
--exclude-throw-branches
|
||||
--exclude-noncode-lines
|
||||
--exclude-unreachable-branches -s
|
||||
-j ${coverage_test_parallelism})
|
||||
set (extract_pattern "*")
|
||||
if (coverage_core_only)
|
||||
set (extract_pattern "*/src/ripple/*")
|
||||
endif ()
|
||||
|
||||
setup_target_for_coverage_gcovr(
|
||||
NAME coverage
|
||||
FORMAT ${coverage_format}
|
||||
EXECUTABLE rippled
|
||||
EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
|
||||
EXCLUDE "src/test" "${CMAKE_BINARY_DIR}/proto_gen" "${CMAKE_BINARY_DIR}/proto_gen_grpc"
|
||||
DEPENDENCIES rippled
|
||||
)
|
||||
if (LCOV AND GENHTML)
|
||||
add_custom_target (coverage_report
|
||||
USES_TERMINAL
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Generating coverage- results will be in ${CMAKE_BINARY_DIR}/coverage/index.html."
|
||||
# create baseline info file
|
||||
COMMAND ${LCOV}
|
||||
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -i -o baseline.info
|
||||
| grep -v "ignoring data for external file"
|
||||
# run tests
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Running rippled tests for coverage report."
|
||||
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
|
||||
# Create test coverage data file
|
||||
COMMAND ${LCOV}
|
||||
--no-external -d "${CMAKE_CURRENT_SOURCE_DIR}" -c -d . -o tests.info
|
||||
| grep -v "ignoring data for external file"
|
||||
# Combine baseline and test coverage data
|
||||
COMMAND ${LCOV}
|
||||
-a baseline.info -a tests.info -o lcov-all.info
|
||||
# extract our files
|
||||
COMMAND ${LCOV}
|
||||
-e lcov-all.info "${extract_pattern}" -o lcov.info
|
||||
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
|
||||
COMMAND ${LCOV} --summary lcov.info
|
||||
# generate HTML report
|
||||
COMMAND ${GENHTML}
|
||||
-o ${CMAKE_BINARY_DIR}/coverage lcov.info
|
||||
BYPRODUCTS coverage/index.html)
|
||||
endif ()
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
@@ -1,13 +1,6 @@
|
||||
#[===================================================================[
|
||||
docs target (optional)
|
||||
#]===================================================================]
|
||||
|
||||
# Early return if the `docs` directory is missing,
|
||||
# e.g. when we are building a Conan package.
|
||||
if(NOT EXISTS docs)
|
||||
return()
|
||||
endif()
|
||||
|
||||
if (tests)
|
||||
find_package (Doxygen)
|
||||
if (NOT TARGET Doxygen::doxygen)
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
|
||||
install (
|
||||
TARGETS
|
||||
ed25519-donna
|
||||
common
|
||||
opts
|
||||
ripple_syslibs
|
||||
@@ -15,6 +16,17 @@ install (
|
||||
RUNTIME DESTINATION bin
|
||||
INCLUDES DESTINATION include)
|
||||
|
||||
if(${INSTALL_SECP256K1})
|
||||
install (
|
||||
TARGETS
|
||||
secp256k1
|
||||
EXPORT RippleExports
|
||||
LIBRARY DESTINATION lib
|
||||
ARCHIVE DESTINATION lib
|
||||
RUNTIME DESTINATION bin
|
||||
INCLUDES DESTINATION include)
|
||||
endif()
|
||||
|
||||
install (EXPORT RippleExports
|
||||
FILE RippleTargets.cmake
|
||||
NAMESPACE Ripple::
|
||||
|
||||
@@ -23,15 +23,15 @@ target_compile_options (opts
|
||||
INTERFACE
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
|
||||
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
|
||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage>
|
||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping>
|
||||
$<$<BOOL:${profile}>:-pg>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
||||
|
||||
target_link_libraries (opts
|
||||
INTERFACE
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
|
||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-fprofile-arcs -ftest-coverage>
|
||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-fprofile-instr-generate -fcoverage-mapping>
|
||||
$<$<BOOL:${profile}>:-pg>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ if (is_multiconfig)
|
||||
file(GLOB md_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} CONFIGURE_DEPENDS
|
||||
*.md)
|
||||
LIST(APPEND all_sources ${md_files})
|
||||
foreach (_target secp256k1::secp256k1 ed25519::ed25519 xrpl_core rippled)
|
||||
foreach (_target secp256k1 ed25519-donna pbufs xrpl_core rippled)
|
||||
get_target_property (_type ${_target} TYPE)
|
||||
if(_type STREQUAL "INTERFACE_LIBRARY")
|
||||
continue()
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
convenience variables and sanity checks
|
||||
#]===================================================================]
|
||||
|
||||
include(ProcessorCount)
|
||||
|
||||
if (NOT ep_procs)
|
||||
ProcessorCount(ep_procs)
|
||||
if (ep_procs GREATER 1)
|
||||
|
||||
@@ -2,129 +2,121 @@
|
||||
declare user options/settings
|
||||
#]===================================================================]
|
||||
|
||||
include(ProcessorCount)
|
||||
option (assert "Enables asserts, even in release builds" OFF)
|
||||
|
||||
ProcessorCount(PROCESSOR_COUNT)
|
||||
option (reporting "Build rippled with reporting mode enabled" OFF)
|
||||
|
||||
option(assert "Enables asserts, even in release builds" OFF)
|
||||
option (tests "Build tests" ON)
|
||||
|
||||
option(reporting "Build rippled with reporting mode enabled" OFF)
|
||||
|
||||
option(tests "Build tests" ON)
|
||||
|
||||
option(unity "Creates a build using UNITY support in cmake. This is the default" ON)
|
||||
if(unity)
|
||||
if(NOT is_ci)
|
||||
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
|
||||
endif()
|
||||
endif()
|
||||
if(is_gcc OR is_clang)
|
||||
option(coverage "Generates coverage info." OFF)
|
||||
option(profile "Add profiling flags" OFF)
|
||||
set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
|
||||
"Unit tests parallelism for the purpose of coverage report.")
|
||||
set(coverage_format "html-details" CACHE STRING
|
||||
"Output format of the coverage report.")
|
||||
set(coverage_extra_args "" CACHE STRING
|
||||
"Additional arguments to pass to gcovr.")
|
||||
set(coverage_test "" CACHE STRING
|
||||
option (unity "Creates a build using UNITY support in cmake. This is the default" ON)
|
||||
if (unity)
|
||||
if (NOT is_ci)
|
||||
set (CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
|
||||
endif ()
|
||||
endif ()
|
||||
if (is_gcc OR is_clang)
|
||||
option (coverage "Generates coverage info." OFF)
|
||||
option (profile "Add profiling flags" OFF)
|
||||
set (coverage_test "" CACHE STRING
|
||||
"On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
|
||||
if(coverage_test AND NOT coverage)
|
||||
set(coverage ON CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
option(wextra "compile with extra gcc/clang warnings enabled" ON)
|
||||
else()
|
||||
set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
if(is_linux)
|
||||
option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
|
||||
option(static "link protobuf, openssl, libc++, and boost statically" ON)
|
||||
option(perf "Enables flags that assist with perf recording" OFF)
|
||||
option(use_gold "enables detection of gold (binutils) linker" ON)
|
||||
option(use_mold "enables detection of mold (binutils) linker" ON)
|
||||
else()
|
||||
if (coverage_test AND NOT coverage)
|
||||
set (coverage ON CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif ()
|
||||
option (coverage_core_only
|
||||
"Include only src/ripple files when generating coverage report. \
|
||||
Set to OFF to include all sources in coverage report."
|
||||
ON)
|
||||
option (wextra "compile with extra gcc/clang warnings enabled" ON)
|
||||
else ()
|
||||
set (profile OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set (coverage OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set (wextra OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif ()
|
||||
if (is_linux)
|
||||
option (BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
|
||||
option (static "link protobuf, openssl, libc++, and boost statically" ON)
|
||||
option (perf "Enables flags that assist with perf recording" OFF)
|
||||
option (use_gold "enables detection of gold (binutils) linker" ON)
|
||||
else ()
|
||||
# we are not ready to allow shared-libs on windows because it would require
|
||||
# export declarations. On macos it's more feasible, but static openssl
|
||||
# produces odd linker errors, thus we disable shared lib builds for now.
|
||||
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
|
||||
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
|
||||
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
|
||||
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
|
||||
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
|
||||
endif()
|
||||
if(is_clang)
|
||||
option(use_lld "enables detection of lld linker" ON)
|
||||
else()
|
||||
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
|
||||
endif()
|
||||
option(jemalloc "Enables jemalloc for heap profiling" OFF)
|
||||
option(werr "treat warnings as errors" OFF)
|
||||
option(local_protobuf
|
||||
set (BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
|
||||
set (static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
|
||||
set (perf OFF CACHE BOOL "perf flags, linux only" FORCE)
|
||||
set (use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
|
||||
endif ()
|
||||
if (is_clang)
|
||||
option (use_lld "enables detection of lld linker" ON)
|
||||
else ()
|
||||
set (use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
|
||||
endif ()
|
||||
option (jemalloc "Enables jemalloc for heap profiling" OFF)
|
||||
option (werr "treat warnings as errors" OFF)
|
||||
option (local_protobuf
|
||||
"Force a local build of protobuf instead of looking for an installed version." OFF)
|
||||
option(local_grpc
|
||||
option (local_grpc
|
||||
"Force a local build of gRPC instead of looking for an installed version." OFF)
|
||||
|
||||
# this one is a string and therefore can't be an option
|
||||
set(san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
|
||||
set_property(CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
|
||||
if(san)
|
||||
string(TOLOWER ${san} san)
|
||||
set(SAN_FLAG "-fsanitize=${san}")
|
||||
set(SAN_LIB "")
|
||||
if(is_gcc)
|
||||
if(san STREQUAL "address")
|
||||
set(SAN_LIB "asan")
|
||||
elseif(san STREQUAL "thread")
|
||||
set(SAN_LIB "tsan")
|
||||
elseif(san STREQUAL "memory")
|
||||
set(SAN_LIB "msan")
|
||||
elseif(san STREQUAL "undefined")
|
||||
set(SAN_LIB "ubsan")
|
||||
endif()
|
||||
endif()
|
||||
set(_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
|
||||
set(CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
|
||||
check_cxx_compiler_flag(${SAN_FLAG} COMPILER_SUPPORTS_SAN)
|
||||
set(CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
|
||||
if(NOT COMPILER_SUPPORTS_SAN)
|
||||
message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
|
||||
endif()
|
||||
endif()
|
||||
set(container_label "" CACHE STRING "tag to use for package building containers")
|
||||
option(packages_only
|
||||
set (san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
|
||||
set_property (CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
|
||||
if (san)
|
||||
string (TOLOWER ${san} san)
|
||||
set (SAN_FLAG "-fsanitize=${san}")
|
||||
set (SAN_LIB "")
|
||||
if (is_gcc)
|
||||
if (san STREQUAL "address")
|
||||
set (SAN_LIB "asan")
|
||||
elseif (san STREQUAL "thread")
|
||||
set (SAN_LIB "tsan")
|
||||
elseif (san STREQUAL "memory")
|
||||
set (SAN_LIB "msan")
|
||||
elseif (san STREQUAL "undefined")
|
||||
set (SAN_LIB "ubsan")
|
||||
endif ()
|
||||
endif ()
|
||||
set (_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
|
||||
set (CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
|
||||
check_cxx_compiler_flag (${SAN_FLAG} COMPILER_SUPPORTS_SAN)
|
||||
set (CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
|
||||
if (NOT COMPILER_SUPPORTS_SAN)
|
||||
message (FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
|
||||
endif ()
|
||||
endif ()
|
||||
set (container_label "" CACHE STRING "tag to use for package building containers")
|
||||
option (packages_only
|
||||
"ONLY generate package building targets. This is special use-case and almost \
|
||||
certainly not what you want. Use with caution as you won't be able to build \
|
||||
any compiled targets locally." OFF)
|
||||
option(have_package_container
|
||||
option (have_package_container
|
||||
"Sometimes you already have the tagged container you want to use for package \
|
||||
building and you don't want docker to rebuild it. This flag will detach the \
|
||||
dependency of the package build from the container build. It's an advanced \
|
||||
use case and most likely you should not be touching this flag." OFF)
|
||||
|
||||
# the remaining options are obscure and rarely used
|
||||
option(beast_no_unit_test_inline
|
||||
option (beast_no_unit_test_inline
|
||||
"Prevents unit test definitions from being inserted into global table"
|
||||
OFF)
|
||||
option(single_io_service_thread
|
||||
option (single_io_service_thread
|
||||
"Restricts the number of threads calling io_service::run to one. \
|
||||
This can be useful when debugging."
|
||||
OFF)
|
||||
option(boost_show_deprecated
|
||||
option (boost_show_deprecated
|
||||
"Allow boost to fail on deprecated usage. Only useful if you're trying\
|
||||
to find deprecated calls."
|
||||
OFF)
|
||||
option(beast_hashers
|
||||
option (beast_hashers
|
||||
"Use local implementations for sha/ripemd hashes (experimental, not recommended)"
|
||||
OFF)
|
||||
|
||||
if(WIN32)
|
||||
option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
|
||||
else()
|
||||
set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
|
||||
endif()
|
||||
if(coverage)
|
||||
message(STATUS "coverage build requested - forcing Debug build")
|
||||
set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
|
||||
endif()
|
||||
if (WIN32)
|
||||
option (beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
|
||||
else ()
|
||||
set (beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
|
||||
endif ()
|
||||
if (coverage)
|
||||
message (STATUS "coverage build requested - forcing Debug build")
|
||||
set (CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
|
||||
endif ()
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
find_package(Boost 1.83 REQUIRED
|
||||
COMPONENTS
|
||||
chrono
|
||||
container
|
||||
context
|
||||
coroutine
|
||||
date_time
|
||||
filesystem
|
||||
json
|
||||
program_options
|
||||
regex
|
||||
system
|
||||
thread
|
||||
)
|
||||
|
||||
add_library(ripple_boost INTERFACE)
|
||||
add_library(Ripple::boost ALIAS ripple_boost)
|
||||
if(XCODE)
|
||||
target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
|
||||
target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
|
||||
else()
|
||||
target_include_directories(ripple_boost SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
|
||||
endif()
|
||||
|
||||
target_link_libraries(ripple_boost
|
||||
INTERFACE
|
||||
Boost::boost
|
||||
Boost::chrono
|
||||
Boost::container
|
||||
Boost::coroutine
|
||||
Boost::date_time
|
||||
Boost::filesystem
|
||||
Boost::json
|
||||
Boost::program_options
|
||||
Boost::regex
|
||||
Boost::system
|
||||
Boost::iostreams
|
||||
Boost::thread)
|
||||
if(Boost_COMPILER)
|
||||
target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
|
||||
endif()
|
||||
if(san AND is_clang)
|
||||
# TODO: gcc does not support -fsanitize-blacklist...can we do something else
|
||||
# for gcc ?
|
||||
if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
|
||||
get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
|
||||
endif()
|
||||
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
|
||||
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
|
||||
target_compile_options(opts
|
||||
INTERFACE
|
||||
# ignore boost headers for sanitizing
|
||||
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
|
||||
endif()
|
||||
@@ -1,27 +0,0 @@
|
||||
find_package(Protobuf 3.8)
|
||||
|
||||
set(output_dir ${CMAKE_BINARY_DIR}/proto_gen)
|
||||
file(MAKE_DIRECTORY ${output_dir})
|
||||
set(ccbd ${CMAKE_CURRENT_BINARY_DIR})
|
||||
set(CMAKE_CURRENT_BINARY_DIR ${output_dir})
|
||||
protobuf_generate_cpp(PROTO_SRCS PROTO_HDRS src/ripple/proto/ripple.proto)
|
||||
set(CMAKE_CURRENT_BINARY_DIR ${ccbd})
|
||||
|
||||
target_include_directories(xrpl_core SYSTEM PUBLIC
|
||||
# The generated implementation imports the header relative to the output
|
||||
# directory.
|
||||
$<BUILD_INTERFACE:${output_dir}>
|
||||
$<BUILD_INTERFACE:${output_dir}/src>
|
||||
)
|
||||
target_sources(xrpl_core PRIVATE ${output_dir}/src/ripple/proto/ripple.pb.cc)
|
||||
install(
|
||||
FILES ${output_dir}/src/ripple/proto/ripple.pb.h
|
||||
DESTINATION include/ripple/proto)
|
||||
target_link_libraries(xrpl_core PUBLIC protobuf::libprotobuf)
|
||||
target_compile_options(xrpl_core
|
||||
PUBLIC
|
||||
$<$<BOOL:${XCODE}>:
|
||||
--system-header-prefix="google/protobuf"
|
||||
-Wno-deprecated-dynamic-exception-spec
|
||||
>
|
||||
)
|
||||
@@ -1,82 +0,0 @@
|
||||
find_package(gRPC 1.23)
|
||||
|
||||
#[=================================[
|
||||
generate protobuf sources for
|
||||
grpc defs and bundle into a
|
||||
static lib
|
||||
#]=================================]
|
||||
set(output_dir "${CMAKE_BINARY_DIR}/proto_gen_grpc")
|
||||
set(GRPC_GEN_DIR "${output_dir}/ripple/proto")
|
||||
file(MAKE_DIRECTORY ${GRPC_GEN_DIR})
|
||||
set(GRPC_PROTO_SRCS)
|
||||
set(GRPC_PROTO_HDRS)
|
||||
set(GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
|
||||
file(GLOB_RECURSE GRPC_DEFINITION_FILES "${GRPC_PROTO_ROOT}/*.proto")
|
||||
foreach(file ${GRPC_DEFINITION_FILES})
|
||||
# /home/user/rippled/src/ripple/proto/org/.../v1/get_ledger.proto
|
||||
get_filename_component(_abs_file ${file} ABSOLUTE)
|
||||
# /home/user/rippled/src/ripple/proto/org/.../v1
|
||||
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
|
||||
# get_ledger
|
||||
get_filename_component(_basename ${file} NAME_WE)
|
||||
# /home/user/rippled/src/ripple/proto
|
||||
get_filename_component(_proto_inc ${GRPC_PROTO_ROOT} DIRECTORY) # updir one level
|
||||
# org/.../v1/get_ledger.proto
|
||||
file(RELATIVE_PATH _rel_root_file ${_proto_inc} ${_abs_file})
|
||||
# org/.../v1
|
||||
get_filename_component(_rel_root_dir ${_rel_root_file} DIRECTORY)
|
||||
# src/ripple/proto/org/.../v1
|
||||
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
|
||||
|
||||
# .cmake/proto_gen_grpc/ripple/proto/org/.../v1/get_ledger.grpc.pb.cc
|
||||
set(src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
|
||||
set(src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
|
||||
set(hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
|
||||
set(hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
|
||||
add_custom_command(
|
||||
OUTPUT ${src_1} ${src_2} ${hdr_1} ${hdr_2}
|
||||
COMMAND protobuf::protoc
|
||||
ARGS --grpc_out=${GRPC_GEN_DIR}
|
||||
--cpp_out=${GRPC_GEN_DIR}
|
||||
--plugin=protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
|
||||
-I ${_proto_inc} -I ${_rel_dir}
|
||||
${_abs_file}
|
||||
DEPENDS ${_abs_file} protobuf::protoc gRPC::grpc_cpp_plugin
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
COMMENT "Running gRPC C++ protocol buffer compiler on ${file}"
|
||||
VERBATIM)
|
||||
set_source_files_properties(${src_1} ${src_2} ${hdr_1} ${hdr_2} PROPERTIES
|
||||
GENERATED TRUE
|
||||
SKIP_UNITY_BUILD_INCLUSION ON
|
||||
)
|
||||
list(APPEND GRPC_PROTO_SRCS ${src_1} ${src_2})
|
||||
list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2})
|
||||
endforeach()
|
||||
|
||||
target_include_directories(xrpl_core SYSTEM PUBLIC
|
||||
$<BUILD_INTERFACE:${output_dir}>
|
||||
$<BUILD_INTERFACE:${output_dir}/ripple/proto>
|
||||
# The generated sources include headers relative to this path. Fix it later.
|
||||
$<INSTALL_INTERFACE:include/ripple/proto>
|
||||
)
|
||||
target_sources(xrpl_core PRIVATE ${GRPC_PROTO_SRCS})
|
||||
install(
|
||||
DIRECTORY ${output_dir}/ripple
|
||||
DESTINATION include/
|
||||
FILES_MATCHING PATTERN "*.h"
|
||||
)
|
||||
target_link_libraries(xrpl_core PUBLIC
|
||||
"gRPC::grpc++"
|
||||
# libgrpc is missing references.
|
||||
absl::random_random
|
||||
)
|
||||
target_compile_options(xrpl_core
|
||||
PRIVATE
|
||||
$<$<BOOL:${MSVC}>:-wd4065>
|
||||
$<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
|
||||
PUBLIC
|
||||
$<$<BOOL:${MSVC}>:-wd4996>
|
||||
$<$<BOOL:${XCODE}>:
|
||||
--system-header-prefix="google/protobuf"
|
||||
-Wno-deprecated-dynamic-exception-spec
|
||||
>)
|
||||
@@ -54,7 +54,6 @@ find_package(Boost 1.86 REQUIRED
|
||||
coroutine
|
||||
date_time
|
||||
filesystem
|
||||
json
|
||||
program_options
|
||||
regex
|
||||
system
|
||||
@@ -78,7 +77,6 @@ target_link_libraries(ripple_boost
|
||||
Boost::coroutine
|
||||
Boost::date_time
|
||||
Boost::filesystem
|
||||
Boost::json
|
||||
Boost::iostreams
|
||||
Boost::program_options
|
||||
Boost::regex
|
||||
|
||||
@@ -129,28 +129,27 @@ else ()
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
set(output_dir ${CMAKE_BINARY_DIR}/proto_gen)
|
||||
file(MAKE_DIRECTORY ${output_dir})
|
||||
set(ccbd ${CMAKE_CURRENT_BINARY_DIR})
|
||||
set(CMAKE_CURRENT_BINARY_DIR ${output_dir})
|
||||
protobuf_generate_cpp(PROTO_SRCS PROTO_HDRS src/ripple/proto/ripple.proto)
|
||||
set(CMAKE_CURRENT_BINARY_DIR ${ccbd})
|
||||
file (MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/proto_gen)
|
||||
set (save_CBD ${CMAKE_CURRENT_BINARY_DIR})
|
||||
set (CMAKE_CURRENT_BINARY_DIR ${CMAKE_BINARY_DIR}/proto_gen)
|
||||
protobuf_generate_cpp (
|
||||
PROTO_SRCS
|
||||
PROTO_HDRS
|
||||
src/ripple/proto/ripple.proto)
|
||||
set (CMAKE_CURRENT_BINARY_DIR ${save_CBD})
|
||||
|
||||
target_include_directories(xrpl_core SYSTEM PUBLIC
|
||||
# The generated implementation imports the header relative to the output
|
||||
# directory.
|
||||
$<BUILD_INTERFACE:${output_dir}>
|
||||
$<BUILD_INTERFACE:${output_dir}/src>
|
||||
)
|
||||
target_sources(xrpl_core PRIVATE ${output_dir}/src/ripple/proto/ripple.pb.cc)
|
||||
install(
|
||||
FILES ${output_dir}/src/ripple/proto/ripple.pb.h
|
||||
DESTINATION include/ripple/proto)
|
||||
target_link_libraries(xrpl_core PUBLIC protobuf::libprotobuf)
|
||||
target_compile_options(xrpl_core
|
||||
add_library (pbufs STATIC ${PROTO_SRCS} ${PROTO_HDRS})
|
||||
|
||||
target_include_directories (pbufs PRIVATE src)
|
||||
target_include_directories (pbufs
|
||||
SYSTEM PUBLIC ${CMAKE_BINARY_DIR}/proto_gen)
|
||||
target_link_libraries (pbufs protobuf::libprotobuf)
|
||||
target_compile_options (pbufs
|
||||
PUBLIC
|
||||
$<$<BOOL:${is_xcode}>:
|
||||
--system-header-prefix="google/protobuf"
|
||||
-Wno-deprecated-dynamic-exception-spec
|
||||
>
|
||||
)
|
||||
>)
|
||||
add_library (Ripple::pbufs ALIAS pbufs)
|
||||
target_link_libraries (ripple_libs INTERFACE Ripple::pbufs)
|
||||
exclude_if_included (pbufs)
|
||||
|
||||
@@ -81,4 +81,4 @@ if(XAR_LIBRARY)
|
||||
else()
|
||||
message(WARNING "xar library not found... (only important for mac builds)")
|
||||
endif()
|
||||
add_library (wasmedge::wasmedge ALIAS wasmedge)
|
||||
add_library (NIH::WasmEdge ALIAS wasmedge)
|
||||
|
||||
@@ -314,33 +314,25 @@ endif ()
|
||||
grpc defs and bundle into a
|
||||
static lib
|
||||
#]=================================]
|
||||
set(output_dir "${CMAKE_BINARY_DIR}/proto_gen_grpc")
|
||||
set(GRPC_GEN_DIR "${output_dir}/ripple/proto")
|
||||
file(MAKE_DIRECTORY ${GRPC_GEN_DIR})
|
||||
set(GRPC_PROTO_SRCS)
|
||||
set(GRPC_PROTO_HDRS)
|
||||
set(GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
|
||||
file(GLOB_RECURSE GRPC_DEFINITION_FILES "${GRPC_PROTO_ROOT}/*.proto")
|
||||
set (GRPC_GEN_DIR "${CMAKE_BINARY_DIR}/proto_gen_grpc")
|
||||
file (MAKE_DIRECTORY ${GRPC_GEN_DIR})
|
||||
set (GRPC_PROTO_SRCS)
|
||||
set (GRPC_PROTO_HDRS)
|
||||
set (GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org")
|
||||
file(GLOB_RECURSE GRPC_DEFINITION_FILES LIST_DIRECTORIES false "${GRPC_PROTO_ROOT}/*.proto")
|
||||
foreach(file ${GRPC_DEFINITION_FILES})
|
||||
# /home/user/rippled/src/ripple/proto/org/.../v1/get_ledger.proto
|
||||
get_filename_component(_abs_file ${file} ABSOLUTE)
|
||||
# /home/user/rippled/src/ripple/proto/org/.../v1
|
||||
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
|
||||
# get_ledger
|
||||
get_filename_component(_basename ${file} NAME_WE)
|
||||
# /home/user/rippled/src/ripple/proto
|
||||
get_filename_component(_proto_inc ${GRPC_PROTO_ROOT} DIRECTORY) # updir one level
|
||||
# org/.../v1/get_ledger.proto
|
||||
file(RELATIVE_PATH _rel_root_file ${_proto_inc} ${_abs_file})
|
||||
# org/.../v1
|
||||
get_filename_component(_rel_root_dir ${_rel_root_file} DIRECTORY)
|
||||
# src/ripple/proto/org/.../v1
|
||||
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
|
||||
|
||||
set(src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
|
||||
set(src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
|
||||
set(hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
|
||||
set(hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
|
||||
set (src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc")
|
||||
set (src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc")
|
||||
set (hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h")
|
||||
set (hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h")
|
||||
add_custom_command(
|
||||
OUTPUT ${src_1} ${src_2} ${hdr_1} ${hdr_2}
|
||||
COMMAND protobuf::protoc
|
||||
@@ -353,32 +345,16 @@ foreach(file ${GRPC_DEFINITION_FILES})
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
COMMENT "Running gRPC C++ protocol buffer compiler on ${file}"
|
||||
VERBATIM)
|
||||
set_source_files_properties(${src_1} ${src_2} ${hdr_1} ${hdr_2} PROPERTIES
GENERATED TRUE
SKIP_UNITY_BUILD_INCLUSION ON
)
set_source_files_properties(${src_1} ${src_2} ${hdr_1} ${hdr_2} PROPERTIES GENERATED TRUE)
list(APPEND GRPC_PROTO_SRCS ${src_1} ${src_2})
list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2})
endforeach()

target_include_directories(xrpl_core SYSTEM PUBLIC
$<BUILD_INTERFACE:${output_dir}>
$<BUILD_INTERFACE:${output_dir}/ripple/proto>
# The generated sources include headers relative to this path. Fix it later.
$<INSTALL_INTERFACE:include/ripple/proto>
)
target_sources(xrpl_core PRIVATE ${GRPC_PROTO_SRCS})
install(
DIRECTORY ${output_dir}/ripple
DESTINATION include/
FILES_MATCHING PATTERN "*.h"
)
target_link_libraries(xrpl_core PUBLIC
"gRPC::grpc++"
# libgrpc is missing references.
absl::random_random
)
target_compile_options(xrpl_core
add_library (grpc_pbufs STATIC ${GRPC_PROTO_SRCS} ${GRPC_PROTO_HDRS})
#target_include_directories (grpc_pbufs PRIVATE src)
target_include_directories (grpc_pbufs SYSTEM PUBLIC ${GRPC_GEN_DIR})
target_link_libraries (grpc_pbufs protobuf::libprotobuf "gRPC::grpc++${grpc_suffix}")
target_compile_options (grpc_pbufs
PRIVATE
$<$<BOOL:${MSVC}>:-wd4065>
$<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
@@ -388,5 +364,6 @@ target_compile_options(xrpl_core
--system-header-prefix="google/protobuf"
-Wno-deprecated-dynamic-exception-spec
>)
# target_link_libraries (ripple_libs INTERFACE Ripple::grpc_pbufs)
# exclude_if_included (grpc_pbufs)
add_library (Ripple::grpc_pbufs ALIAS grpc_pbufs)
target_link_libraries (ripple_libs INTERFACE Ripple::grpc_pbufs)
exclude_if_included (grpc_pbufs)

@@ -130,7 +130,6 @@ test.csf > ripple.json
test.csf > ripple.protocol
test.json > ripple.beast
test.json > ripple.json
test.json > ripple.rpc
test.json > test.jtx
test.jtx > ripple.app
test.jtx > ripple.basics
@@ -141,8 +140,6 @@ test.jtx > ripple.json
test.jtx > ripple.ledger
test.jtx > ripple.net
test.jtx > ripple.protocol
test.jtx > ripple.resource
test.jtx > ripple.rpc
test.jtx > ripple.server
test.ledger > ripple.app
test.ledger > ripple.basics
@@ -169,6 +166,7 @@ test.nodestore > test.unit_test
test.overlay > ripple.app
test.overlay > ripple.basics
test.overlay > ripple.beast
test.overlay > ripple.core
test.overlay > ripple.overlay
test.overlay > ripple.peerfinder
test.overlay > ripple.protocol

147
CMakeLists.txt
@@ -3,16 +3,10 @@ set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

if(POLICY CMP0074)
cmake_policy(SET CMP0074 NEW)
endif()
if(POLICY CMP0077)
cmake_policy(SET CMP0077 NEW)
endif()

# Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows.
file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
if (POLICY CMP0074)
cmake_policy(SET CMP0074 NEW)
endif ()

if(POLICY CMP0144)
cmake_policy(SET CMP0144 NEW)
@@ -24,7 +18,7 @@ set(Boost_NO_BOOST_CMAKE ON)
# make GIT_COMMIT_HASH define available to all sources
find_package(Git)
if(Git_FOUND)
execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git describe --always --abbrev=40
execute_process(COMMAND ${GIT_EXECUTABLE} describe --always --abbrev=40
OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
if(gch)
set(GIT_COMMIT_HASH "${gch}")
@@ -33,32 +27,20 @@ if(Git_FOUND)
endif()
endif() #git

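The hunk above stops after capturing the hash into `GIT_COMMIT_HASH`; the define mentioned in the comment is added outside this diff. As a hedged sketch of the idea (not the project's verbatim wiring), the captured value can be surfaced to every translation unit like so:

```
# Hedged sketch -- illustrates the "make GIT_COMMIT_HASH available to all sources"
# comment; the project's actual wiring sits in lines not shown here.
if(gch)
    set(GIT_COMMIT_HASH "${gch}")
    add_compile_definitions(GIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
endif()
```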
if(thread_safety_analysis)
if (thread_safety_analysis)
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
add_compile_options("-stdlib=libc++")
add_link_options("-stdlib=libc++")
endif()

option(USE_CONAN "Use Conan package manager for dependencies" OFF)
# Then, auto-detect if conan_toolchain.cmake is being used
if(CMAKE_TOOLCHAIN_FILE)
# Check if the toolchain file path contains "conan_toolchain"
if(CMAKE_TOOLCHAIN_FILE MATCHES "conan_toolchain")
set(USE_CONAN ON CACHE BOOL "Using Conan detected from toolchain file" FORCE)
message(STATUS "Conan toolchain detected: ${CMAKE_TOOLCHAIN_FILE}")
message(STATUS "Building with Conan dependencies")
endif()
endif()

if (NOT USE_CONAN)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/deps")
endif()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/deps")

include (CheckCXXCompilerFlag)
include (FetchContent)
include (ExternalProject)
include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
include (ProcessorCount)
if (target)
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif ()
@@ -66,9 +48,7 @@ endif ()
include(RippledSanity)
include(RippledVersion)
include(RippledSettings)
if (NOT USE_CONAN)
include(RippledNIH)
endif()
include(RippledNIH)
# this check has to remain in the top-level cmake
# because of the early return statement
if (packages_only)
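The body of the `packages_only` check is cut off by the next hunk. The comment's point is that `return()` only exits the file it appears in, so the early return has to live in the top-level CMakeLists.txt rather than in an included module; a hedged sketch of that pattern:

```
# Hedged sketch of the early-return pattern (body elided in the diff above).
if (packages_only)
    # Package targets are declared by the modules included earlier.
    return()  # ends configuration here only because this is the top-level lists file
endif ()
```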
@@ -81,103 +61,30 @@ include(RippledCompiler)
include(RippledInterface)

###
|
||||
if (NOT USE_CONAN)
|
||||
set(SECP256K1_INSTALL TRUE)
|
||||
add_subdirectory(src/secp256k1)
|
||||
add_library(secp256k1::secp256k1 ALIAS secp256k1)
|
||||
add_subdirectory(src/ed25519-donna)
|
||||
include(deps/Boost)
|
||||
include(deps/OpenSSL)
|
||||
# include(deps/Secp256k1)
|
||||
# include(deps/Ed25519-donna)
|
||||
include(deps/Lz4)
|
||||
include(deps/Libarchive)
|
||||
include(deps/Sqlite)
|
||||
include(deps/Soci)
|
||||
include(deps/Snappy)
|
||||
include(deps/Rocksdb)
|
||||
include(deps/Nudb)
|
||||
include(deps/date)
|
||||
# include(deps/Protobuf)
|
||||
# include(deps/gRPC)
|
||||
include(deps/cassandra)
|
||||
include(deps/Postgres)
|
||||
include(deps/WasmEdge)
|
||||
else()
|
||||
include(conan/Boost)
|
||||
find_package(OpenSSL 1.1.1 REQUIRED)
|
||||
set_target_properties(OpenSSL::SSL PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
|
||||
)
|
||||
set(SECP256K1_INSTALL TRUE)
|
||||
add_subdirectory(src/secp256k1)
|
||||
add_library(secp256k1::secp256k1 ALIAS secp256k1)
|
||||
add_subdirectory(src/ed25519-donna)
|
||||
find_package(lz4 REQUIRED)
|
||||
# Target names with :: are not allowed in a generator expression.
|
||||
# We need to pull the include directories and imported location properties
|
||||
# from separate targets.
|
||||
find_package(LibArchive REQUIRED)
|
||||
find_package(SOCI REQUIRED)
|
||||
find_package(SQLite3 REQUIRED)
|
||||
find_package(Snappy REQUIRED)
|
||||
find_package(wasmedge REQUIRED)
|
||||
option(rocksdb "Enable RocksDB" ON)
|
||||
if(rocksdb)
|
||||
find_package(RocksDB REQUIRED)
|
||||
set_target_properties(RocksDB::rocksdb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
|
||||
)
|
||||
target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
|
||||
endif()
|
||||
find_package(nudb REQUIRED)
|
||||
find_package(date REQUIRED)
|
||||
if(TARGET nudb::core)
|
||||
set(nudb nudb::core)
|
||||
elseif(TARGET NuDB::nudb)
|
||||
set(nudb NuDB::nudb)
|
||||
else()
|
||||
message(FATAL_ERROR "unknown nudb target")
|
||||
endif()
|
||||
target_link_libraries(ripple_libs INTERFACE ${nudb})
|
||||
|
||||
if(reporting)
|
||||
find_package(cassandra-cpp-driver REQUIRED)
|
||||
find_package(PostgreSQL REQUIRED)
|
||||
target_link_libraries(ripple_libs INTERFACE
|
||||
cassandra-cpp-driver::cassandra-cpp-driver
|
||||
PostgreSQL::PostgreSQL
|
||||
)
|
||||
endif()
|
||||
target_link_libraries(ripple_libs INTERFACE
|
||||
ed25519::ed25519
|
||||
LibArchive::LibArchive
|
||||
lz4::lz4
|
||||
OpenSSL::Crypto
|
||||
OpenSSL::SSL
|
||||
# Ripple::grpc_pbufs
|
||||
# Ripple::pbufs
|
||||
secp256k1::secp256k1
|
||||
soci::soci
|
||||
SQLite::SQLite3
|
||||
)
|
||||
endif()
|
||||
|
||||
if(coverage)
|
||||
include(RippledCov)
|
||||
endif()
|
||||
include(deps/Boost)
|
||||
include(deps/OpenSSL)
|
||||
include(deps/Secp256k1)
|
||||
include(deps/Ed25519-donna)
|
||||
include(deps/Lz4)
|
||||
include(deps/Libarchive)
|
||||
include(deps/Sqlite)
|
||||
include(deps/Soci)
|
||||
include(deps/Snappy)
|
||||
include(deps/Rocksdb)
|
||||
include(deps/Nudb)
|
||||
include(deps/date)
|
||||
include(deps/Protobuf)
|
||||
include(deps/gRPC)
|
||||
include(deps/cassandra)
|
||||
include(deps/Postgres)
|
||||
include(deps/WasmEdge)
|
||||
|
||||
###
|
||||
|
||||
include(RippledCore)
|
||||
if (NOT USE_CONAN)
|
||||
include(deps/Protobuf)
|
||||
include(deps/gRPC)
|
||||
else()
|
||||
include(conan/Protobuf)
|
||||
include(conan/gRPC)
|
||||
endif()
|
||||
include(RippledInstall)
|
||||
include(RippledCov)
|
||||
include(RippledMultiConfig)
|
||||
include(RippledDocs)
|
||||
include(RippledValidatorKeys)
|
||||
|
||||
@@ -123,25 +123,6 @@ pip3 install pre-commit
pre-commit install
```

## Unit Tests
To execute all unit tests:

```rippled --unittest --unittest-jobs=<number of cores>```

(Note: Using multiple cores on a Mac M1 can cause spurious test failures. The
cause is still under investigation. If you observe this problem, try specifying fewer jobs.)

To run a specific set of test suites:

```
rippled --unittest TestSuiteName
```
Note: In this example, all tests with the prefix `TestSuiteName` will be run, so if
`TestSuiteName1` and `TestSuiteName2` both exist, then both will run.
If the given name matches a test suite exactly, partial matching stops: when a suite
titled `TestSuiteName` exists, only `TestSuiteName` is executed and no other suites run.

## Avoid

1. Proliferation of nearly identical code.
@@ -201,4 +182,4 @@ existing maintainer without a vote.


[1]: https://docs.github.com/en/get-started/quickstart/contributing-to-projects
[2]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/about-pull-request-merges#squash-and-merge-your-commits
[2]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/about-pull-request-merges#squash-and-merge-your-commits
247
RELEASENOTES.md
@@ -7,253 +7,6 @@ This document contains the release notes for `rippled`, the reference server imp
|
||||
|
||||
Have new ideas? Need help with setting up your node? [Please open an issue here](https://github.com/xrplf/rippled/issues/new/choose).
|
||||
|
||||
# Introducing XRP Ledger version 1.12.0
|
||||
|
||||
Version 1.12.0 of `rippled`, the reference server implementation of the XRP Ledger protocol, is now available. This release adds new features and bug fixes, and introduces these amendments:
|
||||
|
||||
- `AMM`
|
||||
- `Clawback`
|
||||
- `fixReducedOffersV1`
|
||||
|
||||
[Sign Up for Future Release Announcements](https://groups.google.com/g/ripple-server)
|
||||
|
||||
<!-- BREAK -->
|
||||
|
||||
## Action Required
|
||||
|
||||
Three new amendments are now open for voting according to the XRP Ledger's [amendment process](https://xrpl.org/amendments.html), which enables protocol changes following two weeks of >80% support from trusted validators.
|
||||
|
||||
If you operate an XRP Ledger server, upgrade to version 1.12.0 by September 20, 2023 to ensure service continuity. The exact time that protocol changes take effect depends on the voting decisions of the decentralized network.
|
||||
|
||||
|
||||
## Install / Upgrade
|
||||
|
||||
On supported platforms, see the [instructions on installing or updating `rippled`](https://xrpl.org/install-rippled.html).
|
||||
|
||||
The XRPL Foundation publishes portable binaries, which are drop-in replacements for the `rippled` daemon. [See information and downloads for the portable binaries](https://github.com/XRPLF/rippled-portable-builds#portable-builds-of-the-rippled-server). This will work on most distributions, including Ubuntu 16.04, 18.04, 20.04, and 22.04; CentOS; and others. Please test and open issues on GitHub if there are problems.
|
||||
|
||||
|
||||
## Changelog
|
||||
|
||||
### Amendments, New Features, and Changes
|
||||
(These are changes which may impact or be useful to end users. For example, you may be able to update your code/workflow to take advantage of these changes.)
|
||||
|
||||
- **`AMM`**: Introduces an automated market maker (AMM) protocol to the XRP Ledger's decentralized exchange, enabling you to trade assets without a counterparty. For more information about AMMs, see: [Automated Market Maker](https://opensource.ripple.com/docs/xls-30d-amm/amm-uc/). [#4294](https://github.com/XRPLF/rippled/pull/4294)
|
||||
|
||||
- **`Clawback`**: Adds a setting, *Allow Clawback*, which lets an issuer recover, or _claw back_, tokens that they previously issued. Issuers cannot enable this setting if they have issued tokens already. For additional documentation on this feature, see: [#4553](https://github.com/XRPLF/rippled/pull/4553).
|
||||
|
||||
- **`fixReducedOffersV1`**: Reduces the occurrence of order books that are blocked by reduced offers. [#4512](https://github.com/XRPLF/rippled/pull/4512)
|
||||
|
||||
- Added WebSocket and RPC port info to `server_info` responses. [#4427](https://github.com/XRPLF/rippled/pull/4427)
|
||||
|
||||
- Removed the deprecated `accepted`, `seqNum`, `hash`, and `totalCoins` fields from the `ledger` method. [#4244](https://github.com/XRPLF/rippled/pull/4244)
|
||||
|
||||
|
||||
### Bug Fixes and Performance Improvements
|
||||
(These are behind-the-scenes improvements, such as internal changes to the code, which are not expected to impact end users.)
|
||||
|
||||
- Added a pre-commit hook that runs the clang-format linter locally before committing changes. To install this feature, see: [CONTRIBUTING](https://github.com/XRPLF/xrpl-dev-portal/blob/master/CONTRIBUTING.md). [#4599](https://github.com/XRPLF/rippled/pull/4599)
|
||||
|
||||
- In order to make it more straightforward to catch and handle overflows: changed the output type of the `mulDiv()` function from `std::pair<bool, uint64_t>` to `std::optional`. [#4243](https://github.com/XRPLF/rippled/pull/4243)
|
||||
|
||||
- Updated `Handler::Condition` enum values to make the code less brittle. [#4239](https://github.com/XRPLF/rippled/pull/4239)
|
||||
|
||||
- Renamed `ServerHandlerImp` to `ServerHandler`. [#4516](https://github.com/XRPLF/rippled/pull/4516), [#4592](https://github.com/XRPLF/rippled/pull/4592)
|
||||
|
||||
- Replaced hand-rolled code with `std::from_chars` for better maintainability. [#4473](https://github.com/XRPLF/rippled/pull/4473)
|
||||
|
||||
- Removed an unused `TypedField` move constructor. [#4567](https://github.com/XRPLF/rippled/pull/4567)
|
||||
|
||||
|
||||
### Docs and Build System
|
||||
|
||||
- Updated checkout versions to resolve warnings during GitHub jobs. [#4598](https://github.com/XRPLF/rippled/pull/4598)
|
||||
|
||||
- Fixed an issue with the Debian package build. [#4591](https://github.com/XRPLF/rippled/pull/4591)
|
||||
|
||||
- Updated build instructions with additional steps to take after updating dependencies. [#4623](https://github.com/XRPLF/rippled/pull/4623)
|
||||
|
||||
- Updated contributing doc to clarify that beta releases should also be pushed to the `release` branch. [#4589](https://github.com/XRPLF/rippled/pull/4589)
|
||||
|
||||
- Enabled the `BETA_RPC_API` flag in the default unit tests config, making the API v2 (beta) available to unit tests. [#4573](https://github.com/XRPLF/rippled/pull/4573)
|
||||
|
||||
- Conan dependency management.
|
||||
- Fixed package definitions for Conan. [#4485](https://github.com/XRPLF/rippled/pull/4485)
|
||||
- Updated build dependencies to the most recent versions in Conan Center. [#4595](https://github.com/XRPLF/rippled/pull/4595)
|
||||
- Updated Conan recipe for NuDB. [#4615](https://github.com/XRPLF/rippled/pull/4615)
|
||||
|
||||
- Added binary hardening and linker flags to enhance security during the build process. [#4603](https://github.com/XRPLF/rippled/pull/4603)
|
||||
|
||||
- Added an Artifactory to the `nix` workflow to improve build times. [#4556](https://github.com/XRPLF/rippled/pull/4556)
|
||||
|
||||
- Added quality-of-life improvements to workflows, using new [concurrency control](https://docs.github.com/en/actions/using-jobs/using-concurrency) features. [#4597](https://github.com/XRPLF/rippled/pull/4597)
|
||||
|
||||
|
||||
[Full Commit Log](https://github.com/XRPLF/rippled/compare/1.11.0...1.12.0)
|
||||
|
||||
|
||||
### GitHub
|
||||
|
||||
The public source code repository for `rippled` is hosted on GitHub at <https://github.com/XRPLF/rippled>.
|
||||
|
||||
We welcome all contributions and invite everyone to join the community of XRP Ledger developers to help build the Internet of Value.
|
||||
|
||||
|
||||
## Credits
|
||||
|
||||
The following people contributed directly to this release:
|
||||
|
||||
- Alphonse N. Mousse <39067955+a-noni-mousse@users.noreply.github.com>
|
||||
- Arihant Kothari <arihantkothari17@gmail.com>
|
||||
- Chenna Keshava B S <21219765+ckeshava@users.noreply.github.com>
|
||||
- Denis Angell <dangell@transia.co>
|
||||
- Ed Hennis <ed@ripple.com>
|
||||
- Elliot Lee <github.public@intelliot.com>
|
||||
- Gregory Tsipenyuk <gregtatcam@users.noreply.github.com>
|
||||
- Howard Hinnant <howard.hinnant@gmail.com>
|
||||
- Ikko Eltociear Ashimine <eltociear@gmail.com>
|
||||
- John Freeman <jfreeman08@gmail.com>
|
||||
- Manoj Doshi <mdoshi@ripple.com>
|
||||
- Mark Travis <mtravis@ripple.com>
|
||||
- Mayukha Vadari <mvadari@gmail.com>
|
||||
- Michael Legleux <legleux@users.noreply.github.com>
|
||||
- Peter Chen <34582813+PeterChen13579@users.noreply.github.com>
|
||||
- RichardAH <richard.holland@starstone.co.nz>
|
||||
- Rome Reginelli <rome@ripple.com>
|
||||
- Scott Schurr <scott@ripple.com>
|
||||
- Shawn Xie <35279399+shawnxie999@users.noreply.github.com>
|
||||
- drlongle <drlongle@gmail.com>
|
||||
|
||||
Bug Bounties and Responsible Disclosures:
|
||||
|
||||
We welcome reviews of the rippled code and urge researchers to responsibly disclose any issues they may find.
|
||||
|
||||
To report a bug, please send a detailed report to: <bugs@xrpl.org>
|
||||
|
||||
|
||||
# Introducing XRP Ledger version 1.11.0
|
||||
|
||||
Version 1.11.0 of `rippled`, the reference server implementation of the XRP Ledger protocol, is now available.
|
||||
|
||||
This release reduces memory usage, introduces the `fixNFTokenRemint` amendment, and adds new features and bug fixes. For example, the new NetworkID field in transactions helps to prevent replay attacks with side-chains.
|
||||
|
||||
[Sign Up for Future Release Announcements](https://groups.google.com/g/ripple-server)
|
||||
|
||||
<!-- BREAK -->
|
||||
|
||||
## Action Required
|
||||
|
||||
The `fixNFTokenRemint` amendment is now open for voting according to the XRP Ledger's [amendment process](https://xrpl.org/amendments.html), which enables protocol changes following two weeks of >80% support from trusted validators.
|
||||
|
||||
If you operate an XRP Ledger server, upgrade to version 1.11.0 by July 5 to ensure service continuity. The exact time that protocol changes take effect depends on the voting decisions of the decentralized network.
|
||||
|
||||
|
||||
## Install / Upgrade
|
||||
|
||||
On supported platforms, see the [instructions on installing or updating `rippled`](https://xrpl.org/install-rippled.html).
|
||||
|
||||
|
||||
## What's Changed
|
||||
|
||||
### New Features and Improvements
|
||||
|
||||
* Allow port numbers to be specified using either a colon or a space by @RichardAH in https://github.com/XRPLF/rippled/pull/4328
|
||||
* Eliminate memory allocation from critical path: by @nbougalis in https://github.com/XRPLF/rippled/pull/4353
|
||||
* Make it easy for projects to depend on libxrpl by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4449
|
||||
* Add the ability to mark amendments as obsolete by @ximinez in https://github.com/XRPLF/rippled/pull/4291
|
||||
* Always create the FeeSettings object in genesis ledger by @ximinez in https://github.com/XRPLF/rippled/pull/4319
|
||||
* Log exception messages in several locations by @drlongle in https://github.com/XRPLF/rippled/pull/4400
|
||||
* Parse flags in account_info method by @drlongle in https://github.com/XRPLF/rippled/pull/4459
|
||||
* Add NFTokenPages to account_objects RPC by @RichardAH in https://github.com/XRPLF/rippled/pull/4352
|
||||
* add jss fields used by clio `nft_info` by @ledhed2222 in https://github.com/XRPLF/rippled/pull/4320
|
||||
* Introduce a slab-based memory allocator and optimize SHAMapItem by @nbougalis in https://github.com/XRPLF/rippled/pull/4218
|
||||
* Add NetworkID field to transactions to help prevent replay attacks on and from side-chains by @RichardAH in https://github.com/XRPLF/rippled/pull/4370
|
||||
* If present, set quorum based on command line. by @mtrippled in https://github.com/XRPLF/rippled/pull/4489
|
||||
* API does not accept seed or public key for account by @drlongle in https://github.com/XRPLF/rippled/pull/4404
|
||||
* Add `nftoken_id`, `nftoken_ids` and `offer_id` meta fields into NFT `Tx` responses by @shawnxie999 in https://github.com/XRPLF/rippled/pull/4447
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* fix(gateway_balances): handle overflow exception by @RichardAH in https://github.com/XRPLF/rippled/pull/4355
|
||||
* fix(ValidatorSite): handle rare null pointer dereference in timeout by @ximinez in https://github.com/XRPLF/rippled/pull/4420
|
||||
* RPC commands understand markers derived from all ledger object types by @ximinez in https://github.com/XRPLF/rippled/pull/4361
|
||||
* `fixNFTokenRemint`: prevent NFT re-mint: by @shawnxie999 in https://github.com/XRPLF/rippled/pull/4406
|
||||
* Fix a case where ripple::Expected returned a json array, not a value by @scottschurr in https://github.com/XRPLF/rippled/pull/4401
|
||||
* fix: Ledger data returns an empty list (instead of null) when all entries are filtered out by @drlongle in https://github.com/XRPLF/rippled/pull/4398
|
||||
* Fix unit test ripple.app.LedgerData by @drlongle in https://github.com/XRPLF/rippled/pull/4484
|
||||
* Fix the fix for std::result_of by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4496
|
||||
* Fix errors for Clang 16 by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4501
|
||||
* Ensure that switchover vars are initialized before use: by @seelabs in https://github.com/XRPLF/rippled/pull/4527
|
||||
* Move faulty assert by @ximinez in https://github.com/XRPLF/rippled/pull/4533
|
||||
* Fix unaligned load and stores: (#4528) by @seelabs in https://github.com/XRPLF/rippled/pull/4531
|
||||
* fix node size estimation by @dangell7 in https://github.com/XRPLF/rippled/pull/4536
|
||||
* fix: remove redundant moves by @ckeshava in https://github.com/XRPLF/rippled/pull/4565
|
||||
|
||||
### Code Cleanup and Testing
|
||||
|
||||
* Replace compare() with the three-way comparison operator in base_uint, Issue and Book by @drlongle in https://github.com/XRPLF/rippled/pull/4411
|
||||
* Rectify the import paths of boost::function_output_iterator by @ckeshava in https://github.com/XRPLF/rippled/pull/4293
|
||||
* Expand Linux test matrix by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4454
|
||||
* Add patched recipe for SOCI by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4510
|
||||
* Switch to self-hosted runners for macOS by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4511
|
||||
* [TRIVIAL] Add missing includes by @seelabs in https://github.com/XRPLF/rippled/pull/4555
|
||||
|
||||
### Docs
|
||||
|
||||
* Refactor build instructions by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4381
|
||||
* Add install instructions for package managers by @thejohnfreeman in https://github.com/XRPLF/rippled/pull/4472
|
||||
* Fix typo by @solmsted in https://github.com/XRPLF/rippled/pull/4508
|
||||
* Update environment.md by @sappenin in https://github.com/XRPLF/rippled/pull/4498
|
||||
* Update BUILD.md by @oeggert in https://github.com/XRPLF/rippled/pull/4514
|
||||
* Trivial: add comments for NFToken-related invariants by @scottschurr in https://github.com/XRPLF/rippled/pull/4558
|
||||
|
||||
## New Contributors
|
||||
* @drlongle made their first contribution in https://github.com/XRPLF/rippled/pull/4411
|
||||
* @ckeshava made their first contribution in https://github.com/XRPLF/rippled/pull/4293
|
||||
* @solmsted made their first contribution in https://github.com/XRPLF/rippled/pull/4508
|
||||
* @sappenin made their first contribution in https://github.com/XRPLF/rippled/pull/4498
|
||||
* @oeggert made their first contribution in https://github.com/XRPLF/rippled/pull/4514
|
||||
|
||||
**Full Changelog**: https://github.com/XRPLF/rippled/compare/1.10.1...1.11.0
|
||||
|
||||
|
||||
### GitHub
|
||||
|
||||
The public source code repository for `rippled` is hosted on GitHub at <https://github.com/XRPLF/rippled>.
|
||||
|
||||
We welcome all contributions and invite everyone to join the community of XRP Ledger developers to help build the Internet of Value.
|
||||
|
||||
### Credits
|
||||
|
||||
The following people contributed directly to this release:
|
||||
- Alloy Networks <45832257+alloynetworks@users.noreply.github.com>
|
||||
- Brandon Wilson <brandon@coil.com>
|
||||
- Chenna Keshava B S <21219765+ckeshava@users.noreply.github.com>
|
||||
- David Fuelling <sappenin@gmail.com>
|
||||
- Denis Angell <dangell@transia.co>
|
||||
- Ed Hennis <ed@ripple.com>
|
||||
- Elliot Lee <github.public@intelliot.com>
|
||||
- John Freeman <jfreeman08@gmail.com>
|
||||
- Mark Travis <mtrippled@users.noreply.github.com>
|
||||
- Nik Bougalis <nikb@bougalis.net>
|
||||
- RichardAH <richard.holland@starstone.co.nz>
|
||||
- Scott Determan <scott.determan@yahoo.com>
|
||||
- Scott Schurr <scott@ripple.com>
|
||||
- Shawn Xie <35279399+shawnxie999@users.noreply.github.com>
|
||||
- drlongle <drlongle@gmail.com>
|
||||
- ledhed2222 <ledhed2222@users.noreply.github.com>
|
||||
- oeggert <117319296+oeggert@users.noreply.github.com>
|
||||
- solmsted <steven.olm@gmail.com>
|
||||
|
||||
|
||||
Bug Bounties and Responsible Disclosures:
|
||||
We welcome reviews of the rippled code and urge researchers to
|
||||
responsibly disclose any issues they may find.
|
||||
|
||||
To report a bug, please send a detailed report to:
|
||||
|
||||
bugs@xrpl.org
|
||||
|
||||
|
||||
# Introducing XRP Ledger version 1.10.1
|
||||
|
||||
|
||||
13
SECURITY.md
@@ -61,12 +61,13 @@ For these complaints or reports, please [contact our support team](mailto:bugs@x

### The following types of security problems are excluded

1. **In scope**. Only bugs in software under the scope of the program qualify. Currently, that means `xahaud` and `xahau-lib`.
2. **Relevant**. A security issue, posing a danger to user funds, privacy or the operation of the Xahau Ledger.
3. **Original and previously unknown**. Bugs that are already known and discussed in public do not qualify. Previously reported bugs, even if publicly unknown, are not eligible.
4. **Specific**. We welcome general security advice or recommendations, but we cannot pay bounties for that.
5. **Fixable**. There has to be something we can do to permanently fix the problem. Note that bugs in other people’s software may still qualify in some cases. For example, if you find a bug in a library that we use which can compromise the security of software that is in scope and we can get it fixed, you may qualify for a bounty.
6. **Unused**. If you use the exploit to attack the Xahau Ledger, you do not qualify for a bounty. If you report a vulnerability used in an ongoing or past attack and there is specific, concrete evidence that suggests you are the attacker, we reserve the right not to pay a bounty.
- (D)DOS attacks
- Error messages or error pages without sensitive data
- Tests & sample data as publicly available in our repositories at Github
- Common issues like browser header warnings or DNS configuration, identified by vulnerability scans
- Vulnerability scan reports for software we publicly use
- Security issues related to outdated OS's, browsers or plugins
- Reports for security problems that we have been notified of before

Please note: Reports that are lacking any proof (such as screenshots or other data), detailed information or details on how to reproduce any unexpected result will be investigated but will not be eligible for any reward.


@@ -35,7 +35,7 @@ message(STATUS \"Using LLVMConfig.cmake in: \${LLVM_DIR}\")
add_library (wasmedge STATIC IMPORTED GLOBAL)
set_target_properties(wasmedge PROPERTIES IMPORTED_LOCATION \${WasmEdge_LIB})
target_link_libraries (ripple_libs INTERFACE wasmedge)
add_library (wasmedge::wasmedge ALIAS wasmedge)
add_library (NIH::WasmEdge ALIAS wasmedge)
message(\"WasmEdge DONE\")
" > Builds/CMake/deps/WasmEdge.cmake &&
git checkout src/ripple/protocol/impl/BuildInfo.cpp &&

0
build-full.sh
Normal file → Executable file
@@ -283,14 +283,12 @@
|
||||
# ssl_cert
|
||||
#
|
||||
# Specifies the path to the SSL certificate file in PEM format.
|
||||
# This is not needed if the chain includes it. Use ssl_chain if
|
||||
# your certificate includes one or more intermediates.
|
||||
# This is not needed if the chain includes it.
|
||||
#
|
||||
# ssl_chain
|
||||
#
|
||||
# If you need a certificate chain, specify the path to the
|
||||
# certificate chain here. The chain may include the end certificate.
|
||||
# This must be used if the certificate includes intermediates.
|
||||
#
|
||||
# ssl_ciphers = <cipherlist>
|
||||
#
|
||||
@@ -389,21 +387,6 @@
|
||||
#
|
||||
#
|
||||
#
|
||||
# [compression]
|
||||
#
|
||||
# true or false
|
||||
#
|
||||
# true - enables compression
|
||||
# false - disables compression [default].
|
||||
#
|
||||
# The rippled server can save bandwidth by compressing its peer-to-peer communications,
|
||||
# at a cost of greater CPU usage. If you enable link compression,
|
||||
# the server automatically compresses communications with peer servers
|
||||
# that also have link compression enabled.
|
||||
# https://xrpl.org/enable-link-compression.html
|
||||
#
|
||||
#
|
||||
#
|
||||
# [ips]
|
||||
#
|
||||
# List of hostnames or ips where the Ripple protocol is served. A default
|
||||
@@ -478,6 +461,19 @@
|
||||
#
|
||||
#
|
||||
#
|
||||
# [sntp_servers]
|
||||
#
|
||||
# IP address or domain of NTP servers to use for time synchronization.
|
||||
#
|
||||
# These NTP servers are suitable for rippled servers located in the United
|
||||
# States:
|
||||
# time.windows.com
|
||||
# time.apple.com
|
||||
# time.nist.gov
|
||||
# pool.ntp.org
|
||||
#
|
||||
#
|
||||
#
|
||||
# [max_transactions]
|
||||
#
|
||||
# Configure the maximum number of transactions to have in the job queue
|
||||
@@ -1653,7 +1649,6 @@ port = 6006
|
||||
ip = 127.0.0.1
|
||||
admin = 127.0.0.1
|
||||
protocol = ws
|
||||
send_queue_limit = 500
|
||||
|
||||
[port_grpc]
|
||||
port = 50051
|
||||
@@ -1664,7 +1659,6 @@ secure_gateway = 127.0.0.1
|
||||
#port = 6005
|
||||
#ip = 127.0.0.1
|
||||
#protocol = wss
|
||||
#send_queue_limit = 500
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
@@ -1720,6 +1714,12 @@ advisory_delete=0
|
||||
[debug_logfile]
|
||||
/var/log/rippled/debug.log
|
||||
|
||||
[sntp_servers]
|
||||
time.windows.com
|
||||
time.apple.com
|
||||
time.nist.gov
|
||||
pool.ntp.org
|
||||
|
||||
# To use the XRP test network
|
||||
# (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
|
||||
# use the following [ips] section:
|
||||
|
||||
@@ -450,6 +450,19 @@
|
||||
#
|
||||
#
|
||||
#
|
||||
# [sntp_servers]
|
||||
#
|
||||
# IP address or domain of NTP servers to use for time synchronization.
|
||||
#
|
||||
# These NTP servers are suitable for rippled servers located in the United
|
||||
# States:
|
||||
# time.windows.com
|
||||
# time.apple.com
|
||||
# time.nist.gov
|
||||
# pool.ntp.org
|
||||
#
|
||||
#
|
||||
#
|
||||
# [max_transactions]
|
||||
#
|
||||
# Configure the maximum number of transactions to have in the job queue
|
||||
@@ -1649,6 +1662,12 @@ advisory_delete=0
|
||||
[debug_logfile]
|
||||
/var/log/rippled-reporting/debug.log
|
||||
|
||||
[sntp_servers]
|
||||
time.windows.com
|
||||
time.apple.com
|
||||
time.nist.gov
|
||||
pool.ntp.org
|
||||
|
||||
# To use the XRP test network
|
||||
# (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
|
||||
# use the following [ips] section:
|
||||
|
||||
162
conanfile.py
@@ -1,162 +0,0 @@
|
||||
from conan import ConanFile
|
||||
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
|
||||
import re
|
||||
|
||||
class Xrpl(ConanFile):
|
||||
name = 'xrpl'
|
||||
|
||||
license = 'ISC'
|
||||
author = 'John Freeman <jfreeman@ripple.com>'
|
||||
url = 'https://github.com/xrplf/rippled'
|
||||
description = 'The XRP Ledger'
|
||||
settings = 'os', 'compiler', 'build_type', 'arch'
|
||||
options = {
|
||||
'assertions': [True, False],
|
||||
'coverage': [True, False],
|
||||
'fPIC': [True, False],
|
||||
'jemalloc': [True, False],
|
||||
'reporting': [True, False],
|
||||
'rocksdb': [True, False],
|
||||
'shared': [True, False],
|
||||
'static': [True, False],
|
||||
'tests': [True, False],
|
||||
'unity': [True, False],
|
||||
}
|
||||
|
||||
requires = [
|
||||
'boost/1.86.0',
|
||||
'date/3.0.1',
|
||||
'libarchive/3.6.0',
|
||||
'lz4/1.9.3',
|
||||
'grpc/1.50.1',
|
||||
'nudb/2.0.8',
|
||||
'openssl/1.1.1u',
|
||||
'protobuf/3.21.9',
|
||||
'snappy/1.1.10',
|
||||
'soci/4.0.3',
|
||||
'sqlite3/3.42.0',
|
||||
'zlib/1.2.13',
|
||||
'wasmedge/0.11.2',
|
||||
]
|
||||
|
||||
default_options = {
|
||||
'assertions': False,
|
||||
'coverage': False,
|
||||
'fPIC': True,
|
||||
'jemalloc': False,
|
||||
'reporting': False,
|
||||
'rocksdb': True,
|
||||
'shared': False,
|
||||
'static': True,
|
||||
'tests': True,
|
||||
'unity': False,
|
||||
|
||||
'cassandra-cpp-driver:shared': False,
|
||||
'date:header_only': True,
|
||||
'grpc:shared': False,
|
||||
'grpc:secure': True,
|
||||
'libarchive:shared': False,
|
||||
'libarchive:with_acl': False,
|
||||
'libarchive:with_bzip2': False,
|
||||
'libarchive:with_cng': False,
|
||||
'libarchive:with_expat': False,
|
||||
'libarchive:with_iconv': False,
|
||||
'libarchive:with_libxml2': False,
|
||||
'libarchive:with_lz4': True,
|
||||
'libarchive:with_lzma': False,
|
||||
'libarchive:with_lzo': False,
|
||||
'libarchive:with_nettle': False,
|
||||
'libarchive:with_openssl': False,
|
||||
'libarchive:with_pcreposix': False,
|
||||
'libarchive:with_xattr': False,
|
||||
'libarchive:with_zlib': False,
|
||||
'libpq:shared': False,
|
||||
'lz4:shared': False,
|
||||
'openssl:shared': False,
|
||||
'protobuf:shared': False,
|
||||
'protobuf:with_zlib': True,
|
||||
'rocksdb:enable_sse': False,
|
||||
'rocksdb:lite': False,
|
||||
'rocksdb:shared': False,
|
||||
'rocksdb:use_rtti': True,
|
||||
'rocksdb:with_jemalloc': False,
|
||||
'rocksdb:with_lz4': True,
|
||||
'rocksdb:with_snappy': True,
|
||||
'snappy:shared': False,
|
||||
'soci:shared': False,
|
||||
'soci:with_sqlite3': True,
|
||||
'soci:with_boost': True,
|
||||
}
|
||||
|
||||
def set_version(self):
|
||||
path = f'{self.recipe_folder}/src/ripple/protocol/impl/BuildInfo.cpp'
|
||||
regex = r'versionString\s?=\s?\"(.*)\"'
|
||||
with open(path, 'r') as file:
|
||||
matches = (re.search(regex, line) for line in file)
|
||||
match = next(m for m in matches if m)
|
||||
self.version = match.group(1)
|
||||
|
||||
def configure(self):
|
||||
if self.settings.compiler == 'apple-clang':
|
||||
self.options['boost'].visibility = 'global'
|
||||
|
||||
def requirements(self):
|
||||
if self.options.jemalloc:
|
||||
self.requires('jemalloc/5.2.1')
|
||||
if self.options.reporting:
|
||||
self.requires('cassandra-cpp-driver/2.15.3')
|
||||
self.requires('libpq/13.6')
|
||||
if self.options.rocksdb:
|
||||
self.requires('rocksdb/6.27.3')
|
||||
|
||||
exports_sources = (
|
||||
'CMakeLists.txt', 'Builds/*', 'bin/getRippledInfo', 'src/*', 'cfg/*'
|
||||
)
|
||||
|
||||
def layout(self):
|
||||
cmake_layout(self)
|
||||
# Fix this setting to follow the default introduced in Conan 1.48
|
||||
# to align with our build instructions.
|
||||
self.folders.generators = 'build/generators'
|
||||
|
||||
generators = 'CMakeDeps'
|
||||
def generate(self):
|
||||
tc = CMakeToolchain(self)
|
||||
tc.variables['tests'] = self.options.tests
|
||||
tc.variables['assert'] = self.options.assertions
|
||||
tc.variables['coverage'] = self.options.coverage
|
||||
tc.variables['jemalloc'] = self.options.jemalloc
|
||||
tc.variables['reporting'] = self.options.reporting
|
||||
tc.variables['rocksdb'] = self.options.rocksdb
|
||||
tc.variables['BUILD_SHARED_LIBS'] = self.options.shared
|
||||
tc.variables['static'] = self.options.static
|
||||
tc.variables['unity'] = self.options.unity
|
||||
tc.generate()
|
||||
|
||||
def build(self):
|
||||
cmake = CMake(self)
|
||||
cmake.verbose = True
|
||||
cmake.configure()
|
||||
cmake.build()
|
||||
|
||||
def package(self):
|
||||
cmake = CMake(self)
|
||||
cmake.verbose = True
|
||||
cmake.install()
|
||||
|
||||
def package_info(self):
|
||||
libxrpl = self.cpp_info.components['libxrpl']
|
||||
libxrpl.libs = [
|
||||
'xrpl_core',
|
||||
'ed25519',
|
||||
'secp256k1',
|
||||
]
|
||||
# TODO: Fix the protobufs to include each other relative to
|
||||
# `include/`, not `include/ripple/proto/`.
|
||||
libxrpl.includedirs = ['include', 'include/ripple/proto']
|
||||
libxrpl.requires = [
|
||||
'boost::boost',
|
||||
'openssl::crypto',
|
||||
'date::date',
|
||||
'grpc::grpc++',
|
||||
]
|
||||
84
docs/build/environment.md
vendored
@@ -1,84 +0,0 @@
Our [build instructions][BUILD.md] assume you have a C++ development
environment complete with Git, Python, Conan, CMake, and a C++ compiler.
This document exists to help readers set one up on any of the Big Three
platforms: Linux, macOS, or Windows.

[BUILD.md]: ../../BUILD.md


## Linux

Package ecosystems vary across Linux distributions,
so there is no one set of instructions that will work for every Linux user.
These instructions are written for Ubuntu 22.04.
They are largely copied from the [script][1] used to configure our Docker
container for continuous integration.
That script handles many more responsibilities.
These instructions are just the bare minimum to build one configuration of
rippled.
You can check that codebase for other Linux distributions and versions.
If you cannot find yours there,
then we hope that these instructions can at least guide you in the right
direction.

```
apt update
apt install --yes curl git libssl-dev python3.10-dev python3-pip make g++-11

curl --location --remote-name \
"https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1.tar.gz"
tar -xzf cmake-3.25.1.tar.gz
rm cmake-3.25.1.tar.gz
cd cmake-3.25.1
./bootstrap --parallel=$(nproc)
make --jobs $(nproc)
make install
cd ..

pip3 install 'conan<2'
```

[1]: https://github.com/thejohnfreeman/rippled-docker/blob/master/ubuntu-22.04/install.sh


## macOS

Open a Terminal and enter the below command to bring up a dialog to install
the command line developer tools.
Once it is finished, this command should return a version greater than the
minimum required (see [BUILD.md][]).

```
clang --version
```

The command line developer tools should include Git too:

```
git --version
```

Install [Homebrew][],
use it to install [pyenv][],
use it to install Python,
and use it to install Conan:

[Homebrew]: https://brew.sh/
[pyenv]: https://github.com/pyenv/pyenv

```
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
brew update
brew install xz
brew install pyenv
pyenv install 3.10-dev
pyenv global 3.10-dev
eval "$(pyenv init -)"
pip install 'conan<2'
```

Install CMake with Homebrew too:

```
brew install cmake
```
159
docs/build/install.md
vendored
@@ -1,159 +0,0 @@
|
||||
This document contains instructions for installing rippled.
|
||||
The APT package manager is common on Debian-based Linux distributions like
|
||||
Ubuntu,
|
||||
while the YUM package manager is common on Red Hat-based Linux distributions
|
||||
like CentOS.
|
||||
Installing from source is an option for all platforms,
|
||||
and the only supported option for installing custom builds.
|
||||
|
||||
|
||||
## From source
|
||||
|
||||
From a source build, you can install rippled and libxrpl using CMake's
|
||||
`--install` mode:
|
||||
|
||||
```
|
||||
cmake --install . --prefix /opt/local
|
||||
```
|
||||
|
||||
The default [prefix][1] is typically `/usr/local` on Linux and macOS and
|
||||
`C:/Program Files/rippled` on Windows.
|
||||
|
||||
[1]: https://cmake.org/cmake/help/latest/variable/CMAKE_INSTALL_PREFIX.html
|
||||
|
||||
|
||||
## With the APT package manager
|
||||
|
||||
1. Update repositories:
|
||||
|
||||
sudo apt update -y
|
||||
|
||||
2. Install utilities:
|
||||
|
||||
sudo apt install -y apt-transport-https ca-certificates wget gnupg
|
||||
|
||||
3. Add Ripple's package-signing GPG key to your list of trusted keys:
|
||||
|
||||
sudo mkdir /usr/local/share/keyrings/
|
||||
wget -q -O - "https://repos.ripple.com/repos/api/gpg/key/public" | gpg --dearmor > ripple-key.gpg
|
||||
sudo mv ripple-key.gpg /usr/local/share/keyrings
|
||||
|
||||
|
||||
4. Check the fingerprint of the newly-added key:
|
||||
|
||||
gpg /usr/local/share/keyrings/ripple-key.gpg
|
||||
|
||||
The output should include an entry for Ripple such as the following:
|
||||
|
||||
gpg: WARNING: no command supplied. Trying to guess what you mean ...
|
||||
pub rsa3072 2019-02-14 [SC] [expires: 2026-02-17]
|
||||
C0010EC205B35A3310DC90DE395F97FFCCAFD9A2
|
||||
uid TechOps Team at Ripple <techops+rippled@ripple.com>
|
||||
sub rsa3072 2019-02-14 [E] [expires: 2026-02-17]
|
||||
|
||||
|
||||
In particular, make sure that the fingerprint matches. (In the above example, the fingerprint is on the third line, starting with `C001`.)
|
||||
|
||||
5. Add the appropriate Ripple repository for your operating system version:
|
||||
|
||||
echo "deb [signed-by=/usr/local/share/keyrings/ripple-key.gpg] https://repos.ripple.com/repos/rippled-deb focal stable" | \
|
||||
sudo tee -a /etc/apt/sources.list.d/ripple.list
|
||||
|
||||
The above example is appropriate for **Ubuntu 20.04 Focal Fossa**. For other operating systems, replace the word `focal` with one of the following:
|
||||
|
||||
- `jammy` for **Ubuntu 22.04 Jammy Jellyfish**
|
||||
- `bionic` for **Ubuntu 18.04 Bionic Beaver**
|
||||
- `bullseye` for **Debian 11 Bullseye**
|
||||
- `buster` for **Debian 10 Buster**
|
||||
|
||||
If you want access to development or pre-release versions of `rippled`, use one of the following instead of `stable`:
|
||||
|
||||
- `unstable` - Pre-release builds ([`release` branch](https://github.com/ripple/rippled/tree/release))
|
||||
- `nightly` - Experimental/development builds ([`develop` branch](https://github.com/ripple/rippled/tree/develop))
|
||||
|
||||
**Warning:** Unstable and nightly builds may be broken at any time. Do not use these builds for production servers.
|
||||
|
||||
6. Fetch the Ripple repository.
|
||||
|
||||
sudo apt -y update
|
||||
|
||||
7. Install the `rippled` software package:
|
||||
|
||||
sudo apt -y install rippled
|
||||
|
||||
8. Check the status of the `rippled` service:
|
||||
|
||||
systemctl status rippled.service
|
||||
|
||||
The `rippled` service should start automatically. If not, you can start it manually:
|
||||
|
||||
sudo systemctl start rippled.service
|
||||
|
||||
9. Optional: allow `rippled` to bind to privileged ports.
|
||||
|
||||
This allows you to serve incoming API requests on port 80 or 443. (If you want to do so, you must also update the config file's port settings.)
|
||||
|
||||
sudo setcap 'cap_net_bind_service=+ep' /opt/ripple/bin/rippled
|
||||
|
||||
|
||||
## With the YUM package manager
|
||||
|
||||
1. Install the Ripple RPM repository:
|
||||
|
||||
Choose the appropriate RPM repository for the stability of releases you want:
|
||||
|
||||
- `stable` for the latest production release (`master` branch)
|
||||
- `unstable` for pre-release builds (`release` branch)
|
||||
- `nightly` for experimental/development builds (`develop` branch)
|
||||
|
||||
*Stable*
|
||||
|
||||
cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
|
||||
[ripple-stable]
|
||||
name=XRP Ledger Packages
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
repo_gpgcheck=1
|
||||
baseurl=https://repos.ripple.com/repos/rippled-rpm/stable/
|
||||
gpgkey=https://repos.ripple.com/repos/rippled-rpm/stable/repodata/repomd.xml.key
|
||||
REPOFILE
|
||||
|
||||
*Unstable*
|
||||
|
||||
cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
|
||||
[ripple-unstable]
|
||||
name=XRP Ledger Packages
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
repo_gpgcheck=1
|
||||
baseurl=https://repos.ripple.com/repos/rippled-rpm/unstable/
|
||||
gpgkey=https://repos.ripple.com/repos/rippled-rpm/unstable/repodata/repomd.xml.key
|
||||
REPOFILE
|
||||
|
||||
*Nightly*
|
||||
|
||||
cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
|
||||
[ripple-nightly]
|
||||
name=XRP Ledger Packages
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
repo_gpgcheck=1
|
||||
baseurl=https://repos.ripple.com/repos/rippled-rpm/nightly/
|
||||
gpgkey=https://repos.ripple.com/repos/rippled-rpm/nightly/repodata/repomd.xml.key
|
||||
REPOFILE
|
||||
|
||||
2. Fetch the latest repo updates:
|
||||
|
||||
sudo yum -y update
|
||||
|
||||
3. Install the new `rippled` package:
|
||||
|
||||
sudo yum install -y rippled
|
||||
|
||||
4. Configure the `rippled` service to start on boot:
|
||||
|
||||
sudo systemctl enable rippled.service
|
||||
|
||||
5. Start the `rippled` service:
|
||||
|
||||
sudo systemctl start rippled.service
|
||||
193
external/rocksdb/conanfile.py
vendored
@@ -1,193 +0,0 @@
|
||||
import os
|
||||
import shutil
|
||||
from conans import ConanFile, CMake
|
||||
from conan.tools import microsoft as ms
|
||||
|
||||
class RocksDB(ConanFile):
|
||||
name = 'rocksdb'
|
||||
version = '6.27.3'
|
||||
|
||||
license = ('GPL-2.0-only', 'Apache-2.0')
|
||||
url = 'https://github.com/conan-io/conan-center-index'
|
||||
description = 'A library that provides an embeddable, persistent key-value store for fast storage'
|
||||
topics = ('rocksdb', 'database', 'leveldb', 'facebook', 'key-value')
|
||||
|
||||
settings = 'os', 'compiler', 'build_type', 'arch'
|
||||
options = {
|
||||
'enable_sse': [False, 'sse42', 'avx2'],
|
||||
'fPIC': [True, False],
|
||||
'lite': [True, False],
|
||||
'shared': [True, False],
|
||||
'use_rtti': [True, False],
|
||||
'with_gflags': [True, False],
|
||||
'with_jemalloc': [True, False],
|
||||
'with_lz4': [True, False],
|
||||
'with_snappy': [True, False],
|
||||
'with_tbb': [True, False],
|
||||
'with_zlib': [True, False],
|
||||
'with_zstd': [True, False],
|
||||
}
|
||||
default_options = {
|
||||
'enable_sse': False,
|
||||
'fPIC': True,
|
||||
'lite': False,
|
||||
'shared': False,
|
||||
'use_rtti': False,
|
||||
'with_gflags': False,
|
||||
'with_jemalloc': False,
|
||||
'with_lz4': False,
|
||||
'with_snappy': False,
|
||||
'with_tbb': False,
|
||||
'with_zlib': False,
|
||||
'with_zstd': False,
|
||||
}
|
||||
|
||||
def requirements(self):
|
||||
if self.options.with_gflags:
|
||||
self.requires('gflags/2.2.2')
|
||||
if self.options.with_jemalloc:
|
||||
self.requires('jemalloc/5.2.1')
|
||||
if self.options.with_lz4:
|
||||
self.requires('lz4/1.9.3')
|
||||
if self.options.with_snappy:
|
||||
self.requires('snappy/1.1.9')
|
||||
if self.options.with_tbb:
|
||||
self.requires('onetbb/2020.3')
|
||||
if self.options.with_zlib:
|
||||
self.requires('zlib/1.2.11')
|
||||
if self.options.with_zstd:
|
||||
self.requires('zstd/1.5.2')
|
||||
|
||||
def config_options(self):
|
||||
if self.settings.os == 'Windows':
|
||||
del self.options.fPIC
|
||||
|
||||
def configure(self):
|
||||
if self.options.shared:
|
||||
del self.options.fPIC
|
||||
|
||||
generators = 'cmake', 'cmake_find_package'
|
||||
|
||||
scm = {
|
||||
'type': 'git',
|
||||
'url': 'https://github.com/facebook/rocksdb.git',
|
||||
'revision': 'v6.27.3',
|
||||
}
|
||||
|
||||
exports_sources = 'thirdparty.inc'
|
||||
# For out-of-source build.
|
||||
no_copy_source = True
|
||||
|
||||
_cmake = None
|
||||
|
||||
def _configure_cmake(self):
|
||||
if self._cmake:
|
||||
return
|
||||
|
||||
self._cmake = CMake(self)
|
||||
|
||||
self._cmake.definitions['CMAKE_POSITION_INDEPENDENT_CODE'] = True
|
||||
|
||||
self._cmake.definitions['DISABLE_STALL_NOTIF'] = False
|
||||
self._cmake.definitions['FAIL_ON_WARNINGS'] = False
|
||||
self._cmake.definitions['OPTDBG'] = True
|
||||
self._cmake.definitions['WITH_TESTS'] = False
|
||||
self._cmake.definitions['WITH_TOOLS'] = False
|
||||
|
||||
self._cmake.definitions['WITH_GFLAGS'] = self.options.with_gflags
|
||||
self._cmake.definitions['WITH_JEMALLOC'] = self.options.with_jemalloc
|
||||
self._cmake.definitions['WITH_LZ4'] = self.options.with_lz4
|
||||
self._cmake.definitions['WITH_SNAPPY'] = self.options.with_snappy
|
||||
self._cmake.definitions['WITH_TBB'] = self.options.with_tbb
|
||||
self._cmake.definitions['WITH_ZLIB'] = self.options.with_zlib
|
||||
self._cmake.definitions['WITH_ZSTD'] = self.options.with_zstd
|
||||
|
||||
self._cmake.definitions['USE_RTTI'] = self.options.use_rtti
|
||||
self._cmake.definitions['ROCKSDB_LITE'] = self.options.lite
|
||||
self._cmake.definitions['ROCKSDB_INSTALL_ON_WINDOWS'] = (
|
||||
self.settings.os == 'Windows'
|
||||
)
|
||||
|
||||
if not self.options.enable_sse:
|
||||
self._cmake.definitions['PORTABLE'] = True
|
||||
self._cmake.definitions['FORCE_SSE42'] = False
|
||||
elif self.options.enable_sse == 'sse42':
|
||||
self._cmake.definitions['PORTABLE'] = True
|
||||
self._cmake.definitions['FORCE_SSE42'] = True
|
||||
elif self.options.enable_sse == 'avx2':
|
||||
self._cmake.definitions['PORTABLE'] = False
|
||||
self._cmake.definitions['FORCE_SSE42'] = False
|
||||
|
||||
self._cmake.definitions['WITH_ASAN'] = False
|
||||
self._cmake.definitions['WITH_BZ2'] = False
|
||||
self._cmake.definitions['WITH_JNI'] = False
|
||||
self._cmake.definitions['WITH_LIBRADOS'] = False
|
||||
if ms.is_msvc(self):
|
||||
self._cmake.definitions['WITH_MD_LIBRARY'] = (
|
||||
ms.msvc_runtime_flag(self).startswith('MD')
|
||||
)
|
||||
self._cmake.definitions['WITH_RUNTIME_DEBUG'] = (
|
||||
ms.msvc_runtime_flag(self).endswith('d')
|
||||
)
|
||||
self._cmake.definitions['WITH_NUMA'] = False
|
||||
self._cmake.definitions['WITH_TSAN'] = False
|
||||
self._cmake.definitions['WITH_UBSAN'] = False
|
||||
self._cmake.definitions['WITH_WINDOWS_UTF8_FILENAMES'] = False
|
||||
self._cmake.definitions['WITH_XPRESS'] = False
|
||||
self._cmake.definitions['WITH_FALLOCATE'] = True
|
||||
|
||||
|
||||
def build(self):
|
||||
if ms.is_msvc(self):
|
||||
file = os.path.join(
|
||||
self.recipe_folder, '..', 'export_source', 'thirdparty.inc'
|
||||
)
|
||||
shutil.copy(file, self.build_folder)
|
||||
self._configure_cmake()
|
||||
self._cmake.configure()
|
||||
self._cmake.build()
|
||||
|
||||
def package(self):
|
||||
self._configure_cmake()
|
||||
self._cmake.install()
|
||||
|
||||
def package_info(self):
|
||||
self.cpp_info.filenames['cmake_find_package'] = 'RocksDB'
|
||||
self.cpp_info.filenames['cmake_find_package_multi'] = 'RocksDB'
|
||||
self.cpp_info.set_property('cmake_file_name', 'RocksDB')
|
||||
|
||||
self.cpp_info.names['cmake_find_package'] = 'RocksDB'
|
||||
self.cpp_info.names['cmake_find_package_multi'] = 'RocksDB'
|
||||
|
||||
self.cpp_info.components['librocksdb'].names['cmake_find_package'] = 'rocksdb'
|
||||
self.cpp_info.components['librocksdb'].names['cmake_find_package_multi'] = 'rocksdb'
|
||||
self.cpp_info.components['librocksdb'].set_property(
|
||||
'cmake_target_name', 'RocksDB::rocksdb'
|
||||
)
|
||||
|
||||
self.cpp_info.components['librocksdb'].libs = ['rocksdb']
|
||||
|
||||
if self.settings.os == "Windows":
|
||||
self.cpp_info.components["librocksdb"].system_libs = ["shlwapi", "rpcrt4"]
|
||||
if self.options.shared:
|
||||
self.cpp_info.components["librocksdb"].defines = ["ROCKSDB_DLL"]
|
||||
elif self.settings.os in ["Linux", "FreeBSD"]:
|
||||
self.cpp_info.components["librocksdb"].system_libs = ["pthread", "m"]
|
||||
|
||||
if self.options.lite:
|
||||
self.cpp_info.components["librocksdb"].defines.append("ROCKSDB_LITE")
|
||||
|
||||
if self.options.with_gflags:
|
||||
self.cpp_info.components["librocksdb"].requires.append("gflags::gflags")
|
||||
if self.options.with_jemalloc:
|
||||
self.cpp_info.components["librocksdb"].requires.append("jemalloc::jemalloc")
|
||||
if self.options.with_lz4:
|
||||
self.cpp_info.components["librocksdb"].requires.append("lz4::lz4")
|
||||
if self.options.with_snappy:
|
||||
self.cpp_info.components["librocksdb"].requires.append("snappy::snappy")
|
||||
if self.options.with_tbb:
|
||||
self.cpp_info.components["librocksdb"].requires.append("onetbb::onetbb")
|
||||
if self.options.with_zlib:
|
||||
self.cpp_info.components["librocksdb"].requires.append("zlib::zlib")
|
||||
if self.options.with_zstd:
|
||||
self.cpp_info.components["librocksdb"].requires.append("zstd::zstd")
|
||||
62
external/rocksdb/thirdparty.inc
vendored
@@ -1,62 +0,0 @@
if(WITH_GFLAGS)
  # Config with namespace available since gflags 2.2.2
  find_package(gflags REQUIRED)
  set(GFLAGS_LIB gflags::gflags)
  list(APPEND THIRDPARTY_LIBS ${GFLAGS_LIB})
  add_definitions(-DGFLAGS=1)
endif()

if(WITH_SNAPPY)
  find_package(Snappy REQUIRED)
  add_definitions(-DSNAPPY)
  list(APPEND THIRDPARTY_LIBS Snappy::snappy)
endif()

if(WITH_LZ4)
  find_package(lz4 REQUIRED)
  add_definitions(-DLZ4)
  list(APPEND THIRDPARTY_LIBS lz4::lz4)
endif()

if(WITH_ZLIB)
  find_package(ZLIB REQUIRED)
  add_definitions(-DZLIB)
  list(APPEND THIRDPARTY_LIBS ZLIB::ZLIB)
endif()

option(WITH_BZ2 "build with bzip2" OFF)
if(WITH_BZ2)
  find_package(BZip2 REQUIRED)
  add_definitions(-DBZIP2)
  list(APPEND THIRDPARTY_LIBS BZip2::BZip2)
endif()

if(WITH_ZSTD)
  find_package(zstd REQUIRED)
  add_definitions(-DZSTD)
  list(APPEND THIRDPARTY_LIBS zstd::zstd)
endif()

# ================================================== XPRESS ==================================================
# This makes use of built-in Windows API, no additional includes, links to a system lib

if(WITH_XPRESS)
  message(STATUS "XPRESS is enabled")
  add_definitions(-DXPRESS)
  # We are using the implementation provided by the system
  list(APPEND SYSTEM_LIBS Cabinet.lib)
else()
  message(STATUS "XPRESS is disabled")
endif()

# ================================================== JEMALLOC ==================================================
if(WITH_JEMALLOC)
  message(STATUS "JEMALLOC library is enabled")
  add_definitions(-DROCKSDB_JEMALLOC -DJEMALLOC_EXPORT= -DJEMALLOC_NO_RENAME)
  list(APPEND THIRDPARTY_LIBS jemalloc::jemalloc)
  set(ARTIFACT_SUFFIX "_je")

else ()
  set(ARTIFACT_SUFFIX "")
  message(STATUS "JEMALLOC library is disabled")
endif ()
40
external/snappy/conandata.yml
vendored
@@ -1,40 +0,0 @@
sources:
  "1.1.10":
    url: "https://github.com/google/snappy/archive/1.1.10.tar.gz"
    sha256: "49d831bffcc5f3d01482340fe5af59852ca2fe76c3e05df0e67203ebbe0f1d90"
  "1.1.9":
    url: "https://github.com/google/snappy/archive/1.1.9.tar.gz"
    sha256: "75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7"
  "1.1.8":
    url: "https://github.com/google/snappy/archive/1.1.8.tar.gz"
    sha256: "16b677f07832a612b0836178db7f374e414f94657c138e6993cbfc5dcc58651f"
  "1.1.7":
    url: "https://github.com/google/snappy/archive/1.1.7.tar.gz"
    sha256: "3dfa02e873ff51a11ee02b9ca391807f0c8ea0529a4924afa645fbf97163f9d4"
patches:
  "1.1.10":
    - patch_file: "patches/1.1.10-0001-fix-inlining-failure.patch"
      patch_description: "disable inlining for compilation error"
      patch_type: "portability"
    - patch_file: "patches/1.1.9-0002-no-Werror.patch"
      patch_description: "disable 'warning as error' options"
      patch_type: "portability"
    - patch_file: "patches/1.1.10-0003-fix-clobber-list-older-llvm.patch"
      patch_description: "disable inline asm on apple-clang"
      patch_type: "portability"
    - patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
      patch_description: "remove 'disable rtti'"
      patch_type: "conan"
  "1.1.9":
    - patch_file: "patches/1.1.9-0001-fix-inlining-failure.patch"
      patch_description: "disable inlining for compilation error"
      patch_type: "portability"
    - patch_file: "patches/1.1.9-0002-no-Werror.patch"
      patch_description: "disable 'warning as error' options"
      patch_type: "portability"
    - patch_file: "patches/1.1.9-0003-fix-clobber-list-older-llvm.patch"
      patch_description: "disable inline asm on apple-clang"
      patch_type: "portability"
    - patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
      patch_description: "remove 'disable rtti'"
      patch_type: "conan"
89
external/snappy/conanfile.py
vendored
@@ -1,89 +0,0 @@
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
from conan.tools.scm import Version
import os

required_conan_version = ">=1.54.0"


class SnappyConan(ConanFile):
    name = "snappy"
    description = "A fast compressor/decompressor"
    topics = ("google", "compressor", "decompressor")
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://github.com/google/snappy"
    license = "BSD-3-Clause"

    package_type = "library"
    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
    }

    def export_sources(self):
        export_conandata_patches(self)

    def config_options(self):
        if self.settings.os == 'Windows':
            del self.options.fPIC

    def configure(self):
        if self.options.shared:
            self.options.rm_safe("fPIC")

    def layout(self):
        cmake_layout(self, src_folder="src")

    def validate(self):
        if self.settings.compiler.get_safe("cppstd"):
            check_min_cppstd(self, 11)

    def source(self):
        get(self, **self.conan_data["sources"][self.version], strip_root=True)

    def generate(self):
        tc = CMakeToolchain(self)
        tc.variables["SNAPPY_BUILD_TESTS"] = False
        if Version(self.version) >= "1.1.8":
            tc.variables["SNAPPY_FUZZING_BUILD"] = False
            tc.variables["SNAPPY_REQUIRE_AVX"] = False
            tc.variables["SNAPPY_REQUIRE_AVX2"] = False
            tc.variables["SNAPPY_INSTALL"] = True
        if Version(self.version) >= "1.1.9":
            tc.variables["SNAPPY_BUILD_BENCHMARKS"] = False
        tc.generate()

    def build(self):
        apply_conandata_patches(self)
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def package(self):
        copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
        cmake = CMake(self)
        cmake.install()
        rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))

    def package_info(self):
        self.cpp_info.set_property("cmake_file_name", "Snappy")
        self.cpp_info.set_property("cmake_target_name", "Snappy::snappy")
        # TODO: back to global scope in conan v2 once cmake_find_package* generators removed
        self.cpp_info.components["snappylib"].libs = ["snappy"]
        if not self.options.shared:
            if self.settings.os in ["Linux", "FreeBSD"]:
                self.cpp_info.components["snappylib"].system_libs.append("m")

        # TODO: to remove in conan v2 once cmake_find_package* generators removed
        self.cpp_info.names["cmake_find_package"] = "Snappy"
        self.cpp_info.names["cmake_find_package_multi"] = "Snappy"
        self.cpp_info.components["snappylib"].names["cmake_find_package"] = "snappy"
        self.cpp_info.components["snappylib"].names["cmake_find_package_multi"] = "snappy"
        self.cpp_info.components["snappylib"].set_property("cmake_target_name", "Snappy::snappy")
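The build() step above calls apply_conandata_patches(self), which is driven by the per-version patches table in the conandata.yml shown earlier. A rough sketch of that lookup follows; the file layout is assumed and PyYAML is used only for illustration, not something this recipe imports:

import yaml

def patches_for(version, conandata_path="conandata.yml"):
    # Mirrors the shape the recipe sees via self.conan_data:
    # {"patches": {"1.1.10": [{"patch_file": ..., "patch_type": ...}, ...]}}
    with open(conandata_path) as f:
        data = yaml.safe_load(f)
    return [entry["patch_file"] for entry in data.get("patches", {}).get(version, [])]

# e.g. patches_for("1.1.10") would list the four patch files declared for that version above.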
@@ -1,13 +0,0 @@
diff --git a/snappy-stubs-internal.h b/snappy-stubs-internal.h
index 1548ed7..3b4a9f3 100644
--- a/snappy-stubs-internal.h
+++ b/snappy-stubs-internal.h
@@ -100,7 +100,7 @@

// Inlining hints.
#if HAVE_ATTRIBUTE_ALWAYS_INLINE
-#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#else
#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#endif  // HAVE_ATTRIBUTE_ALWAYS_INLINE
@@ -1,13 +0,0 @@
diff --git a/snappy.cc b/snappy.cc
index d414718..e4efb59 100644
--- a/snappy.cc
+++ b/snappy.cc
@@ -1132,7 +1132,7 @@ inline size_t AdvanceToNextTagX86Optimized(const uint8_t** ip_p, size_t* tag) {
size_t literal_len = *tag >> 2;
size_t tag_type = *tag;
bool is_literal;
-#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__)
+#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
// TODO clang misses the fact that the (c & 3) already correctly
// sets the zero flag.
asm("and $3, %k[tag_type]\n\t"
@@ -1,14 +0,0 @@
Fixes the following error:
error: inlining failed in call to ‘always_inline’ ‘size_t snappy::AdvanceToNextTag(const uint8_t**, size_t*)’: function body can be overwritten at link time

--- snappy-stubs-internal.h
+++ snappy-stubs-internal.h
@@ -100,7 +100,7 @@

// Inlining hints.
#ifdef HAVE_ATTRIBUTE_ALWAYS_INLINE
-#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#else
#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
#endif
@@ -1,12 +0,0 @@
--- CMakeLists.txt
+++ CMakeLists.txt
@@ -69,7 +69,7 @@
- # Use -Werror for clang only.
+if(0)
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
  if(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
  endif(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
endif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
-
+endif()
@@ -1,12 +0,0 @@
asm clobbers do not work for clang < 9 and apple-clang < 11 (found by SpaceIm)
--- snappy.cc
+++ snappy.cc
@@ -1026,7 +1026,7 @@
size_t literal_len = *tag >> 2;
size_t tag_type = *tag;
bool is_literal;
-#if defined(__GNUC__) && defined(__x86_64__)
+#if defined(__GNUC__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
// TODO clang misses the fact that the (c & 3) already correctly
// sets the zero flag.
asm("and $3, %k[tag_type]\n\t"
@@ -1,20 +0,0 @@
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -53,8 +53,6 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
add_definitions(-D_HAS_EXCEPTIONS=0)

# Disable RTTI.
- string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# Use -Wall for clang and gcc.
if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
@@ -78,8 +76,6 @@ endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")

# Disable RTTI.
- string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")

# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make
12
external/soci/conandata.yml
vendored
@@ -1,12 +0,0 @@
sources:
  "4.0.3":
    url: "https://github.com/SOCI/soci/archive/v4.0.3.tar.gz"
    sha256: "4b1ff9c8545c5d802fbe06ee6cd2886630e5c03bf740e269bb625b45cf934928"
patches:
  "4.0.3":
    - patch_file: "patches/0001-Remove-hardcoded-INSTALL_NAME_DIR-for-relocatable-li.patch"
      patch_description: "Generate relocatable libraries on MacOS"
      patch_type: "portability"
    - patch_file: "patches/0002-Fix-soci_backend.patch"
      patch_description: "Fix variable names for dependencies"
      patch_type: "conan"
212
external/soci/conanfile.py
vendored
@@ -1,212 +0,0 @@
|
||||
from conan import ConanFile
|
||||
from conan.tools.build import check_min_cppstd
|
||||
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
|
||||
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
|
||||
from conan.tools.microsoft import is_msvc
|
||||
from conan.tools.scm import Version
|
||||
from conan.errors import ConanInvalidConfiguration
|
||||
import os
|
||||
|
||||
required_conan_version = ">=1.55.0"
|
||||
|
||||
|
||||
class SociConan(ConanFile):
|
||||
name = "soci"
|
||||
homepage = "https://github.com/SOCI/soci"
|
||||
url = "https://github.com/conan-io/conan-center-index"
|
||||
description = "The C++ Database Access Library "
|
||||
topics = ("mysql", "odbc", "postgresql", "sqlite3")
|
||||
license = "BSL-1.0"
|
||||
|
||||
settings = "os", "arch", "compiler", "build_type"
|
||||
options = {
|
||||
"shared": [True, False],
|
||||
"fPIC": [True, False],
|
||||
"empty": [True, False],
|
||||
"with_sqlite3": [True, False],
|
||||
"with_db2": [True, False],
|
||||
"with_odbc": [True, False],
|
||||
"with_oracle": [True, False],
|
||||
"with_firebird": [True, False],
|
||||
"with_mysql": [True, False],
|
||||
"with_postgresql": [True, False],
|
||||
"with_boost": [True, False],
|
||||
}
|
||||
default_options = {
|
||||
"shared": False,
|
||||
"fPIC": True,
|
||||
"empty": False,
|
||||
"with_sqlite3": False,
|
||||
"with_db2": False,
|
||||
"with_odbc": False,
|
||||
"with_oracle": False,
|
||||
"with_firebird": False,
|
||||
"with_mysql": False,
|
||||
"with_postgresql": False,
|
||||
"with_boost": False,
|
||||
}
|
||||
|
||||
def export_sources(self):
|
||||
export_conandata_patches(self)
|
||||
|
||||
def layout(self):
|
||||
cmake_layout(self, src_folder="src")
|
||||
|
||||
def config_options(self):
|
||||
if self.settings.os == "Windows":
|
||||
self.options.rm_safe("fPIC")
|
||||
|
||||
def configure(self):
|
||||
if self.options.shared:
|
||||
self.options.rm_safe("fPIC")
|
||||
|
||||
def requirements(self):
|
||||
if self.options.with_sqlite3:
|
||||
self.requires("sqlite3/3.41.1")
|
||||
if self.options.with_odbc and self.settings.os != "Windows":
|
||||
self.requires("odbc/2.3.11")
|
||||
if self.options.with_mysql:
|
||||
self.requires("libmysqlclient/8.0.31")
|
||||
if self.options.with_postgresql:
|
||||
self.requires("libpq/14.7")
|
||||
if self.options.with_boost:
|
||||
self.requires("boost/1.81.0")
|
||||
|
||||
@property
|
||||
def _minimum_compilers_version(self):
|
||||
return {
|
||||
"Visual Studio": "14",
|
||||
"gcc": "4.8",
|
||||
"clang": "3.8",
|
||||
"apple-clang": "8.0"
|
||||
}
|
||||
|
||||
def validate(self):
|
||||
if self.settings.compiler.get_safe("cppstd"):
|
||||
check_min_cppstd(self, 11)
|
||||
|
||||
compiler = str(self.settings.compiler)
|
||||
compiler_version = Version(self.settings.compiler.version.value)
|
||||
if compiler not in self._minimum_compilers_version:
|
||||
self.output.warning("{} recipe lacks information about the {} compiler support.".format(self.name, self.settings.compiler))
|
||||
elif compiler_version < self._minimum_compilers_version[compiler]:
|
||||
raise ConanInvalidConfiguration("{} requires a {} version >= {}".format(self.name, compiler, compiler_version))
|
||||
|
||||
prefix = "Dependencies for"
|
||||
message = "not configured in this conan package."
|
||||
if self.options.with_db2:
|
||||
# self.requires("db2/0.0.0") # TODO add support for db2
|
||||
raise ConanInvalidConfiguration("{} DB2 {} ".format(prefix, message))
|
||||
if self.options.with_oracle:
|
||||
# self.requires("oracle_db/0.0.0") # TODO add support for oracle
|
||||
raise ConanInvalidConfiguration("{} ORACLE {} ".format(prefix, message))
|
||||
if self.options.with_firebird:
|
||||
# self.requires("firebird/0.0.0") # TODO add support for firebird
|
||||
raise ConanInvalidConfiguration("{} firebird {} ".format(prefix, message))
|
||||
|
||||
def source(self):
|
||||
get(self, **self.conan_data["sources"][self.version], strip_root=True)
|
||||
|
||||
def generate(self):
|
||||
tc = CMakeToolchain(self)
|
||||
|
||||
tc.variables["SOCI_SHARED"] = self.options.shared
|
||||
tc.variables["SOCI_STATIC"] = not self.options.shared
|
||||
tc.variables["SOCI_TESTS"] = False
|
||||
tc.variables["SOCI_CXX11"] = True
|
||||
tc.variables["SOCI_EMPTY"] = self.options.empty
|
||||
tc.variables["WITH_SQLITE3"] = self.options.with_sqlite3
|
||||
tc.variables["WITH_DB2"] = self.options.with_db2
|
||||
tc.variables["WITH_ODBC"] = self.options.with_odbc
|
||||
tc.variables["WITH_ORACLE"] = self.options.with_oracle
|
||||
tc.variables["WITH_FIREBIRD"] = self.options.with_firebird
|
||||
tc.variables["WITH_MYSQL"] = self.options.with_mysql
|
||||
tc.variables["WITH_POSTGRESQL"] = self.options.with_postgresql
|
||||
tc.variables["WITH_BOOST"] = self.options.with_boost
|
||||
tc.generate()
|
||||
|
||||
deps = CMakeDeps(self)
|
||||
deps.generate()
|
||||
|
||||
def build(self):
|
||||
apply_conandata_patches(self)
|
||||
cmake = CMake(self)
|
||||
cmake.configure()
|
||||
cmake.build()
|
||||
|
||||
def package(self):
|
||||
copy(self, "LICENSE_1_0.txt", dst=os.path.join(self.package_folder, "licenses"), src=self.source_folder)
|
||||
|
||||
cmake = CMake(self)
|
||||
cmake.install()
|
||||
|
||||
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
|
||||
|
||||
def package_info(self):
|
||||
self.cpp_info.set_property("cmake_file_name", "SOCI")
|
||||
|
||||
target_suffix = "" if self.options.shared else "_static"
|
||||
lib_prefix = "lib" if is_msvc(self) and not self.options.shared else ""
|
||||
version = Version(self.version)
|
||||
lib_suffix = "_{}_{}".format(version.major, version.minor) if self.settings.os == "Windows" else ""
|
||||
|
||||
# soci_core
|
||||
self.cpp_info.components["soci_core"].set_property("cmake_target_name", "SOCI::soci_core{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_core"].libs = ["{}soci_core{}".format(lib_prefix, lib_suffix)]
|
||||
if self.options.with_boost:
|
||||
self.cpp_info.components["soci_core"].requires.append("boost::boost")
|
||||
|
||||
# soci_empty
|
||||
if self.options.empty:
|
||||
self.cpp_info.components["soci_empty"].set_property("cmake_target_name", "SOCI::soci_empty{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_empty"].libs = ["{}soci_empty{}".format(lib_prefix, lib_suffix)]
|
||||
self.cpp_info.components["soci_empty"].requires = ["soci_core"]
|
||||
|
||||
# soci_sqlite3
|
||||
if self.options.with_sqlite3:
|
||||
self.cpp_info.components["soci_sqlite3"].set_property("cmake_target_name", "SOCI::soci_sqlite3{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_sqlite3"].libs = ["{}soci_sqlite3{}".format(lib_prefix, lib_suffix)]
|
||||
self.cpp_info.components["soci_sqlite3"].requires = ["soci_core", "sqlite3::sqlite3"]
|
||||
|
||||
# soci_odbc
|
||||
if self.options.with_odbc:
|
||||
self.cpp_info.components["soci_odbc"].set_property("cmake_target_name", "SOCI::soci_odbc{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_odbc"].libs = ["{}soci_odbc{}".format(lib_prefix, lib_suffix)]
|
||||
self.cpp_info.components["soci_odbc"].requires = ["soci_core"]
|
||||
if self.settings.os == "Windows":
|
||||
self.cpp_info.components["soci_odbc"].system_libs.append("odbc32")
|
||||
else:
|
||||
self.cpp_info.components["soci_odbc"].requires.append("odbc::odbc")
|
||||
|
||||
# soci_mysql
|
||||
if self.options.with_mysql:
|
||||
self.cpp_info.components["soci_mysql"].set_property("cmake_target_name", "SOCI::soci_mysql{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_mysql"].libs = ["{}soci_mysql{}".format(lib_prefix, lib_suffix)]
|
||||
self.cpp_info.components["soci_mysql"].requires = ["soci_core", "libmysqlclient::libmysqlclient"]
|
||||
|
||||
# soci_postgresql
|
||||
if self.options.with_postgresql:
|
||||
self.cpp_info.components["soci_postgresql"].set_property("cmake_target_name", "SOCI::soci_postgresql{}".format(target_suffix))
|
||||
self.cpp_info.components["soci_postgresql"].libs = ["{}soci_postgresql{}".format(lib_prefix, lib_suffix)]
|
||||
self.cpp_info.components["soci_postgresql"].requires = ["soci_core", "libpq::libpq"]
|
||||
|
||||
# TODO: to remove in conan v2 once cmake_find_package* generators removed
|
||||
self.cpp_info.names["cmake_find_package"] = "SOCI"
|
||||
self.cpp_info.names["cmake_find_package_multi"] = "SOCI"
|
||||
self.cpp_info.components["soci_core"].names["cmake_find_package"] = "soci_core{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_core"].names["cmake_find_package_multi"] = "soci_core{}".format(target_suffix)
|
||||
if self.options.empty:
|
||||
self.cpp_info.components["soci_empty"].names["cmake_find_package"] = "soci_empty{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_empty"].names["cmake_find_package_multi"] = "soci_empty{}".format(target_suffix)
|
||||
if self.options.with_sqlite3:
|
||||
self.cpp_info.components["soci_sqlite3"].names["cmake_find_package"] = "soci_sqlite3{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_sqlite3"].names["cmake_find_package_multi"] = "soci_sqlite3{}".format(target_suffix)
|
||||
if self.options.with_odbc:
|
||||
self.cpp_info.components["soci_odbc"].names["cmake_find_package"] = "soci_odbc{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_odbc"].names["cmake_find_package_multi"] = "soci_odbc{}".format(target_suffix)
|
||||
if self.options.with_mysql:
|
||||
self.cpp_info.components["soci_mysql"].names["cmake_find_package"] = "soci_mysql{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_mysql"].names["cmake_find_package_multi"] = "soci_mysql{}".format(target_suffix)
|
||||
if self.options.with_postgresql:
|
||||
self.cpp_info.components["soci_postgresql"].names["cmake_find_package"] = "soci_postgresql{}".format(target_suffix)
|
||||
self.cpp_info.components["soci_postgresql"].names["cmake_find_package_multi"] = "soci_postgresql{}".format(target_suffix)
|
||||
@@ -1,39 +0,0 @@
From d491bf7b5040d314ffd0c6310ba01f78ff44c85e Mon Sep 17 00:00:00 2001
From: Rasmus Thomsen <rasmus.thomsen@dampsoft.de>
Date: Fri, 14 Apr 2023 09:16:29 +0200
Subject: [PATCH] Remove hardcoded INSTALL_NAME_DIR for relocatable libraries
 on MacOS

---
 cmake/SociBackend.cmake | 2 +-
 src/core/CMakeLists.txt | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/cmake/SociBackend.cmake b/cmake/SociBackend.cmake
index 5d4ef0df..39fe1f77 100644
--- a/cmake/SociBackend.cmake
+++ b/cmake/SociBackend.cmake
@@ -171,7 +171,7 @@ macro(soci_backend NAME)
    set_target_properties(${THIS_BACKEND_TARGET}
      PROPERTIES
      SOVERSION ${${PROJECT_NAME}_SOVERSION}
-      INSTALL_NAME_DIR ${CMAKE_INSTALL_PREFIX}/lib)
+      )

    if(APPLE)
      set_target_properties(${THIS_BACKEND_TARGET}
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 3e7deeae..f9eae564 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -59,7 +59,6 @@ if (SOCI_SHARED)
    PROPERTIES
    VERSION ${SOCI_VERSION}
    SOVERSION ${SOCI_SOVERSION}
-    INSTALL_NAME_DIR ${CMAKE_INSTALL_PREFIX}/lib
    CLEAN_DIRECT_OUTPUT 1)
  endif()

--
2.25.1

@@ -1,24 +0,0 @@
diff --git a/cmake/SociBackend.cmake b/cmake/SociBackend.cmake
index 0a664667..3fa2ed95 100644
--- a/cmake/SociBackend.cmake
+++ b/cmake/SociBackend.cmake
@@ -31,14 +31,13 @@ macro(soci_backend_deps_found NAME DEPS SUCCESS)
    if(NOT DEPEND_FOUND)
      list(APPEND DEPS_NOT_FOUND ${dep})
    else()
-      string(TOUPPER "${dep}" DEPU)
-      if( ${DEPU}_INCLUDE_DIR )
-        list(APPEND DEPS_INCLUDE_DIRS ${${DEPU}_INCLUDE_DIR})
+      if( ${dep}_INCLUDE_DIR )
+        list(APPEND DEPS_INCLUDE_DIRS ${${dep}_INCLUDE_DIR})
      endif()
-      if( ${DEPU}_INCLUDE_DIRS )
-        list(APPEND DEPS_INCLUDE_DIRS ${${DEPU}_INCLUDE_DIRS})
+      if( ${dep}_INCLUDE_DIRS )
+        list(APPEND DEPS_INCLUDE_DIRS ${${dep}_INCLUDE_DIRS})
      endif()
-      list(APPEND DEPS_LIBRARIES ${${DEPU}_LIBRARIES})
+      list(APPEND DEPS_LIBRARIES ${${dep}_LIBRARIES})
    endif()
  endforeach()
194
external/wasmedge/conandata.yml
vendored
@@ -1,194 +0,0 @@
|
||||
sources:
|
||||
"0.13.5":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-windows.zip"
|
||||
sha256: "db533289ba26ec557b5193593c9ed03db75be3bc7aa737e2caa5b56b8eef888a"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.13.5/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "3686e0226871bf17b62ec57e1c15778c2947834b90af0dfad14f2e0202bf9284"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.13.5/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "472de88e0257c539c120b33fdd1805e1e95063121acc2df1d5626e4676b93529"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Macos:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-darwin_x86_64.tar.gz"
|
||||
sha256: "b7fdfaf59805951241f47690917b501ddfa06d9b6f7e0262e44e784efe4a7b33"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.13.5/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-darwin_arm64.tar.gz"
|
||||
sha256: "acc93721210294ced0887352f360e42e46dcc05332e6dd78c1452fb3a35d5255"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.13.5/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Android:
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.13.5/WasmEdge-0.13.5-android_aarch64.tar.gz"
|
||||
sha256: "59a0d68a0c7368b51cc65cb5a44a68037d79fd449883ef42792178d57c8784a8"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.13.5/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"0.11.2":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-windows.zip"
|
||||
sha256: "ca49b98c0cf5f187e08c3ba71afc8d71365fde696f10b4219379a4a4d1a91e6d"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.2/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "784bf1eb25928e2cf02aa88e9372388fad682b4a188485da3cd9162caeedf143"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.2/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "a2766a4c1edbaea298a30e5431a4e795003a10d8398a933d923f23d4eb4fa5d1"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Macos:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-darwin_x86_64.tar.gz"
|
||||
sha256: "aedec53f29b1e0b657e46e67dba3e2f32a2924f4d9136e60073ea1aba3073e70"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.2/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-darwin_arm64.tar.gz"
|
||||
sha256: "fe391df90e1eee69cf1e976f5ddf60c20f29b651710aaa4fc03e2ab4fe52c0d3"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.2/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Android:
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.2/WasmEdge-0.11.2-android_aarch64.tar.gz"
|
||||
sha256: "69e308f5927c753b2bb5639569d10219b60598174d8b304bdf310093fd7b2464"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.2/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"0.11.1":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-windows.zip"
|
||||
sha256: "c86f6384555a0484a5dd81faba5636bba78f5e3d6eaf627d880e34843f9e24bf"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "76ce4ea0eb86adfa52c73f6c6b44383626d94990e0923cae8b1e6f060ef2bf5b"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "cb9ea32932360463991cfda80e09879b2cf6c69737f12f3f2b371cd0af4e9ce8"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Macos:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-darwin_x86_64.tar.gz"
|
||||
sha256: "56df2b00669c25b8143ea2c17370256cd6a33f3b316d3b47857dd38d603cb69a"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-darwin_arm64.tar.gz"
|
||||
sha256: "82f7da1a7a36ec1923fb045193784dd090a03109e84da042af97297205a71f08"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Android:
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.11.1/WasmEdge-0.11.1-android_aarch64.tar.gz"
|
||||
sha256: "af8694e93bf72ac5506450d4caebccc340fbba254dca3d58ec0712e96ec9dedd"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.11.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"0.10.0":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.10.0/WasmEdge-0.10.0-windows.zip"
|
||||
sha256: "63b8a02cced52a723aa283dba02bbe887656256ecca69bb0fff17872c0fb5ebc"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.10.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.10.0/WasmEdge-0.10.0-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "4c1ffca9fd8cbdeb8f0951ddaffbbefe81ae123d5b80f61e80ea8d9b56853cde"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.10.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.10.0/WasmEdge-0.10.0-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "c000bf96d0a73a1d360659246c0806c2ce78620b6f78c1147fbf9e2be0280bd9"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.10.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"0.9.1":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.1/WasmEdge-0.9.1-windows.zip"
|
||||
sha256: "68240d8aee23d44db5cc252d8c1cf5d0c77ab709a122af2747a4b836ba461671"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.1/WasmEdge-0.9.1-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "bcb6fe3d6e30db0d0aa267ec3bd9b7248f8c8c387620cef4049d682d293c8371"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.1/WasmEdge-0.9.1-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "515bcac3520cd546d9d14372b7930ab48b43f1c5dc258a9f61a82b22c0107eef"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.1/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"0.9.0":
|
||||
Windows:
|
||||
"x86_64":
|
||||
Visual Studio:
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.0/WasmEdge-0.9.0-windows.zip"
|
||||
sha256: "f81bfea4cf09053510e3e74c16c1ee010fc93def8a7e78744443b950f0011c3b"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Linux:
|
||||
"x86_64":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.0/WasmEdge-0.9.0-manylinux2014_x86_64.tar.gz"
|
||||
sha256: "27847f15e4294e707486458e857d7cb11806481bb67a26f076a717a1446827ed"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.0/WasmEdge-0.9.0-manylinux2014_aarch64.tar.gz"
|
||||
sha256: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
Macos:
|
||||
"armv8":
|
||||
"gcc":
|
||||
- url: "https://github.com/WasmEdge/WasmEdge/releases/download/0.9.0/WasmEdge-0.9.0-darwin_arm64.tar.gz"
|
||||
sha256: "236a407a646f746ab78a1d0a39fa4e85fe28eae219b1635ba49f908d7944686d"
|
||||
- url: "https://raw.githubusercontent.com/WasmEdge/WasmEdge/0.9.0/LICENSE"
|
||||
sha256: "c71d239df91726fc519c6eb72d318ec65820627232b2f796219e87dcf35d0ab4"
|
||||
92
external/wasmedge/conanfile.py
vendored
@@ -1,92 +0,0 @@
from conan import ConanFile
from conan.tools.files import get, copy, download
from conan.tools.scm import Version
from conan.errors import ConanInvalidConfiguration

import os

required_conan_version = ">=1.53.0"

class WasmedgeConan(ConanFile):
    name = "wasmedge"
    description = ("WasmEdge is a lightweight, high-performance, and extensible WebAssembly runtime"
                   "for cloud native, edge, and decentralized applications."
                   "It powers serverless apps, embedded functions, microservices, smart contracts, and IoT devices.")
    license = "Apache-2.0"
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://github.com/WasmEdge/WasmEdge/"
    topics = ("webassembly", "wasm", "wasi", "emscripten")
    package_type = "shared-library"
    settings = "os", "arch", "compiler", "build_type"

    @property
    def _compiler_alias(self):
        return {
            "Visual Studio": "Visual Studio",
            # "Visual Studio": "msvc",
            "msvc": "msvc",
        }.get(str(self.info.settings.compiler), "gcc")

    def configure(self):
        self.settings.compiler.rm_safe("libcxx")
        self.settings.compiler.rm_safe("cppstd")

    def validate(self):
        try:
            self.conan_data["sources"][self.version][str(self.settings.os)][str(self.settings.arch)][self._compiler_alias]
        except KeyError:
            raise ConanInvalidConfiguration("Binaries for this combination of version/os/arch/compiler are not available")

    def package_id(self):
        del self.info.settings.compiler.version
        self.info.settings.compiler = self._compiler_alias

    def build(self):
        # This is packaging binaries so the download needs to be in build
        get(self, **self.conan_data["sources"][self.version][str(self.settings.os)][str(self.settings.arch)][self._compiler_alias][0],
            destination=self.source_folder, strip_root=True)
        download(self, filename="LICENSE",
                 **self.conan_data["sources"][self.version][str(self.settings.os)][str(self.settings.arch)][self._compiler_alias][1])

    def package(self):
        copy(self, pattern="*.h", dst=os.path.join(self.package_folder, "include"), src=os.path.join(self.source_folder, "include"), keep_path=True)
        copy(self, pattern="*.inc", dst=os.path.join(self.package_folder, "include"), src=os.path.join(self.source_folder, "include"), keep_path=True)

        srclibdir = os.path.join(self.source_folder, "lib64" if self.settings.os == "Linux" else "lib")
        srcbindir = os.path.join(self.source_folder, "bin")
        dstlibdir = os.path.join(self.package_folder, "lib")
        dstbindir = os.path.join(self.package_folder, "bin")
        if Version(self.version) >= "0.11.1":
            copy(self, pattern="wasmedge.lib", src=srclibdir, dst=dstlibdir, keep_path=False)
            copy(self, pattern="wasmedge.dll", src=srcbindir, dst=dstbindir, keep_path=False)
            copy(self, pattern="libwasmedge.so*", src=srclibdir, dst=dstlibdir, keep_path=False)
            copy(self, pattern="libwasmedge*.dylib", src=srclibdir, dst=dstlibdir, keep_path=False)
        else:
            copy(self, pattern="wasmedge_c.lib", src=srclibdir, dst=dstlibdir, keep_path=False)
            copy(self, pattern="wasmedge_c.dll", src=srcbindir, dst=dstbindir, keep_path=False)
            copy(self, pattern="libwasmedge_c.so*", src=srclibdir, dst=dstlibdir, keep_path=False)
            copy(self, pattern="libwasmedge_c*.dylib", src=srclibdir, dst=dstlibdir, keep_path=False)

        copy(self, pattern="wasmedge*", src=srcbindir, dst=dstbindir, keep_path=False)
        copy(self, pattern="LICENSE", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"), keep_path=False)

    def package_info(self):
        if Version(self.version) >= "0.11.1":
            self.cpp_info.libs = ["wasmedge"]
        else:
            self.cpp_info.libs = ["wasmedge_c"]

        bindir = os.path.join(self.package_folder, "bin")
        self.output.info("Appending PATH environment variable: {}".format(bindir))
        self.env_info.PATH.append(bindir)

        if self.settings.os == "Windows":
            self.cpp_info.system_libs.append("ws2_32")
            self.cpp_info.system_libs.append("wsock32")
            self.cpp_info.system_libs.append("shlwapi")

        if self.settings.os in ["Linux", "FreeBSD"]:
            self.cpp_info.system_libs.append("m")
            self.cpp_info.system_libs.append("dl")
            self.cpp_info.system_libs.append("rt")
            self.cpp_info.system_libs.append("pthread")
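validate() above only checks that a prebuilt binary exists for the current version/os/arch/compiler-alias tuple in conandata.yml. The lookup amounts to the sketch below; the data values are illustrative placeholders, mirroring the structure of the conandata.yml shown earlier:

sources = {
    "0.13.5": {
        "Linux": {"x86_64": {"gcc": ["<tarball url>", "<license url>"]}},
    },
}

def has_prebuilt(version, os_name, arch, compiler_alias):
    # Same nested indexing validate() performs on self.conan_data["sources"].
    try:
        sources[version][os_name][arch][compiler_alias]
        return True
    except KeyError:
        return False

assert has_prebuilt("0.13.5", "Linux", "x86_64", "gcc")
assert not has_prebuilt("0.13.5", "Linux", "riscv64", "gcc")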
7602
patches/0001-Revert-Use-the-Conan-package-manager-4367.patch
Normal file
File diff suppressed because it is too large
@@ -17,6 +17,7 @@ if [[ "$GITHUB_REPOSITORY" == "" ]]; then
  BUILD_CORES=8
fi

EXIT_IF_CONTAINER_RUNNING=${EXIT_IF_CONTAINER_RUNNING:-1}
# Ensure still works outside of GH Actions by setting these to /dev/null
# GA will run this script and then delete it at the end of the job
JOB_CLEANUP_SCRIPT=${JOB_CLEANUP_SCRIPT:-/dev/null}
@@ -24,6 +25,19 @@ NORMALIZED_WORKFLOW=$(echo "$GITHUB_WORKFLOW" | tr -c 'a-zA-Z0-9' '-')
NORMALIZED_REF=$(echo "$GITHUB_REF" | tr -c 'a-zA-Z0-9' '-')
CONTAINER_NAME="xahaud_cached_builder_${NORMALIZED_WORKFLOW}-${NORMALIZED_REF}"

# Check if the container is already running
if docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
  echo "⚠️ A running container (${CONTAINER_NAME}) was detected."

  if [[ "$EXIT_IF_CONTAINER_RUNNING" -eq 1 ]]; then
    echo "❌ EXIT_IF_CONTAINER_RUNNING is set. Exiting."
    exit 1
  else
    echo "🛑 Stopping the running container: ${CONTAINER_NAME}"
    docker stop "${CONTAINER_NAME}"
  fi
fi

echo "-- BUILD CORES: $BUILD_CORES"
echo "-- GITHUB_REPOSITORY: $GITHUB_REPOSITORY"
echo "-- GITHUB_SHA: $GITHUB_SHA"
@@ -46,7 +60,7 @@ fi

STATIC_CONTAINER=$(docker ps -a | grep $CONTAINER_NAME |wc -l)

# if [[ "$STATIC_CONTAINER" -gt "0" && "$GITHUB_REPOSITORY" != "" ]]; then
#if [[ "$STATIC_CONTAINER" -gt "0" && "$GITHUB_REPOSITORY" != "" ]]; then
if false; then
  echo "Static container, execute in static container to have max. cache"
  docker start $CONTAINER_NAME

@@ -1,48 +0,0 @@
cmake_minimum_required(VERSION 3.11)

project(ed25519
  LANGUAGES C
)

if(PROJECT_NAME STREQUAL CMAKE_PROJECT_NAME)
  set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/output/$<CONFIG>/lib")
endif()

if(NOT TARGET OpenSSL::SSL)
  find_package(OpenSSL)
endif()

add_library(ed25519 STATIC
  ed25519.c
)
add_library(ed25519::ed25519 ALIAS ed25519)
target_link_libraries(ed25519 PUBLIC OpenSSL::SSL)

include(GNUInstallDirs)

#[=========================================================[
   NOTE for macos:
   https://github.com/floodyberry/ed25519-donna/issues/29
   our source for ed25519-donna-portable.h has been
   patched to workaround this.
#]=========================================================]
target_include_directories(ed25519 PUBLIC
  $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>
)

install(
  TARGETS ed25519
  EXPORT ${PROJECT_NAME}-exports
  ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
)
install(
  EXPORT ${PROJECT_NAME}-exports
  DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}"
  FILE ${PROJECT_NAME}-targets.cmake
  NAMESPACE ${PROJECT_NAME}::
)
install(
  FILES ed25519.h
  DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
)
@@ -39,7 +39,7 @@ BookListeners::removeSubscriber(std::uint64_t seq)
|
||||
|
||||
void
|
||||
BookListeners::publish(
|
||||
MultiApiJson const& jvObj,
|
||||
Json::Value const& jvObj,
|
||||
hash_set<std::uint64_t>& havePublished)
|
||||
{
|
||||
std::lock_guard sl(mLock);
|
||||
@@ -54,8 +54,7 @@ BookListeners::publish(
|
||||
// Only publish jvObj if this is the first occurence
|
||||
if (havePublished.emplace(p->getSeq()).second)
|
||||
{
|
||||
p->send(
|
||||
jvObj.select(apiVersionSelector(p->getApiVersion())), true);
|
||||
p->send(jvObj, true);
|
||||
}
|
||||
++it;
|
||||
}
|
||||
|
||||
@@ -20,9 +20,7 @@
|
||||
#ifndef RIPPLE_APP_LEDGER_BOOKLISTENERS_H_INCLUDED
|
||||
#define RIPPLE_APP_LEDGER_BOOKLISTENERS_H_INCLUDED
|
||||
|
||||
#include <ripple/json/MultivarJson.h>
|
||||
#include <ripple/net/InfoSub.h>
|
||||
|
||||
#include <memory>
|
||||
#include <mutex>
|
||||
|
||||
@@ -60,7 +58,7 @@ public:
|
||||
|
||||
*/
|
||||
void
|
||||
publish(MultiApiJson const& jvObj, hash_set<std::uint64_t>& havePublished);
|
||||
publish(Json::Value const& jvObj, hash_set<std::uint64_t>& havePublished);
|
||||
|
||||
private:
|
||||
std::recursive_mutex mLock;
|
||||
|
||||
@@ -197,6 +197,14 @@ private:
|
||||
std::unique_ptr<PeerSet> mPeerSet;
|
||||
};
|
||||
|
||||
/** Deserialize a ledger header from a byte array. */
|
||||
LedgerInfo
|
||||
deserializeHeader(Slice data, bool hasHash = false);
|
||||
|
||||
/** Deserialize a ledger header (prefixed with 4 bytes) from a byte array. */
|
||||
LedgerInfo
|
||||
deserializePrefixedHeader(Slice data, bool hasHash = false);
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
#endif
|
||||
|
||||
@@ -94,9 +94,6 @@ public:
|
||||
|
||||
virtual void
|
||||
stop() = 0;
|
||||
|
||||
virtual std::size_t
|
||||
cacheSize() = 0;
|
||||
};
|
||||
|
||||
std::unique_ptr<InboundLedgers>
|
||||
|
||||
@@ -311,10 +311,10 @@ Ledger::Ledger(
|
||||
Family& family,
|
||||
SHAMap const& baseState)
|
||||
: mImmutable(false)
|
||||
, info_(info)
|
||||
, txMap_(SHAMapType::TRANSACTION, family)
|
||||
, stateMap_(baseState, true)
|
||||
, rules_{config.features}
|
||||
, info_(info)
|
||||
, j_(beast::Journal(beast::Journal::getNullSink()))
|
||||
{
|
||||
}
|
||||
|
||||
@@ -93,7 +93,7 @@ public:
|
||||
/** Repair a hash to index mapping
|
||||
@param ledgerIndex The index whose mapping is to be repaired
|
||||
@param ledgerHash The hash it is to be mapped to
|
||||
@return `false` if the mapping was repaired
|
||||
@return `true` if the mapping was repaired
|
||||
*/
|
||||
bool
|
||||
fixIndex(LedgerIndex ledgerIndex, LedgerHash const& ledgerHash);
|
||||
|
||||
@@ -227,8 +227,6 @@ public:
|
||||
void
|
||||
clearLedger(std::uint32_t seq);
|
||||
bool
|
||||
isValidated(ReadView const& ledger);
|
||||
bool
|
||||
getValidatedRange(std::uint32_t& minVal, std::uint32_t& maxVal);
|
||||
bool
|
||||
getFullValidatedRange(std::uint32_t& minVal, std::uint32_t& maxVal);
|
||||
|
||||
@@ -125,27 +125,6 @@ public:
|
||||
void
|
||||
stop();
|
||||
|
||||
std::size_t
|
||||
tasksSize() const
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(mtx_);
|
||||
return tasks_.size();
|
||||
}
|
||||
|
||||
std::size_t
|
||||
deltasSize() const
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(mtx_);
|
||||
return deltas_.size();
|
||||
}
|
||||
|
||||
std::size_t
|
||||
skipListsSize() const
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(mtx_);
|
||||
return skipLists_.size();
|
||||
}
|
||||
|
||||
private:
|
||||
mutable std::mutex mtx_;
|
||||
std::vector<std::shared_ptr<LedgerReplayTask>> tasks_;
|
||||
|
||||
@@ -21,14 +21,11 @@
|
||||
#define RIPPLE_APP_LEDGER_LEDGERTOJSON_H_INCLUDED
|
||||
|
||||
#include <ripple/app/ledger/Ledger.h>
|
||||
#include <ripple/app/ledger/LedgerMaster.h>
|
||||
#include <ripple/app/misc/TxQ.h>
|
||||
#include <ripple/basics/StringUtilities.h>
|
||||
#include <ripple/basics/chrono.h>
|
||||
#include <ripple/json/Object.h>
|
||||
#include <ripple/protocol/STTx.h>
|
||||
#include <ripple/protocol/jss.h>
|
||||
#include <ripple/protocol/serialize.h>
|
||||
#include <ripple/rpc/Context.h>
|
||||
|
||||
namespace ripple {
|
||||
@@ -43,8 +40,6 @@ struct LedgerFill
|
||||
LedgerEntryType t = ltANY)
|
||||
: ledger(l), options(o), txQueue(std::move(q)), type(t), context(ctx)
|
||||
{
|
||||
if (context)
|
||||
closeTime = context->ledgerMaster.getCloseTimeBySeq(ledger.seq());
|
||||
}
|
||||
|
||||
enum Options {
|
||||
@@ -62,7 +57,6 @@ struct LedgerFill
|
||||
std::vector<TxQ::TxDetails> txQueue;
|
||||
LedgerEntryType type;
|
||||
RPC::Context* context;
|
||||
std::optional<NetClock::time_point> closeTime;
|
||||
};
|
||||
|
||||
/** Given a Ledger and options, fill a Json::Object or Json::Value with a
|
||||
@@ -76,6 +70,22 @@ addJson(Json::Value&, LedgerFill const&);
|
||||
Json::Value
|
||||
getJson(LedgerFill const&);
|
||||
|
||||
/** Serialize an object to a blob. */
|
||||
template <class Object>
|
||||
Blob
|
||||
serializeBlob(Object const& o)
|
||||
{
|
||||
Serializer s;
|
||||
o.add(s);
|
||||
return s.peekData();
|
||||
}
|
||||
|
||||
/** Serialize an object to a hex string. */
|
||||
inline std::string
|
||||
serializeHex(STObject const& o)
|
||||
{
|
||||
return strHex(serializeBlob(o));
|
||||
}
|
||||
} // namespace ripple
|
||||
|
||||
#endif
|
||||
|
||||
@@ -20,7 +20,6 @@
|
||||
#include <ripple/app/ledger/LedgerMaster.h>
|
||||
#include <ripple/app/ledger/OrderBookDB.h>
|
||||
#include <ripple/app/main/Application.h>
|
||||
#include <ripple/app/misc/AMMUtils.h>
|
||||
#include <ripple/app/misc/NetworkOPs.h>
|
||||
#include <ripple/basics/Log.h>
|
||||
#include <ripple/core/Config.h>
|
||||
@@ -94,7 +93,7 @@ OrderBookDB::update(std::shared_ptr<ReadView const> const& ledger)
|
||||
|
||||
JLOG(j_.debug()) << "Beginning update (" << ledger->seq() << ")";
|
||||
|
||||
// walk through the entire ledger looking for orderbook/AMM entries
|
||||
// walk through the entire ledger looking for orderbook entries
|
||||
int cnt = 0;
|
||||
|
||||
try
|
||||
@@ -127,21 +126,6 @@ OrderBookDB::update(std::shared_ptr<ReadView const> const& ledger)
|
||||
|
||||
++cnt;
|
||||
}
|
||||
else if (sle->getType() == ltAMM)
|
||||
{
|
||||
auto const issue1 = (*sle)[sfAsset];
|
||||
auto const issue2 = (*sle)[sfAsset2];
|
||||
auto addBook = [&](Issue const& in, Issue const& out) {
|
||||
allBooks[in].insert(out);
|
||||
|
||||
if (isXRP(out))
|
||||
xrpBooks.insert(in);
|
||||
|
||||
++cnt;
|
||||
};
|
||||
addBook(issue1, issue2);
|
||||
addBook(issue2, issue1);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (SHAMapMissingNode const& mn)
|
||||
@@ -250,7 +234,7 @@ void
|
||||
OrderBookDB::processTxn(
|
||||
std::shared_ptr<ReadView const> const& ledger,
|
||||
const AcceptedLedgerTx& alTx,
|
||||
MultiApiJson const& jvObj)
|
||||
Json::Value const& jvObj)
|
||||
{
|
||||
std::lock_guard sl(mLock);
|
||||
|
||||
|
||||
@@ -23,8 +23,6 @@
|
||||
#include <ripple/app/ledger/AcceptedLedgerTx.h>
|
||||
#include <ripple/app/ledger/BookListeners.h>
|
||||
#include <ripple/app/main/Application.h>
|
||||
#include <ripple/json/MultivarJson.h>
|
||||
|
||||
#include <mutex>
|
||||
|
||||
namespace ripple {
|
||||
@@ -65,7 +63,7 @@ public:
|
||||
processTxn(
|
||||
std::shared_ptr<ReadView const> const& ledger,
|
||||
const AcceptedLedgerTx& alTx,
|
||||
MultiApiJson const& jvObj);
|
||||
Json::Value const& jvObj);
|
||||
|
||||
private:
|
||||
Application& app_;
|
||||
|
||||
@@ -269,6 +269,36 @@ InboundLedger::neededStateHashes(int max, SHAMapSyncFilter* filter) const
|
||||
mLedger->info().accountHash, mLedger->stateMap(), max, filter);
|
||||
}
|
||||
|
||||
LedgerInfo
|
||||
deserializeHeader(Slice data, bool hasHash)
|
||||
{
|
||||
SerialIter sit(data.data(), data.size());
|
||||
|
||||
LedgerInfo info;
|
||||
|
||||
info.seq = sit.get32();
|
||||
info.drops = sit.get64();
|
||||
info.parentHash = sit.get256();
|
||||
info.txHash = sit.get256();
|
||||
info.accountHash = sit.get256();
|
||||
info.parentCloseTime =
|
||||
NetClock::time_point{NetClock::duration{sit.get32()}};
|
||||
info.closeTime = NetClock::time_point{NetClock::duration{sit.get32()}};
|
||||
info.closeTimeResolution = NetClock::duration{sit.get8()};
|
||||
info.closeFlags = sit.get8();
|
||||
|
||||
if (hasHash)
|
||||
info.hash = sit.get256();
|
||||
|
||||
return info;
|
||||
}
|
||||
|
||||
LedgerInfo
|
||||
deserializePrefixedHeader(Slice data, bool hasHash)
|
||||
{
|
||||
return deserializeHeader(data + 4, hasHash);
|
||||
}
|
||||
|
||||
// See how much of the ledger data is stored locally
|
||||
// Data found in a fetch pack will be stored
|
||||
void
|
||||
@@ -532,9 +562,10 @@ InboundLedger::trigger(std::shared_ptr<Peer> const& peer, TriggerReason reason)
|
||||
|
||||
if (auto stream = journal_.debug())
|
||||
{
|
||||
stream << "Trigger acquiring ledger " << hash_;
|
||||
if (peer)
|
||||
stream << " from " << peer;
|
||||
stream << "Trigger acquiring ledger " << hash_ << " from " << peer;
|
||||
else
|
||||
stream << "Trigger acquiring ledger " << hash_;
|
||||
|
||||
if (complete_ || failed_)
|
||||
stream << "complete=" << complete_ << " failed=" << failed_;
|
||||
|
||||
@@ -443,13 +443,6 @@ public:
|
||||
mRecentFailures.clear();
|
||||
}
|
||||
|
||||
std::size_t
|
||||
cacheSize() override
|
||||
{
|
||||
ScopedLockType lock(mLock);
|
||||
return mLedgers.size();
|
||||
}
|
||||
|
||||
private:
|
||||
clock_type& m_clock;
|
||||
|
||||
|
||||
@@ -616,54 +616,6 @@ LedgerMaster::clearLedger(std::uint32_t seq)
|
||||
mCompleteLedgers.erase(seq);
|
||||
}
|
||||
|
||||
bool
|
||||
LedgerMaster::isValidated(ReadView const& ledger)
|
||||
{
|
||||
if (app_.config().reporting())
|
||||
return true; // Reporting mode only supports validated ledger
|
||||
|
||||
if (ledger.open())
|
||||
return false;
|
||||
|
||||
if (ledger.info().validated)
|
||||
return true;
|
||||
|
||||
auto const seq = ledger.info().seq;
|
||||
try
|
||||
{
|
||||
// Use the skip list in the last validated ledger to see if ledger
|
||||
// comes before the last validated ledger (and thus has been
|
||||
// validated).
|
||||
auto const hash = walkHashBySeq(seq, InboundLedger::Reason::GENERIC);
|
||||
|
||||
if (!hash || ledger.info().hash != *hash)
|
||||
{
|
||||
// This ledger's hash is not the hash of the validated ledger
|
||||
if (hash)
|
||||
{
|
||||
assert(hash->isNonZero());
|
||||
uint256 valHash =
|
||||
app_.getRelationalDatabase().getHashByIndex(seq);
|
||||
if (valHash == ledger.info().hash)
|
||||
{
|
||||
// SQL database doesn't match ledger chain
|
||||
clearLedger(seq);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
catch (SHAMapMissingNode const& mn)
|
||||
{
|
||||
JLOG(m_journal.warn()) << "Ledger #" << seq << ": " << mn.what();
|
||||
return false;
|
||||
}
|
||||
|
||||
// Mark ledger as validated to save time if we see it again.
|
||||
ledger.info().validated = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
// returns Ledgers we have all the nodes for
|
||||
bool
|
||||
LedgerMaster::getFullValidatedRange(
|
||||
@@ -1566,7 +1518,6 @@ LedgerMaster::updatePaths()
|
||||
if (app_.getOPs().isNeedNetworkLedger())
|
||||
{
|
||||
--mPathFindThread;
|
||||
mPathLedger.reset();
|
||||
JLOG(m_journal.debug()) << "Need network ledger for updating paths";
|
||||
return;
|
||||
}
|
||||
@@ -1592,7 +1543,6 @@ LedgerMaster::updatePaths()
|
||||
else
|
||||
{ // Nothing to do
|
||||
--mPathFindThread;
|
||||
mPathLedger.reset();
|
||||
JLOG(m_journal.debug()) << "Nothing to do for updating paths";
|
||||
return;
|
||||
}
|
||||
@@ -1609,7 +1559,6 @@ LedgerMaster::updatePaths()
|
||||
<< "Published ledger too old for updating paths";
|
||||
std::lock_guard ml(m_mutex);
|
||||
--mPathFindThread;
|
||||
mPathLedger.reset();
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -1622,7 +1571,6 @@ LedgerMaster::updatePaths()
|
||||
if (!pathRequests.requestsPending())
|
||||
{
|
||||
--mPathFindThread;
|
||||
mPathLedger.reset();
|
||||
JLOG(m_journal.debug())
|
||||
<< "No path requests found. Nothing to do for updating "
|
||||
"paths. "
|
||||
@@ -1640,7 +1588,6 @@ LedgerMaster::updatePaths()
|
||||
<< "No path requests left. No need for further updating "
|
||||
"paths";
|
||||
--mPathFindThread;
|
||||
mPathLedger.reset();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,7 +21,6 @@
|
||||
#include <ripple/app/ledger/LedgerReplayer.h>
|
||||
#include <ripple/app/ledger/impl/LedgerReplayMsgHandler.h>
|
||||
#include <ripple/app/main/Application.h>
|
||||
#include <ripple/protocol/LedgerHeader.h>
|
||||
|
||||
#include <memory>
|
||||
|
||||
|
||||
@@ -17,17 +17,13 @@
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#include <ripple/app/ledger/LedgerMaster.h>
|
||||
#include <ripple/app/ledger/LedgerToJson.h>
|
||||
#include <ripple/app/main/Application.h>
|
||||
#include <ripple/app/misc/DeliverMax.h>
|
||||
#include <ripple/app/misc/TxQ.h>
|
||||
#include <ripple/basics/base_uint.h>
|
||||
#include <ripple/core/Pg.h>
|
||||
#include <ripple/protocol/jss.h>
|
||||
#include <ripple/rpc/Context.h>
|
||||
#include <ripple/rpc/DeliveredAmount.h>
|
||||
#include <ripple/rpc/impl/RPCHelpers.h>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
@@ -53,17 +49,11 @@ isBinary(LedgerFill const& fill)
|
||||
|
||||
template <class Object>
|
||||
void
|
||||
fillJson(
|
||||
Object& json,
|
||||
bool closed,
|
||||
LedgerInfo const& info,
|
||||
bool bFull,
|
||||
unsigned apiVersion)
|
||||
fillJson(Object& json, bool closed, LedgerInfo const& info, bool bFull)
|
||||
{
|
||||
json[jss::parent_hash] = to_string(info.parentHash);
|
||||
json[jss::ledger_index] = (apiVersion > 1)
|
||||
? Json::Value(info.seq)
|
||||
: Json::Value(std::to_string(info.seq));
|
||||
json[jss::ledger_index] = to_string(info.seq);
|
||||
json[jss::seqNum] = to_string(info.seq); // DEPRECATED
|
||||
|
||||
if (closed)
|
||||
{
|
||||
@@ -80,6 +70,10 @@ fillJson(
|
||||
json[jss::account_hash] = to_string(info.accountHash);
|
||||
json[jss::total_coins] = to_string(info.drops);
|
||||
|
||||
// These next three are DEPRECATED.
|
||||
json[jss::hash] = to_string(info.hash);
|
||||
json[jss::totalCoins] = to_string(info.drops);
|
||||
json[jss::accepted] = closed;
|
||||
json[jss::close_flags] = info.closeFlags;
|
||||
|
||||
// Always show fields that contribute to the ledger hash
|
||||
@@ -93,7 +87,6 @@ fillJson(
|
||||
json[jss::close_time_human] = to_string(info.closeTime);
|
||||
if (!getCloseAgree(info))
|
||||
json[jss::close_time_estimated] = true;
|
||||
json[jss::close_time_iso] = to_string_iso(info.closeTime);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -129,56 +122,12 @@ fillJsonTx(
|
||||
if (bBinary)
|
||||
{
|
||||
txJson[jss::tx_blob] = serializeHex(*txn);
|
||||
if (fill.context->apiVersion > 1)
|
||||
txJson[jss::hash] = to_string(txn->getTransactionID());
|
||||
|
||||
auto const json_meta =
|
||||
(fill.context->apiVersion > 1 ? jss::meta_blob : jss::meta);
|
||||
if (stMeta)
|
||||
txJson[json_meta] = serializeHex(*stMeta);
|
||||
}
|
||||
else if (fill.context->apiVersion > 1)
|
||||
{
|
||||
copyFrom(
|
||||
txJson[jss::tx_json],
|
||||
txn->getJson(JsonOptions::disable_API_prior_V2, false));
|
||||
txJson[jss::hash] = to_string(txn->getTransactionID());
|
||||
RPC::insertDeliverMax(
|
||||
txJson[jss::tx_json], txnType, fill.context->apiVersion);
|
||||
|
||||
if (stMeta)
|
||||
{
|
||||
txJson[jss::meta] = stMeta->getJson(JsonOptions::none);
|
||||
|
||||
// If applicable, insert delivered amount
|
||||
if (txnType == ttPAYMENT || txnType == ttCHECK_CASH)
|
||||
RPC::insertDeliveredAmount(
|
||||
txJson[jss::meta],
|
||||
fill.ledger,
|
||||
txn,
|
||||
{txn->getTransactionID(), fill.ledger.seq(), *stMeta});
|
||||
}
|
||||
|
||||
if (!fill.ledger.open())
|
||||
txJson[jss::ledger_hash] = to_string(fill.ledger.info().hash);
|
||||
|
||||
const bool validated =
|
||||
fill.context->ledgerMaster.isValidated(fill.ledger);
|
||||
txJson[jss::validated] = validated;
|
||||
if (validated)
|
||||
{
|
||||
auto const seq = fill.ledger.seq();
|
||||
txJson[jss::ledger_index] = (fill.context->apiVersion > 1)
|
||||
? Json::Value(seq)
|
||||
: Json::Value(std::to_string(seq));
|
||||
if (fill.closeTime)
|
||||
txJson[jss::close_time_iso] = to_string_iso(*fill.closeTime);
|
||||
}
|
||||
txJson[jss::meta] = serializeHex(*stMeta);
|
||||
}
|
||||
else
|
||||
{
|
||||
copyFrom(txJson, txn->getJson(JsonOptions::none));
|
||||
RPC::insertDeliverMax(txJson, txnType, fill.context->apiVersion);
|
||||
if (stMeta)
|
||||
{
|
||||
txJson[jss::metaData] = stMeta->getJson(JsonOptions::none);
|
||||
@@ -308,11 +257,7 @@ fillJsonQueue(Object& json, LedgerFill const& fill)
|
||||
if (tx.lastResult)
|
||||
txJson["last_result"] = transToken(*tx.lastResult);
|
||||
|
||||
auto&& temp = fillJsonTx(fill, bBinary, bExpanded, tx.txn, nullptr);
|
||||
if (fill.context->apiVersion > 1)
|
||||
copyFrom(txJson, temp);
|
||||
else
|
||||
copyFrom(txJson[jss::tx], temp);
|
||||
txJson[jss::tx] = fillJsonTx(fill, bBinary, bExpanded, tx.txn, nullptr);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -326,13 +271,7 @@ fillJson(Object& json, LedgerFill const& fill)
|
||||
if (isBinary(fill))
|
||||
fillJsonBinary(json, !fill.ledger.open(), fill.ledger.info());
|
||||
else
|
||||
fillJson(
|
||||
json,
|
||||
!fill.ledger.open(),
|
||||
fill.ledger.info(),
|
||||
bFull,
|
||||
(fill.context ? fill.context->apiVersion
|
||||
: RPC::apiMaximumSupportedVersion));
|
||||
fillJson(json, !fill.ledger.open(), fill.ledger.info(), bFull);
|
||||
|
||||
if (bFull || fill.options & LedgerFill::dumpTxrp)
|
||||
fillJsonTx(json, fill);
|
||||
|
||||
@@ -605,13 +605,6 @@ public:
|
||||
return *m_networkOPs;
|
||||
}
|
||||
|
||||
virtual ServerHandler&
|
||||
getServerHandler() override
|
||||
{
|
||||
assert(serverHandler_);
|
||||
return *serverHandler_;
|
||||
}
|
||||
|
||||
boost::asio::io_service&
|
||||
getIOService() override
|
||||
{
|
||||
@@ -1068,172 +1061,20 @@ public:
|
||||
// VFALCO TODO fix the dependency inversion using an observer,
|
||||
// have listeners register for "onSweep ()" notification.
|
||||
|
||||
{
|
||||
std::shared_ptr<FullBelowCache const> const fullBelowCache =
|
||||
nodeFamily_.getFullBelowCache(0);
|
||||
|
||||
std::shared_ptr<TreeNodeCache const> const treeNodeCache =
|
||||
nodeFamily_.getTreeNodeCache(0);
|
||||
|
||||
std::size_t const oldFullBelowSize = fullBelowCache->size();
|
||||
std::size_t const oldTreeNodeSize = treeNodeCache->size();
|
||||
|
||||
nodeFamily_.sweep();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "NodeFamily::FullBelowCache sweep. Size before: "
|
||||
<< oldFullBelowSize
|
||||
<< "; size after: " << fullBelowCache->size();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "NodeFamily::TreeNodeCache sweep. Size before: "
|
||||
<< oldTreeNodeSize << "; size after: " << treeNodeCache->size();
|
||||
}
|
||||
nodeFamily_.sweep();
|
||||
if (shardFamily_)
|
||||
{
|
||||
std::size_t const oldFullBelowSize =
|
||||
shardFamily_->getFullBelowCacheSize();
|
||||
std::size_t const oldTreeNodeSize =
|
||||
shardFamily_->getTreeNodeCacheSize().second;
|
||||
|
||||
shardFamily_->sweep();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "ShardFamily::FullBelowCache sweep. Size before: "
|
||||
<< oldFullBelowSize
|
||||
<< "; size after: " << shardFamily_->getFullBelowCacheSize();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "ShardFamily::TreeNodeCache sweep. Size before: "
|
||||
<< oldTreeNodeSize << "; size after: "
|
||||
<< shardFamily_->getTreeNodeCacheSize().second;
|
||||
}
|
||||
{
|
||||
TaggedCache<uint256, Transaction> const& masterTxCache =
|
||||
getMasterTransaction().getCache();
|
||||
|
||||
std::size_t const oldMasterTxSize = masterTxCache.size();
|
||||
|
||||
getMasterTransaction().sweep();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "MasterTransaction sweep. Size before: " << oldMasterTxSize
|
||||
<< "; size after: " << masterTxCache.size();
|
||||
}
|
||||
{
|
||||
// Does not appear to have an associated cache.
|
||||
getNodeStore().sweep();
|
||||
}
|
||||
getMasterTransaction().sweep();
|
||||
getNodeStore().sweep();
|
||||
if (shardStore_)
|
||||
{
|
||||
// Does not appear to have an associated cache.
|
||||
shardStore_->sweep();
|
||||
}
|
||||
{
|
||||
std::size_t const oldLedgerMasterCacheSize =
|
||||
getLedgerMaster().getFetchPackCacheSize();
|
||||
|
||||
getLedgerMaster().sweep();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "LedgerMaster sweep. Size before: "
|
||||
<< oldLedgerMasterCacheSize << "; size after: "
|
||||
<< getLedgerMaster().getFetchPackCacheSize();
|
||||
}
|
||||
{
|
||||
// NodeCache == TaggedCache<SHAMapHash, Blob>
|
||||
std::size_t const oldTempNodeCacheSize = getTempNodeCache().size();
|
||||
|
||||
getTempNodeCache().sweep();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "TempNodeCache sweep. Size before: " << oldTempNodeCacheSize
|
||||
<< "; size after: " << getTempNodeCache().size();
|
||||
}
|
||||
{
|
||||
std::size_t const oldCurrentCacheSize =
|
||||
getValidations().sizeOfCurrentCache();
|
||||
std::size_t const oldSizeSeqEnforcesSize =
|
||||
getValidations().sizeOfSeqEnforcersCache();
|
||||
std::size_t const oldByLedgerSize =
|
||||
getValidations().sizeOfByLedgerCache();
|
||||
std::size_t const oldBySequenceSize =
|
||||
getValidations().sizeOfBySequenceCache();
|
||||
|
||||
getValidations().expire(m_journal);
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "Validations Current expire. Size before: "
|
||||
<< oldCurrentCacheSize
|
||||
<< "; size after: " << getValidations().sizeOfCurrentCache();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "Validations SeqEnforcer expire. Size before: "
|
||||
<< oldSizeSeqEnforcesSize << "; size after: "
|
||||
<< getValidations().sizeOfSeqEnforcersCache();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "Validations ByLedger expire. Size before: "
|
||||
<< oldByLedgerSize
|
||||
<< "; size after: " << getValidations().sizeOfByLedgerCache();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "Validations BySequence expire. Size before: "
|
||||
<< oldBySequenceSize
|
||||
<< "; size after: " << getValidations().sizeOfBySequenceCache();
|
||||
}
|
||||
{
|
||||
std::size_t const oldInboundLedgersSize =
|
||||
getInboundLedgers().cacheSize();
|
||||
|
||||
getInboundLedgers().sweep();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "InboundLedgers sweep. Size before: "
|
||||
<< oldInboundLedgersSize
|
||||
<< "; size after: " << getInboundLedgers().cacheSize();
|
||||
}
|
||||
{
|
||||
size_t const oldTasksSize = getLedgerReplayer().tasksSize();
|
||||
size_t const oldDeltasSize = getLedgerReplayer().deltasSize();
|
||||
size_t const oldSkipListsSize = getLedgerReplayer().skipListsSize();
|
||||
|
||||
getLedgerReplayer().sweep();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "LedgerReplayer tasks sweep. Size before: " << oldTasksSize
|
||||
<< "; size after: " << getLedgerReplayer().tasksSize();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "LedgerReplayer deltas sweep. Size before: "
|
||||
<< oldDeltasSize
|
||||
<< "; size after: " << getLedgerReplayer().deltasSize();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "LedgerReplayer skipLists sweep. Size before: "
|
||||
<< oldSkipListsSize
|
||||
<< "; size after: " << getLedgerReplayer().skipListsSize();
|
||||
}
|
||||
{
|
||||
std::size_t const oldAcceptedLedgerSize =
|
||||
m_acceptedLedgerCache.size();
|
||||
|
||||
m_acceptedLedgerCache.sweep();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "AcceptedLedgerCache sweep. Size before: "
|
||||
<< oldAcceptedLedgerSize
|
||||
<< "; size after: " << m_acceptedLedgerCache.size();
|
||||
}
|
||||
{
|
||||
std::size_t const oldCachedSLEsSize = cachedSLEs_.size();
|
||||
|
||||
cachedSLEs_.sweep();
|
||||
|
||||
JLOG(m_journal.debug())
|
||||
<< "CachedSLEs sweep. Size before: " << oldCachedSLEsSize
|
||||
<< "; size after: " << cachedSLEs_.size();
|
||||
}
|
||||
getLedgerMaster().sweep();
|
||||
getTempNodeCache().sweep();
|
||||
getValidations().expire(m_journal);
|
||||
getInboundLedgers().sweep();
|
||||
getLedgerReplayer().sweep();
|
||||
m_acceptedLedgerCache.sweep();
|
||||
cachedSLEs_.sweep();
|
||||
|
||||
#ifdef RIPPLED_REPORTING
|
||||
if (auto pg = dynamic_cast<PostgresDatabase*>(&*mRelationalDatabase))
|
||||
@@ -1337,6 +1178,9 @@ ApplicationImp::setup(boost::program_options::variables_map const& cmdline)
|
||||
// Optionally turn off logging to console.
|
||||
logs_->silent(config_->silent());
|
||||
|
||||
if (!config_->standalone())
|
||||
timeKeeper_->run(config_->SNTP_SERVERS);
|
||||
|
||||
if (!initRelationalDatabase() || !initNodeStore())
|
||||
return false;
|
||||
|
||||
@@ -1439,12 +1283,6 @@ ApplicationImp::setup(boost::program_options::variables_map const& cmdline)
|
||||
}
|
||||
}
|
||||
|
||||
if (auto const& forcedRange = config().FORCED_LEDGER_RANGE_PRESENT)
|
||||
{
|
||||
m_ledgerMaster->setLedgerRangePresent(
|
||||
forcedRange->first, forcedRange->second);
|
||||
}
|
||||
|
||||
if (!config().reporting())
|
||||
m_orderBookDB.setup(getLedgerMaster().getCurrentLedger());
|
||||
|
||||
@@ -1496,9 +1334,6 @@ ApplicationImp::setup(boost::program_options::variables_map const& cmdline)
|
||||
<< "Invalid entry in [" << SECTION_VALIDATOR_LIST_SITES << "]";
|
||||
return false;
|
||||
}
|
||||
|
||||
// Tell the AmendmentTable who the trusted validators are.
|
||||
m_amendmentTable->trustChanged(validators_->getQuorumKeys().second);
|
||||
}
|
||||
|
||||
if (config_->IMPORT_VL_KEYS.empty())
|
||||
|
||||
@@ -88,7 +88,6 @@ class Overlay;
|
||||
class PathRequests;
|
||||
class PendingSaves;
|
||||
class PublicKey;
|
||||
class ServerHandler;
|
||||
class SecretKey;
|
||||
class STLedgerEntry;
|
||||
class TimeKeeper;
|
||||
@@ -231,8 +230,6 @@ public:
|
||||
getOPs() = 0;
|
||||
virtual OrderBookDB&
|
||||
getOrderBookDB() = 0;
|
||||
virtual ServerHandler&
|
||||
getServerHandler() = 0;
|
||||
virtual TransactionMaster&
|
||||
getMasterTransaction() = 0;
|
||||
virtual perf::PerfLog&
|
||||
|
||||
@@ -23,7 +23,6 @@
|
||||
#include <ripple/resource/Fees.h>
|
||||
|
||||
#include <ripple/beast/net/IPAddressConversion.h>
|
||||
#include <ripple/core/ConfigSections.h>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
@@ -428,9 +427,9 @@ GRPCServerImpl::GRPCServerImpl(Application& app)
|
||||
: app_(app), journal_(app_.journal("gRPC Server"))
|
||||
{
|
||||
// if present, get endpoint from config
|
||||
if (app_.config().exists(SECTION_PORT_GRPC))
|
||||
if (app_.config().exists("port_grpc"))
|
||||
{
|
||||
Section const& section = app_.config().section(SECTION_PORT_GRPC);
|
||||
Section section = app_.config().section("port_grpc");
|
||||
|
||||
auto const optIp = section.get("ip");
|
||||
if (!optIp)
|
||||
@@ -660,7 +659,7 @@ GRPCServerImpl::setupListeners()
|
||||
secureGatewayIPs_));
|
||||
}
|
||||
return requests;
|
||||
}
|
||||
};
|
||||
|
||||
bool
|
||||
GRPCServerImpl::start()
|
||||
|
||||
@@ -378,13 +378,8 @@ run(int argc, char** argv)
|
||||
"Override the minimum validation quorum.")(
|
||||
"reportingReadOnly", "Run in read-only reporting mode")(
|
||||
"silent", "No output to the console after startup.")(
|
||||
"standalone,a", "Run with no peers.")("verbose,v", "Verbose logging.")
|
||||
|
||||
("force_ledger_present_range",
|
||||
po::value<std::string>(),
|
||||
"Specify the range of present ledgers for testing purposes. Min and "
|
||||
"max values are comma separated.")(
|
||||
"version", "Display the build version.");
|
||||
"standalone,a", "Run with no peers.")("verbose,v", "Verbose logging.")(
|
||||
"version", "Display the build version.");
|
||||
|
||||
po::options_description data("Ledger/Data Options");
|
||||
data.add_options()("import", importText.c_str())(
|
||||
@@ -429,8 +424,9 @@ run(int argc, char** argv)
|
||||
po::value<std::string>()->implicit_value(""),
|
||||
"Perform unit tests. The optional argument specifies one or "
|
||||
"more comma-separated selectors. Each selector specifies a suite name, "
|
||||
"suite name prefix, full-name (lib.module.suite), module, or library "
|
||||
"(checked in that order).")(
|
||||
"full-name (lib.module.suite), module, or library "
|
||||
"(checked in that "
|
||||
"order).")(
|
||||
"unittest-arg",
|
||||
po::value<std::string>()->implicit_value(""),
|
||||
"Supplies an argument string to unit tests. If provided, this argument "
|
||||
@@ -606,51 +602,6 @@ run(int argc, char** argv)
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (vm.contains("force_ledger_present_range"))
|
||||
{
|
||||
try
|
||||
{
|
||||
auto const r = [&vm]() -> std::vector<std::uint32_t> {
|
||||
std::vector<std::string> strVec;
|
||||
boost::split(
|
||||
strVec,
|
||||
vm["force_ledger_present_range"].as<std::string>(),
|
||||
boost::algorithm::is_any_of(","));
|
||||
std::vector<std::uint32_t> result;
|
||||
for (auto& s : strVec)
|
||||
{
|
||||
boost::trim(s);
|
||||
if (!s.empty())
|
||||
result.push_back(std::stoi(s));
|
||||
}
|
||||
return result;
|
||||
}();
|
||||
|
||||
if (r.size() == 2)
|
||||
{
|
||||
if (r[0] > r[1])
|
||||
{
|
||||
throw std::runtime_error(
|
||||
"Invalid force_ledger_present_range parameter");
|
||||
}
|
||||
config->FORCED_LEDGER_RANGE_PRESENT.emplace(r[0], r[1]);
|
||||
}
|
||||
else
|
||||
{
|
||||
throw std::runtime_error(
|
||||
"Invalid force_ledger_present_range parameter");
|
||||
}
|
||||
}
|
||||
catch (std::exception const& e)
|
||||
{
|
||||
std::cerr << "invalid 'force_ledger_present_range' parameter. The "
|
||||
"parameter must be two numbers separated by a comma. "
|
||||
"The first number must be <= the second."
|
||||
<< std::endl;
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
if (vm.count("start"))
|
||||
{
|
||||
config->START_UP = Config::FRESH;
|
||||
@@ -801,8 +752,10 @@ run(int argc, char** argv)
|
||||
if (vm.count("debug"))
|
||||
setDebugLogSink(logs->makeSink("Debug", beast::severities::kTrace));
|
||||
|
||||
auto timeKeeper = make_TimeKeeper(logs->journal("TimeKeeper"));
|
||||
|
||||
auto app = make_Application(
|
||||
std::move(config), std::move(logs), std::make_unique<TimeKeeper>());
|
||||
std::move(config), std::move(logs), std::move(timeKeeper));
|
||||
|
||||
if (!app->setup(vm))
|
||||
return -1;
|
||||
|
||||
@@ -1,311 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of rippled: https://github.com/ripple/rippled
Copyright (c) 2023 Ripple Labs Inc.

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#ifndef RIPPLE_APP_MISC_AMMHELPERS_H_INCLUDED
#define RIPPLE_APP_MISC_AMMHELPERS_H_INCLUDED

#include <ripple/basics/IOUAmount.h>
#include <ripple/basics/Number.h>
#include <ripple/protocol/AMMCore.h>
#include <ripple/protocol/AmountConversions.h>
#include <ripple/protocol/Issue.h>
#include <ripple/protocol/Quality.h>
#include <ripple/protocol/STAccount.h>
#include <ripple/protocol/STAmount.h>

#include <type_traits>

namespace ripple {

/** Calculate LP Tokens given AMM pool reserves.
* @param asset1 AMM one side of the pool reserve
* @param asset2 AMM another side of the pool reserve
* @return LP Tokens as IOU
*/
STAmount
ammLPTokens(
STAmount const& asset1,
STAmount const& asset2,
Issue const& lptIssue);
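The formula itself is not stated here; in XLS-30d the initial LP token issuance is conventionally the geometric mean of the two pool reserves, so as an orienting assumption (not quoted from this header):

t = sqrt(asset1 * asset2), issued as an IOU of lptIssue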

/** Calculate LP Tokens given asset's deposit amount.
* @param asset1Balance current AMM asset1 balance
* @param asset1Deposit requested asset1 deposit amount
* @param lptAMMBalance AMM LPT balance
* @param tfee trading fee in basis points
* @return tokens
*/
STAmount
lpTokensIn(
STAmount const& asset1Balance,
STAmount const& asset1Deposit,
STAmount const& lptAMMBalance,
std::uint16_t tfee);

/** Calculate asset deposit given LP Tokens.
* @param asset1Balance current AMM asset1 balance
* @param lpTokens LP Tokens
* @param lptAMMBalance AMM LPT balance
* @param tfee trading fee in basis points
* @return
*/
STAmount
ammAssetIn(
STAmount const& asset1Balance,
STAmount const& lptAMMBalance,
STAmount const& lpTokens,
std::uint16_t tfee);

/** Calculate LP Tokens given asset's withdraw amount. Return 0
* if can't calculate.
* @param asset1Balance current AMM asset1 balance
* @param asset1Withdraw requested asset1 withdraw amount
* @param lptAMMBalance AMM LPT balance
* @param tfee trading fee in basis points
* @return tokens out amount
*/
STAmount
lpTokensOut(
STAmount const& asset1Balance,
STAmount const& asset1Withdraw,
STAmount const& lptAMMBalance,
std::uint16_t tfee);

/** Calculate asset withdrawal by tokens
* @param assetBalance balance of the asset being withdrawn
* @param lptAMMBalance total AMM Tokens balance
* @param lpTokens LP Tokens balance
* @param tfee trading fee in basis points
* @return calculated asset amount
*/
STAmount
withdrawByTokens(
STAmount const& assetBalance,
STAmount const& lptAMMBalance,
STAmount const& lpTokens,
std::uint16_t tfee);

/** Check if the relative distance between the qualities
* is within the requested distance.
* @param calcQuality calculated quality
* @param reqQuality requested quality
* @param dist requested relative distance
* @return true if within dist, false otherwise
*/
inline bool
withinRelativeDistance(
Quality const& calcQuality,
Quality const& reqQuality,
Number const& dist)
{
if (calcQuality == reqQuality)
return true;
auto const [min, max] = std::minmax(calcQuality, reqQuality);
// Relative distance is (max - min)/max. Can't use basic operations
// on Quality. Have to use Quality::rate() instead, which
// is inverse of quality: (1/max.rate - 1/min.rate)/(1/max.rate)
return ((min.rate() - max.rate()) / min.rate()) < dist;
}
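A quick numeric reading of that comparison, with illustrative rates (rate being amount-in per amount-out, i.e. the inverse of quality): take rates 1.0005 and 1.0000, so min is the lower quality with the larger rate:

(min.rate() - max.rate()) / min.rate() = (1.0005 - 1.0000) / 1.0005 ≈ 5.0e-4

which passes for dist = 1e-3 but fails for dist = 1e-4; changeSpotPriceQuality() below calls this with Number(1, -7), i.e. 1e-7.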

/** Check if the relative distance between the amounts
* is within the requested distance.
* @param calc calculated amount
* @param req requested amount
* @param dist requested relative distance
* @return true if within dist, false otherwise
*/
// clang-format off
template <typename Amt>
requires(
std::is_same_v<Amt, STAmount> || std::is_same_v<Amt, IOUAmount> ||
std::is_same_v<Amt, XRPAmount>)
bool
withinRelativeDistance(Amt const& calc, Amt const& req, Number const& dist)
{
if (calc == req)
return true;
auto const [min, max] = std::minmax(calc, req);
return ((max - min) / max) < dist;
}
// clang-format on

/** Finds takerPays (i) and takerGets (o) such that given pool composition
* poolGets(I) and poolPays(O): (O - o) / (I + i) = quality.
* Where takerGets is calculated as the swapAssetIn (see below).
* The above equation produces the quadratic equation:
* i^2*(1-fee) + i*I*(2-fee) + I^2 - I*O/quality,
* which is solved for i, and o is found with swapAssetIn().
* @param pool AMM pool balances
* @param quality requested quality
* @param tfee trading fee in basis points
* @return seated in/out amounts if the quality can be changed
*/
template <typename TIn, typename TOut>
std::optional<TAmounts<TIn, TOut>>
changeSpotPriceQuality(
TAmounts<TIn, TOut> const& pool,
Quality const& quality,
std::uint16_t tfee)
{
auto const f = feeMult(tfee); // 1 - fee
auto const& a = f;
auto const b = pool.in * (1 + f);
Number const c = pool.in * pool.in - pool.in * pool.out * quality.rate();
if (auto const res = b * b - 4 * a * c; res < 0)
return std::nullopt;
else if (auto const nTakerPaysPropose = (-b + root2(res)) / (2 * a);
nTakerPaysPropose > 0)
{
auto const nTakerPays = [&]() {
// The fee might make the AMM offer quality less than CLOB quality.
// Therefore, AMM offer has to satisfy this constraint: o / i >= q.
// Substituting o with swapAssetIn() gives:
// i <= O / q - I / (1 - fee).
auto const nTakerPaysConstraint =
pool.out * quality.rate() - pool.in / f;
if (nTakerPaysPropose > nTakerPaysConstraint)
return nTakerPaysConstraint;
return nTakerPaysPropose;
}();
if (nTakerPays <= 0)
return std::nullopt;
auto const takerPays = toAmount<TIn>(
getIssue(pool.in), nTakerPays, Number::rounding_mode::upward);
// should not fail
if (auto const amounts =
TAmounts<TIn, TOut>{
takerPays, swapAssetIn(pool, takerPays, tfee)};
Quality{amounts} < quality &&
!withinRelativeDistance(Quality{amounts}, quality, Number(1, -7)))
Throw<std::runtime_error>("changeSpotPriceQuality failed");
else
return amounts;
}
return std::nullopt;
}
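A short derivation of the quadratic quoted in the comment, assuming o = swapAssetIn(pool, i, tfee) = O - I*O/(I + i*f) with f = 1 - fee, and quality.rate() = 1/q:

(O - o)/(I + i) = q  and  O - o = I*O/(I + i*f)
=> (I + i*f)*(I + i) = I*O/q
=> f*i^2 + I*(1 + f)*i + I^2 - I*O/q = 0

With f = 1 - fee this is exactly i^2*(1-fee) + i*I*(2-fee) + I^2 - I*O/quality = 0; the code above sets a = f, b = I*(1 + f), c = I^2 - I*O*rate() and keeps the positive root (-b + root2(b^2 - 4*a*c)) / (2*a).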

/** AMM pool invariant - the product (A * B) after swap in/out has to remain
* at least the same: (A + in) * (B - out) >= A * B
* XRP round-off may result in a smaller product after swap in/out.
* To address this:
* - if on swapIn the out is XRP then the amount is round-off
* downward, making the product slightly larger since out
* value is reduced.
* - if on swapOut the in is XRP then the amount is round-off
* upward, making the product slightly larger since in
* value is increased.
*/
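A minimal sketch of checking that invariant after a swap, written against the templates below; the helper name is illustrative and not part of this header:

// Sketch only: (A + in) * (B - out) must not drop below A * B.
// Assumes the TAmounts/Number arithmetic used elsewhere in this header.
template <typename TIn, typename TOut>
bool
productNotReduced(TAmounts<TIn, TOut> const& pool, TIn const& in, TOut const& out)
{
    Number const before = pool.in * pool.out;
    Number const after = (pool.in + in) * (pool.out - out);
    return after >= before;  // the XRP round-off rules above keep this true
}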

/** Swap assetIn into the pool and swap out a proportional amount
* of the other asset. Implements AMM Swap in.
* @see [XLS30d:AMM
* Swap](https://github.com/XRPLF/XRPL-Standards/discussions/78)
* @param pool current AMM pool balances
* @param assetIn amount to swap in
* @param tfee trading fee in basis points
* @return
*/
template <typename TIn, typename TOut>
TOut
swapAssetIn(
TAmounts<TIn, TOut> const& pool,
TIn const& assetIn,
std::uint16_t tfee)
{
return toAmount<TOut>(
getIssue(pool.out),
pool.out - (pool.in * pool.out) / (pool.in + assetIn * feeMult(tfee)),
Number::rounding_mode::downward);
}
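A worked instance of the formula above with illustrative numbers (not from the source): pool.in = 1000, pool.out = 1000, assetIn = 10, and a 1% trading fee so feeMult(tfee) = 0.99:

out = 1000 - (1000 * 1000) / (1000 + 10 * 0.99) = 1000 - 990.197... ≈ 9.803

rounded downward when the out side is XRP drops, per the invariant note above.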

/** Swap assetOut out of the pool and swap in a proportional amount
* of the other asset. Implements AMM Swap out.
* @see [XLS30d:AMM
* Swap](https://github.com/XRPLF/XRPL-Standards/discussions/78)
* @param pool current AMM pool balances
* @param assetOut amount to swap out
* @param tfee trading fee in basis points
* @return
*/
template <typename TIn, typename TOut>
TIn
swapAssetOut(
TAmounts<TIn, TOut> const& pool,
TOut const& assetOut,
std::uint16_t tfee)
{
return toAmount<TIn>(
getIssue(pool.in),
((pool.in * pool.out) / (pool.out - assetOut) - pool.in) /
feeMult(tfee),
Number::rounding_mode::upward);
}
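The mirror calculation with the same illustrative pool (in = out = 1000, 1% fee), asking to withdraw assetOut = 9.803:

in = ((1000 * 1000) / (1000 - 9.803) - 1000) / 0.99 = (1009.900... - 1000) / 0.99 ≈ 10.0

i.e. swapAssetOut() recovers, up to rounding (upward when the in side is XRP), the swapAssetIn() example above.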

/** Return square of n.
*/
Number
square(Number const& n);

/** Adjust LP tokens to deposit/withdraw.
* Amount type keeps 16 digits. Maintaining the LP balance by adding deposited
* tokens or subtracting withdrawn LP tokens from LP balance results in
* losing precision in LP balance. I.e. the resulting LP balance
* is less than the actual sum of LP tokens. To adjust for this, subtract
* old tokens balance from the new one for deposit or vice versa for withdraw
* to cancel out the precision loss.
* @param lptAMMBalance LPT AMM Balance
* @param lpTokens LP tokens to deposit or withdraw
* @param isDeposit true if deposit, false if withdraw
*/
STAmount
adjustLPTokens(
STAmount const& lptAMMBalance,
STAmount const& lpTokens,
bool isDeposit);
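An illustration of the precision loss described above, with made-up values: at 16 significant digits, a balance of 1234567890.123456 cannot absorb a deposit of 4.9e-7 tokens:

round16(1234567890.123456 + 0.00000049) = 1234567890.123456

so the adjusted LP tokens are round16(new) - old = 0 rather than the nominal 4.9e-7; the same subtraction, taken in the opposite direction, is applied on withdraw.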

/** Calls adjustLPTokens() and adjusts deposit or withdraw amounts if
* the adjusted LP tokens are less than the provided LP tokens.
* @param amountBalance asset1 pool balance
* @param amount asset1 to deposit or withdraw
* @param amount2 asset2 to deposit or withdraw
* @param lptAMMBalance LPT AMM Balance
* @param lpTokens LP tokens to deposit or withdraw
* @param tfee trading fee in basis points
* @param isDeposit true if deposit, false if withdraw
* @return
*/
std::tuple<STAmount, std::optional<STAmount>, STAmount>
adjustAmountsByLPTokens(
STAmount const& amountBalance,
STAmount const& amount,
std::optional<STAmount> const& amount2,
STAmount const& lptAMMBalance,
STAmount const& lpTokens,
std::uint16_t tfee,
bool isDeposit);

/** Positive solution for quadratic equation:
* x = (-b + sqrt(b**2 + 4*a*c))/(2*a)
*/
Number
solveQuadraticEq(Number const& a, Number const& b, Number const& c);

} // namespace ripple

#endif // RIPPLE_APP_MISC_AMMHELPERS_H_INCLUDED
@@ -1,118 +0,0 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2023 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
#ifndef RIPPLE_APP_MISC_AMMUTILS_H_INLCUDED
|
||||
#define RIPPLE_APP_MISC_AMMUTILS_H_INLCUDED
|
||||
|
||||
#include <ripple/basics/Expected.h>
|
||||
#include <ripple/beast/utility/Journal.h>
|
||||
#include <ripple/ledger/View.h>
|
||||
#include <ripple/protocol/STAmount.h>
|
||||
#include <ripple/protocol/STLedgerEntry.h>
|
||||
#include <ripple/protocol/TER.h>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
class ReadView;
|
||||
class ApplyView;
|
||||
class Sandbox;
|
||||
class NetClock;
|
||||
|
||||
/** Get AMM pool balances.
|
||||
*/
|
||||
std::pair<STAmount, STAmount>
|
||||
ammPoolHolds(
|
||||
ReadView const& view,
|
||||
AccountID const& ammAccountID,
|
||||
Issue const& issue1,
|
||||
Issue const& issue2,
|
||||
FreezeHandling freezeHandling,
|
||||
beast::Journal const j);
|
||||
|
||||
/** Get AMM pool and LP token balances. If both optIssue are
|
||||
* provided then they are used as the AMM token pair issues.
|
||||
* Otherwise the missing issues are fetched from ammSle.
|
||||
*/
|
||||
Expected<std::tuple<STAmount, STAmount, STAmount>, TER>
|
||||
ammHolds(
|
||||
ReadView const& view,
|
||||
SLE const& ammSle,
|
||||
std::optional<Issue> const& optIssue1,
|
||||
std::optional<Issue> const& optIssue2,
|
||||
FreezeHandling freezeHandling,
|
||||
beast::Journal const j);
|
||||
|
||||
/** Get the balance of LP tokens.
|
||||
*/
|
||||
STAmount
|
||||
ammLPHolds(
|
||||
ReadView const& view,
|
||||
Currency const& cur1,
|
||||
Currency const& cur2,
|
||||
AccountID const& ammAccount,
|
||||
AccountID const& lpAccount,
|
||||
beast::Journal const j);
|
||||
|
||||
STAmount
|
||||
ammLPHolds(
|
||||
ReadView const& view,
|
||||
SLE const& ammSle,
|
||||
AccountID const& lpAccount,
|
||||
beast::Journal const j);
|
||||
|
||||
/** Get AMM trading fee for the given account. The fee is discounted
|
||||
* if the account is the auction slot owner or one of the slot's authorized
|
||||
* accounts.
|
||||
*/
|
||||
std::uint16_t
|
||||
getTradingFee(
|
||||
ReadView const& view,
|
||||
SLE const& ammSle,
|
||||
AccountID const& account);
|
||||
|
||||
/** Returns total amount held by AMM for the given token.
|
||||
*/
|
||||
STAmount
|
||||
ammAccountHolds(
|
||||
ReadView const& view,
|
||||
AccountID const& ammAccountID,
|
||||
Issue const& issue);
|
||||
|
||||
/** Delete trustlines to AMM. If all trustlines are deleted then
|
||||
* AMM object and account are deleted. Otherwise tecIMPCOMPLETE is returned.
|
||||
*/
|
||||
TER
|
||||
deleteAMMAccount(
|
||||
Sandbox& view,
|
||||
Issue const& asset,
|
||||
Issue const& asset2,
|
||||
beast::Journal j);
|
||||
|
||||
/** Initialize Auction and Voting slots and set the trading/discounted fee.
|
||||
*/
|
||||
void
|
||||
initializeFeeAuctionVote(
|
||||
ApplyView& view,
|
||||
std::shared_ptr<SLE>& ammSle,
|
||||
AccountID const& account,
|
||||
Issue const& lptIssue,
|
||||
std::uint16_t tfee);
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
#endif // RIPPLE_APP_MISC_AMMUTILS_H_INLCUDED
|
||||
@@ -111,10 +111,6 @@ public:
|
||||
std::set<uint256> const& enabled,
|
||||
majorityAmendments_t const& majority) = 0;
|
||||
|
||||
// Called when the set of trusted validators changes.
|
||||
virtual void
|
||||
trustChanged(hash_set<PublicKey> const& allTrusted) = 0;
|
||||
|
||||
// Called by the consensus code when we need to
|
||||
// inject pseudo-transactions
|
||||
virtual std::map<uint256, std::uint32_t>
|
||||
|
||||
@@ -11,7 +11,6 @@
|
||||
#include <ripple/app/misc/ValidatorList.h>
|
||||
#include <ripple/app/rdb/backend/SQLiteDatabase.h>
|
||||
#include <ripple/basics/UptimeClock.h>
|
||||
#include <ripple/basics/mulDiv.h>
|
||||
#include <ripple/ledger/CachedSLEs.h>
|
||||
#include <ripple/nodestore/Database.h>
|
||||
#include <ripple/nodestore/DatabaseShard.h>
|
||||
@@ -835,7 +834,7 @@ private:
|
||||
escalationMetrics.openLedgerFeeLevel,
|
||||
loadBaseServer,
|
||||
escalationMetrics.referenceFeeLevel)
|
||||
.value_or(muldiv_max);
|
||||
.second;
|
||||
|
||||
header->load_factor = std::max(
|
||||
safe_cast<std::uint64_t>(loadFactorServer),
|
||||
|
||||
@@ -1,53 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of rippled: https://github.com/ripple/rippled
Copyright (c) 2023 Ripple Labs Inc.

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#ifndef RIPPLE_APP_MISC_DELIVERMAX_H_INCLUDED
#define RIPPLE_APP_MISC_DELIVERMAX_H_INCLUDED

#include <ripple/protocol/TxFormats.h>

#include <functional>
#include <memory>

namespace Json {
class Value;
}

namespace ripple {

namespace RPC {

/**
Copy `Amount` field to `DeliverMax` field in transaction output JSON.
This only applies to Payment transaction type, all others are ignored.

When apiVersion > 1 will also remove `Amount` field, forcing users
to access this value using new `DeliverMax` field only.
@{
*/

void
insertDeliverMax(Json::Value& tx_json, TxType txnType, unsigned int apiVersion);

/** @} */
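A rough sketch of the transformation this declaration describes, with the field handling paraphrased from the comment above rather than copied from the implementation; jss::Amount and jss::DeliverMax are assumed string constants:

// Illustrative only: apply the documented rule to a transaction JSON object.
void
insertDeliverMaxSketch(Json::Value& tx_json, TxType txnType, unsigned int apiVersion)
{
    if (txnType != ttPAYMENT || !tx_json.isMember(jss::Amount))
        return;
    tx_json[jss::DeliverMax] = tx_json[jss::Amount];  // copy Amount -> DeliverMax
    if (apiVersion > 1)
        tx_json.removeMember(jss::Amount);  // API v2+ exposes DeliverMax only
}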

} // namespace RPC
} // namespace ripple

#endif
@@ -190,27 +190,11 @@ lower) fee to get into the same position as a reference transaction.

### Consensus Health

For consensus to be considered healthy, the peers on the network
should largely remain in sync with one another. It is particularly
important for the validators to remain in sync, because that is required
for participation in consensus. However, the network tolerates some
validators being out of sync. Fundamentally, network health is a
function of validators reaching consensus on sets of recently submitted
transactions.

Another factor to consider is
the duration of the consensus process itself. This generally takes
under 5 seconds on the main network under low volume. This is based on
historical observations. However factors such as transaction volume
can increase consensus duration. This is because rippled performs
more work as transaction volume increases. Under sufficient load this
tends to increase consensus duration. It's possible that relatively high
consensus duration indicates a problem, but it is not appropriate to
conclude so without investigation. The upper limit for consensus
duration should be roughly 20 seconds. That is far above the normal.
If the network takes this long to close ledgers, then it is almost
certain that there is a problem with the network. This circumstance
often coincides with new ledgers with zero transactions.
For consensus to be considered healthy, the consensus process must take
less than 5 seconds. This time limit was chosen based on observed past
behavior of the network. Note that this is not necessarily the time between
ledger closings, as consensus usually starts some amount of time after
a ledger opens.

### Other Constants

@@ -30,7 +30,6 @@
|
||||
#include <ripple/app/ledger/TransactionMaster.h>
|
||||
#include <ripple/app/main/LoadManager.h>
|
||||
#include <ripple/app/misc/AmendmentTable.h>
|
||||
#include <ripple/app/misc/DeliverMax.h>
|
||||
#include <ripple/app/misc/HashRouter.h>
|
||||
#include <ripple/app/misc/LoadFeeTrack.h>
|
||||
#include <ripple/app/misc/NetworkOPs.h>
|
||||
@@ -54,7 +53,6 @@
|
||||
#include <ripple/consensus/ConsensusParms.h>
|
||||
#include <ripple/crypto/RFC1751.h>
|
||||
#include <ripple/crypto/csprng.h>
|
||||
#include <ripple/json/MultivarJson.h>
|
||||
#include <ripple/json/to_string.h>
|
||||
#include <ripple/net/RPCErr.h>
|
||||
#include <ripple/nodestore/DatabaseShard.h>
|
||||
@@ -64,22 +62,17 @@
|
||||
#include <ripple/protocol/BuildInfo.h>
|
||||
#include <ripple/protocol/Feature.h>
|
||||
#include <ripple/protocol/STParsedJSON.h>
|
||||
#include <ripple/protocol/jss.h>
|
||||
#include <ripple/resource/Fees.h>
|
||||
#include <ripple/resource/ResourceManager.h>
|
||||
#include <ripple/rpc/BookChanges.h>
|
||||
#include <ripple/rpc/CTID.h>
|
||||
#include <ripple/rpc/DeliveredAmount.h>
|
||||
#include <ripple/rpc/ServerHandler.h>
|
||||
#include <ripple/rpc/impl/RPCHelpers.h>
|
||||
#include <ripple/rpc/impl/UDPInfoSub.h>
|
||||
#include <boost/asio/ip/host_name.hpp>
|
||||
#include <boost/asio/steady_timer.hpp>
|
||||
#include <exception>
|
||||
|
||||
#include <algorithm>
|
||||
#include <mutex>
|
||||
#include <optional>
|
||||
#include <set>
|
||||
#include <string>
|
||||
#include <tuple>
|
||||
@@ -206,7 +199,7 @@ public:
|
||||
strOperatingMode(bool const admin = false) const override;
|
||||
|
||||
StateAccounting::CounterData
|
||||
getStateAccountingData() override;
|
||||
getStateAccountingData();
|
||||
|
||||
//
|
||||
// Transaction operations.
|
||||
@@ -555,25 +548,22 @@ private:
|
||||
void
|
||||
processClusterTimer();
|
||||
|
||||
MultiApiJson
|
||||
Json::Value
|
||||
transJson(
|
||||
std::shared_ptr<STTx const> const& transaction,
|
||||
const STTx& transaction,
|
||||
TER result,
|
||||
bool validated,
|
||||
std::shared_ptr<ReadView const> const& ledger,
|
||||
std::optional<std::reference_wrapper<TxMeta const>> meta);
|
||||
std::shared_ptr<ReadView const> const& ledger);
|
||||
|
||||
void
|
||||
pubValidatedTransaction(
|
||||
std::shared_ptr<ReadView const> const& ledger,
|
||||
AcceptedLedgerTx const& transaction,
|
||||
bool last);
|
||||
AcceptedLedgerTx const& transaction);
|
||||
|
||||
void
|
||||
pubAccountTransaction(
|
||||
std::shared_ptr<ReadView const> const& ledger,
|
||||
AcceptedLedgerTx const& transaction,
|
||||
bool last);
|
||||
AcceptedLedgerTx const& transaction);
|
||||
|
||||
void
|
||||
pubProposedAccountTransaction(
|
||||
@@ -693,10 +683,11 @@ private:
|
||||
sPeerStatus, // Peer status changes.
|
||||
sConsensusPhase, // Consensus phase
|
||||
sBookChanges, // Per-ledger order book changes
|
||||
sLastEntry // Any new entry must be ADDED ABOVE this one
|
||||
};
|
||||
|
||||
std::array<SubMapType, SubTypes::sLastEntry> mStreamMaps;
|
||||
sLastEntry = sBookChanges // as this name implies, any new entry
|
||||
// must be ADDED ABOVE this one
|
||||
};
|
||||
std::array<SubMapType, SubTypes::sLastEntry + 1> mStreamMaps;
|
||||
|
||||
ServerFeeSummary mLastFeeSummary;
|
||||
|
||||
@@ -720,10 +711,10 @@ private:
|
||||
std::mutex validationsMutex_;
|
||||
|
||||
RCLConsensus&
|
||||
getConsensus() override;
|
||||
getConsensus();
|
||||
|
||||
LedgerMaster&
|
||||
getLedgerMaster() override;
|
||||
getLedgerMaster();
|
||||
|
||||
private:
|
||||
struct Stats
|
||||
@@ -1840,12 +1831,7 @@ NetworkOPsImp::beginConsensus(uint256 const& networkClosed)
|
||||
app_.getHashRouter());
|
||||
|
||||
if (!changes.added.empty() || !changes.removed.empty())
|
||||
{
|
||||
app_.getValidations().trustChanged(changes.added, changes.removed);
|
||||
// Update the AmendmentTable so it tracks the current validators.
|
||||
app_.getAmendmentTable().trustChanged(
|
||||
app_.validators().getQuorumKeys().second);
|
||||
}
|
||||
|
||||
mConsensus.startRound(
|
||||
app_.timeKeeper().closeTime(),
|
||||
@@ -2066,7 +2052,7 @@ NetworkOPsImp::pubServer()
|
||||
f.em->openLedgerFeeLevel,
|
||||
f.loadBaseServer,
|
||||
f.em->referenceFeeLevel)
|
||||
.value_or(ripple::muldiv_max));
|
||||
.second);
|
||||
|
||||
jvObj[jss::load_factor] = trunc32(loadFactor);
|
||||
jvObj[jss::load_factor_fee_escalation] =
|
||||
@@ -2166,10 +2152,8 @@ NetworkOPsImp::pubValidation(std::shared_ptr<STValidation> const& val)
|
||||
if (masterKey != signerPublic)
|
||||
jvObj[jss::master_key] = toBase58(TokenType::NodePublic, masterKey);
|
||||
|
||||
// NOTE *seq is a number, but old API versions used string. We replace
|
||||
// number with a string using MultiApiJson near end of this function
|
||||
if (auto const seq = (*val)[~sfLedgerSequence])
|
||||
jvObj[jss::ledger_index] = *seq;
|
||||
jvObj[jss::ledger_index] = to_string(*seq);
|
||||
|
||||
if (val->isFieldPresent(sfAmendments))
|
||||
{
|
||||
@@ -2207,28 +2191,12 @@ NetworkOPsImp::pubValidation(std::shared_ptr<STValidation> const& val)
|
||||
reserveIncXRP && reserveIncXRP->native())
|
||||
jvObj[jss::reserve_inc] = reserveIncXRP->xrp().jsonClipped();
|
||||
|
||||
// NOTE Use MultiApiJson to publish two slightly different JSON objects
|
||||
// for consumers supporting different API versions
|
||||
MultiApiJson multiObj{jvObj};
|
||||
visit<RPC::apiMinimumSupportedVersion, RPC::apiMaximumValidVersion>(
|
||||
multiObj, //
|
||||
[](Json::Value& jvTx, unsigned int apiVersion) {
|
||||
// Type conversion for older API versions to string
|
||||
if (jvTx.isMember(jss::ledger_index) && apiVersion < 2)
|
||||
{
|
||||
jvTx[jss::ledger_index] =
|
||||
std::to_string(jvTx[jss::ledger_index].asUInt());
|
||||
}
|
||||
});
|
||||
|
||||
for (auto i = mStreamMaps[sValidations].begin();
|
||||
i != mStreamMaps[sValidations].end();)
|
||||
{
|
||||
if (auto p = i->second.lock())
|
||||
{
|
||||
p->send(
|
||||
multiObj.select(apiVersionSelector(p->getApiVersion())),
|
||||
true);
|
||||
p->send(jvObj, true);
|
||||
++i;
|
||||
}
|
||||
else
|
||||
@@ -2578,7 +2546,7 @@ NetworkOPsImp::getServerInfo(bool human, bool admin, bool counters)
|
||||
escalationMetrics.openLedgerFeeLevel,
|
||||
loadBaseServer,
|
||||
escalationMetrics.referenceFeeLevel)
|
||||
.value_or(ripple::muldiv_max);
|
||||
.second;
|
||||
|
||||
auto const loadFactor = std::max(
|
||||
safe_cast<std::uint64_t>(loadFactorServer),
|
||||
@@ -2678,10 +2646,13 @@ NetworkOPsImp::getServerInfo(bool human, bool admin, bool counters)
|
||||
l[jss::reserve_inc_native] =
|
||||
lpClosed->fees().increment.decimalXRP();
|
||||
|
||||
if (auto const closeOffset = app_.timeKeeper().closeOffset();
|
||||
std::abs(closeOffset.count()) >= 60)
|
||||
l[jss::close_time_offset] =
|
||||
static_cast<std::uint32_t>(closeOffset.count());
|
||||
auto const nowOffset = app_.timeKeeper().nowOffset();
|
||||
if (std::abs(nowOffset.count()) >= 60)
|
||||
l[jss::system_time_offset] = nowOffset.count();
|
||||
|
||||
auto const closeOffset = app_.timeKeeper().closeOffset();
|
||||
if (std::abs(closeOffset.count()) >= 60)
|
||||
l[jss::close_time_offset] = closeOffset.count();
|
||||
|
||||
#if RIPPLED_REPORTING
|
||||
std::int64_t const dbAge =
|
||||
@@ -2738,51 +2709,6 @@ NetworkOPsImp::getServerInfo(bool human, bool admin, bool counters)
|
||||
info["reporting"] = app_.getReportingETL().getInfo();
|
||||
}
|
||||
|
||||
// This array must be sorted in increasing order.
|
||||
static constexpr std::array<std::string_view, 7> protocols{
|
||||
"http", "https", "peer", "ws", "ws2", "wss", "wss2"};
|
||||
static_assert(std::is_sorted(std::begin(protocols), std::end(protocols)));
|
||||
{
|
||||
Json::Value ports{Json::arrayValue};
|
||||
for (auto const& port : app_.getServerHandler().setup().ports)
|
||||
{
|
||||
// Don't publish admin ports for non-admin users
|
||||
if (!admin &&
|
||||
!(port.admin_nets_v4.empty() && port.admin_nets_v6.empty() &&
|
||||
port.admin_user.empty() && port.admin_password.empty()))
|
||||
continue;
|
||||
std::vector<std::string> proto;
|
||||
std::set_intersection(
|
||||
std::begin(port.protocol),
|
||||
std::end(port.protocol),
|
||||
std::begin(protocols),
|
||||
std::end(protocols),
|
||||
std::back_inserter(proto));
|
||||
if (!proto.empty())
|
||||
{
|
||||
auto& jv = ports.append(Json::Value(Json::objectValue));
|
||||
jv[jss::port] = std::to_string(port.port);
|
||||
jv[jss::protocol] = Json::Value{Json::arrayValue};
|
||||
for (auto const& p : proto)
|
||||
jv[jss::protocol].append(p);
|
||||
}
|
||||
}
|
||||
|
||||
if (app_.config().exists(SECTION_PORT_GRPC))
|
||||
{
|
||||
auto const& grpcSection = app_.config().section(SECTION_PORT_GRPC);
|
||||
auto const optPort = grpcSection.get("port");
|
||||
if (optPort && grpcSection.get("ip"))
|
||||
{
|
||||
auto& jv = ports.append(Json::Value(Json::objectValue));
|
||||
jv[jss::port] = *optPort;
|
||||
jv[jss::protocol] = Json::Value{Json::arrayValue};
|
||||
jv[jss::protocol].append("grpc");
|
||||
}
|
||||
}
|
||||
info[jss::ports] = std::move(ports);
|
||||
}
|
||||
|
||||
return info;
|
||||
}
|
||||
|
||||
@@ -2808,8 +2734,7 @@ NetworkOPsImp::pubProposedTransaction(
|
||||
if (hook::isEmittedTxn(*transaction))
|
||||
return;
|
||||
|
||||
MultiApiJson jvObj =
|
||||
transJson(transaction, result, false, ledger, std::nullopt);
|
||||
Json::Value jvObj = transJson(*transaction, result, false, ledger);
|
||||
|
||||
{
|
||||
std::lock_guard sl(mSubLock);
|
||||
@@ -2821,8 +2746,7 @@ NetworkOPsImp::pubProposedTransaction(
|
||||
|
||||
if (p)
|
||||
{
|
||||
p->send(
|
||||
jvObj.select(apiVersionSelector(p->getApiVersion())), true);
|
||||
p->send(jvObj, true);
|
||||
++it;
|
||||
}
|
||||
else
|
||||
@@ -3100,8 +3024,7 @@ NetworkOPsImp::pubLedger(std::shared_ptr<ReadView const> const& lpAccepted)
|
||||
for (auto const& accTx : *alpAccepted)
|
||||
{
|
||||
JLOG(m_journal.trace()) << "pubAccepted: " << accTx->getJson();
|
||||
pubValidatedTransaction(
|
||||
lpAccepted, *accTx, accTx == *(--alpAccepted->end()));
|
||||
pubValidatedTransaction(lpAccepted, *accTx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3147,13 +3070,12 @@ NetworkOPsImp::getLocalTxCount()
|
||||
|
||||
// This routine should only be used to publish accepted or validated
|
||||
// transactions.
|
||||
MultiApiJson
|
||||
Json::Value
|
||||
NetworkOPsImp::transJson(
|
||||
std::shared_ptr<STTx const> const& transaction,
|
||||
const STTx& transaction,
|
||||
TER result,
|
||||
bool validated,
|
||||
std::shared_ptr<ReadView const> const& ledger,
|
||||
std::optional<std::reference_wrapper<TxMeta const>> meta)
|
||||
std::shared_ptr<ReadView const> const& ledger)
|
||||
{
|
||||
Json::Value jvObj(Json::objectValue);
|
||||
std::string sToken;
|
||||
@@ -3162,27 +3084,16 @@ NetworkOPsImp::transJson(
|
||||
transResultInfo(result, sToken, sHuman);
|
||||
|
||||
jvObj[jss::type] = "transaction";
|
||||
// NOTE jvObj is not a finished object for either API version. After
|
||||
// it's populated, we need to finish it for a specific API version. This is
|
||||
// done in a loop, near the end of this function.
|
||||
jvObj[jss::transaction] =
|
||||
transaction->getJson(JsonOptions::disable_API_prior_V2, false);
|
||||
|
||||
if (meta)
|
||||
{
|
||||
jvObj[jss::meta] = meta->get().getJson(JsonOptions::none);
|
||||
RPC::insertDeliveredAmount(
|
||||
jvObj[jss::meta], *ledger, transaction, meta->get());
|
||||
}
|
||||
jvObj[jss::transaction] = transaction.getJson(JsonOptions::none);
|
||||
|
||||
// add CTID where the needed data for it exists
|
||||
if (auto const& lookup = ledger->txRead(transaction->getTransactionID());
|
||||
if (auto const& lookup = ledger->txRead(transaction.getTransactionID());
|
||||
lookup.second && lookup.second->isFieldPresent(sfTransactionIndex))
|
||||
{
|
||||
uint32_t txnSeq = lookup.second->getFieldU32(sfTransactionIndex);
|
||||
uint32_t netID = app_.config().NETWORK_ID;
|
||||
if (transaction->isFieldPresent(sfNetworkID))
|
||||
netID = transaction->getFieldU32(sfNetworkID);
|
||||
if (transaction.isFieldPresent(sfNetworkID))
|
||||
netID = transaction.getFieldU32(sfNetworkID);
|
||||
|
||||
if (txnSeq <= 0xFFFFU && netID < 0xFFFFU &&
|
||||
ledger->info().seq < 0xFFFFFFFUL)
|
||||
@@ -3194,16 +3105,13 @@ NetworkOPsImp::transJson(
|
||||
}
|
||||
}
|
||||
|
||||
if (!ledger->open())
|
||||
jvObj[jss::ledger_hash] = to_string(ledger->info().hash);
|
||||
|
||||
if (validated)
|
||||
{
|
||||
jvObj[jss::ledger_index] = ledger->info().seq;
|
||||
jvObj[jss::ledger_hash] = to_string(ledger->info().hash);
|
||||
jvObj[jss::transaction][jss::date] =
|
||||
ledger->info().closeTime.time_since_epoch().count();
|
||||
jvObj[jss::validated] = true;
|
||||
jvObj[jss::close_time_iso] = to_string_iso(ledger->info().closeTime);
|
||||
|
||||
// WRITEME: Put the account next seq here
|
||||
}
|
||||
@@ -3218,10 +3126,10 @@ NetworkOPsImp::transJson(
|
||||
jvObj[jss::engine_result_code] = result;
|
||||
jvObj[jss::engine_result_message] = sHuman;
|
||||
|
||||
if (transaction->getTxnType() == ttOFFER_CREATE)
|
||||
if (transaction.getTxnType() == ttOFFER_CREATE)
|
||||
{
|
||||
auto const account = transaction->getAccountID(sfAccount);
|
||||
auto const amount = transaction->getFieldAmount(sfTakerGets);
|
||||
auto const account = transaction.getAccountID(sfAccount);
|
||||
auto const amount = transaction.getFieldAmount(sfTakerGets);
|
||||
|
||||
// If the offer create is not self funded then add the owner balance
|
||||
if (account != amount.issue().account)
|
||||
@@ -3236,40 +3144,24 @@ NetworkOPsImp::transJson(
|
||||
}
|
||||
}
|
||||
|
||||
std::string const hash = to_string(transaction->getTransactionID());
|
||||
MultiApiJson multiObj{jvObj};
|
||||
visit<RPC::apiMinimumSupportedVersion, RPC::apiMaximumValidVersion>(
|
||||
multiObj, //
|
||||
[&](Json::Value& jvTx, unsigned int apiVersion) {
|
||||
RPC::insertDeliverMax(
|
||||
jvTx[jss::transaction], transaction->getTxnType(), apiVersion);
|
||||
|
||||
if (apiVersion > 1)
|
||||
{
|
||||
jvTx[jss::tx_json] = jvTx.removeMember(jss::transaction);
|
||||
jvTx[jss::hash] = hash;
|
||||
}
|
||||
else
|
||||
{
|
||||
jvTx[jss::transaction][jss::hash] = hash;
|
||||
}
|
||||
});
|
||||
|
||||
return multiObj;
|
||||
return jvObj;
|
||||
}
|
||||
|
||||
void
|
||||
NetworkOPsImp::pubValidatedTransaction(
|
||||
std::shared_ptr<ReadView const> const& ledger,
|
||||
const AcceptedLedgerTx& transaction,
|
||||
bool last)
|
||||
const AcceptedLedgerTx& transaction)
|
||||
{
|
||||
auto const& stTxn = transaction.getTxn();
|
||||
|
||||
// Create two different Json objects, for different API versions
|
||||
auto const metaRef = std::ref(transaction.getMeta());
|
||||
auto const trResult = transaction.getResult();
|
||||
MultiApiJson jvObj = transJson(stTxn, trResult, true, ledger, metaRef);
|
||||
Json::Value jvObj =
|
||||
transJson(*stTxn, transaction.getResult(), true, ledger);
|
||||
|
||||
{
|
||||
auto const& meta = transaction.getMeta();
|
||||
jvObj[jss::meta] = meta.getJson(JsonOptions::none);
|
||||
RPC::insertDeliveredAmount(jvObj[jss::meta], *ledger, stTxn, meta);
|
||||
}
|
||||
|
||||
{
|
||||
std::lock_guard sl(mSubLock);
|
||||
@@ -3281,8 +3173,7 @@ NetworkOPsImp::pubValidatedTransaction(
|
||||
|
||||
if (p)
|
||||
{
|
||||
p->send(
|
||||
jvObj.select(apiVersionSelector(p->getApiVersion())), true);
|
||||
p->send(jvObj, true);
|
||||
++it;
|
||||
}
|
||||
else
|
||||
@@ -3297,8 +3188,7 @@ NetworkOPsImp::pubValidatedTransaction(
|
||||
|
||||
if (p)
|
||||
{
|
||||
p->send(
|
||||
jvObj.select(apiVersionSelector(p->getApiVersion())), true);
|
||||
p->send(jvObj, true);
|
||||
++it;
|
||||
}
|
||||
else
|
||||
@@ -3309,14 +3199,13 @@ NetworkOPsImp::pubValidatedTransaction(
|
||||
if (isTesSuccess(transaction.getResult()))
|
||||
app_.getOrderBookDB().processTxn(ledger, transaction, jvObj);
|
||||
|
||||
pubAccountTransaction(ledger, transaction, last);
|
||||
pubAccountTransaction(ledger, transaction);
|
||||
}
|
||||
|
||||
void
|
||||
NetworkOPsImp::pubAccountTransaction(
|
||||
std::shared_ptr<ReadView const> const& ledger,
|
||||
AcceptedLedgerTx const& transaction,
|
||||
bool last)
|
||||
AcceptedLedgerTx const& transaction)
|
||||
{
|
||||
hash_set<InfoSub::pointer> notify;
|
||||
int iProposed = 0;
|
||||
@@ -3411,35 +3300,27 @@ NetworkOPsImp::pubAccountTransaction(
|
||||
{
|
||||
auto const& stTxn = transaction.getTxn();
|
||||
|
||||
// Create two different Json objects, for different API versions
|
||||
auto const metaRef = std::ref(transaction.getMeta());
|
||||
auto const trResult = transaction.getResult();
|
||||
MultiApiJson jvObj = transJson(stTxn, trResult, true, ledger, metaRef);
|
||||
Json::Value jvObj =
|
||||
transJson(*stTxn, transaction.getResult(), true, ledger);
|
||||
|
||||
for (InfoSub::ref isrListener : notify)
|
||||
{
|
||||
isrListener->send(
|
||||
jvObj.select(apiVersionSelector(isrListener->getApiVersion())),
|
||||
true);
|
||||
auto const& meta = transaction.getMeta();
|
||||
|
||||
jvObj[jss::meta] = meta.getJson(JsonOptions::none);
|
||||
RPC::insertDeliveredAmount(jvObj[jss::meta], *ledger, stTxn, meta);
|
||||
}
|
||||
|
||||
if (last)
|
||||
jvObj.set(jss::account_history_boundary, true);
|
||||
for (InfoSub::ref isrListener : notify)
|
||||
isrListener->send(jvObj, true);
|
||||
|
||||
assert(
|
||||
jvObj.isMember(jss::account_history_tx_stream) ==
|
||||
MultiApiJson::none);
|
||||
assert(!jvObj.isMember(jss::account_history_tx_stream));
|
||||
for (auto& info : accountHistoryNotify)
|
||||
{
|
||||
auto& index = info.index_;
|
||||
if (index->forwardTxIndex_ == 0 && !index->haveHistorical_)
|
||||
jvObj.set(jss::account_history_tx_first, true);
|
||||
|
||||
jvObj.set(jss::account_history_tx_index, index->forwardTxIndex_++);
|
||||
|
||||
info.sink_->send(
|
||||
jvObj.select(apiVersionSelector(info.sink_->getApiVersion())),
|
||||
true);
|
||||
jvObj[jss::account_history_tx_first] = true;
|
||||
jvObj[jss::account_history_tx_index] = index->forwardTxIndex_++;
|
||||
info.sink_->send(jvObj, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3493,26 +3374,19 @@ NetworkOPsImp::pubProposedAccountTransaction(

    if (!notify.empty() || !accountHistoryNotify.empty())
    {
        // Create two different Json objects, for different API versions
        MultiApiJson jvObj = transJson(tx, result, false, ledger, std::nullopt);
        Json::Value jvObj = transJson(*tx, result, false, ledger);

        for (InfoSub::ref isrListener : notify)
            isrListener->send(
                jvObj.select(apiVersionSelector(isrListener->getApiVersion())),
                true);
            isrListener->send(jvObj, true);

        assert(
            jvObj.isMember(jss::account_history_tx_stream) ==
            MultiApiJson::none);
        assert(!jvObj.isMember(jss::account_history_tx_stream));
        for (auto& info : accountHistoryNotify)
        {
            auto& index = info.index_;
            if (index->forwardTxIndex_ == 0 && !index->haveHistorical_)
                jvObj.set(jss::account_history_tx_first, true);
            jvObj.set(jss::account_history_tx_index, index->forwardTxIndex_++);
            info.sink_->send(
                jvObj.select(apiVersionSelector(info.sink_->getApiVersion())),
                true);
            jvObj[jss::account_history_tx_first] = true;
            jvObj[jss::account_history_tx_index] = index->forwardTxIndex_++;
            info.sink_->send(jvObj, true);
        }
    }
}
@@ -3703,7 +3577,7 @@ NetworkOPsImp::addAccountHistoryJob(SubAccountHistoryInfoWeak subInfo)

        auto send = [&](Json::Value const& jvObj,
                        bool unsubscribe) -> bool {
            if (auto sptr = subInfo.sinkWptr_.lock())
            if (auto sptr = subInfo.sinkWptr_.lock(); sptr)
            {
                sptr->send(jvObj, true);
                if (unsubscribe)
@@ -3714,22 +3588,6 @@ NetworkOPsImp::addAccountHistoryJob(SubAccountHistoryInfoWeak subInfo)
            return false;
        };

        auto sendMultiApiJson = [&](MultiApiJson const& jvObj,
                                    bool unsubscribe) -> bool {
            if (auto sptr = subInfo.sinkWptr_.lock())
            {
                sptr->send(
                    jvObj.select(apiVersionSelector(sptr->getApiVersion())),
                    true);

                if (unsubscribe)
                    unsubAccountHistory(sptr, accountId, false);
                return true;
            }

            return false;
        };

        auto getMoreTxns =
            [&](std::uint32_t minLedger,
                std::uint32_t maxLedger,
@@ -3853,11 +3711,8 @@ NetworkOPsImp::addAccountHistoryJob(SubAccountHistoryInfoWeak subInfo)

                auto const& txns = dbResult->first;
                marker = dbResult->second;
                size_t num_txns = txns.size();
                for (size_t i = 0; i < num_txns; ++i)
                for (auto const& [tx, meta] : txns)
                {
                    auto const& [tx, meta] = txns[i];

                    if (!tx || !meta)
                    {
                        JLOG(m_journal.debug())
@@ -3888,22 +3743,16 @@ NetworkOPsImp::addAccountHistoryJob(SubAccountHistoryInfoWeak subInfo)
                        send(rpcError(rpcINTERNAL), true);
                        return;
                    }

                    auto const mRef = std::ref(*meta);
                    auto const trR = meta->getResultTER();
                    MultiApiJson jvTx =
                        transJson(stTxn, trR, true, curTxLedger, mRef);

                    jvTx.set(
                        jss::account_history_tx_index, txHistoryIndex--);
                    if (i + 1 == num_txns ||
                        txns[i + 1].first->getLedger() != tx->getLedger())
                        jvTx.set(jss::account_history_boundary, true);

                    Json::Value jvTx = transJson(
                        *stTxn, meta->getResultTER(), true, curTxLedger);
                    jvTx[jss::meta] = meta->getJson(JsonOptions::none);
                    jvTx[jss::account_history_tx_index] = txHistoryIndex--;
                    RPC::insertDeliveredAmount(
                        jvTx[jss::meta], *curTxLedger, stTxn, *meta);
                    if (isFirstTx(tx, meta))
                    {
                        jvTx.set(jss::account_history_tx_first, true);
                        sendMultiApiJson(jvTx, false);
                        jvTx[jss::account_history_tx_first] = true;
                        send(jvTx, false);

                        JLOG(m_journal.trace())
                            << "AccountHistory job for account "
@@ -3913,7 +3762,7 @@ NetworkOPsImp::addAccountHistoryJob(SubAccountHistoryInfoWeak subInfo)
                    }
                    else
                    {
                        sendMultiApiJson(jvTx, false);
                        send(jvTx, false);
                    }
                }

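One detail worth calling out in the -3853 hunk above: the index-based loop variant needs i so it can look ahead to txns[i + 1] and mark the last transaction of each ledger with account_history_boundary, which a plain range-for cannot do without extra state. A small self-contained sketch of that look-ahead boundary check (plain integers stand in for the tx/meta pairs):

// Illustrative sketch only, not part of the diff.
#include <cstddef>
#include <iostream>
#include <vector>

int
main()
{
    // Ledger sequence of each transaction returned by the database job.
    std::vector<unsigned> txLedgers = {90, 90, 89, 88, 88, 88};

    for (std::size_t i = 0; i < txLedgers.size(); ++i)
    {
        // Boundary: last element overall, or next entry is in another ledger.
        bool const boundary = (i + 1 == txLedgers.size()) ||
            (txLedgers[i + 1] != txLedgers[i]);
        std::cout << "tx " << i << " ledger " << txLedgers[i]
                  << (boundary ? "  <- account_history_boundary" : "") << '\n';
    }
}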
@@ -296,6 +296,8 @@ SHAMapStoreImp::run()
    fullBelowCache_ = &(*app_.getNodeFamily().getFullBelowCache(0));
    treeNodeCache_ = &(*app_.getNodeFamily().getTreeNodeCache(0));

    bool const isMem = app_.config().mem_backend();

    if (advisoryDelete_)
        canDelete_ = state_db_.getCanDelete();
@@ -24,11 +24,10 @@
#include <ripple/beast/utility/Journal.h>
#include <ripple/protocol/ErrorCodes.h>
#include <ripple/protocol/Protocol.h>
#include <ripple/protocol/STBase.h>
#include <ripple/protocol/STTx.h>
#include <ripple/protocol/TER.h>
#include <ripple/protocol/TxMeta.h>

#include <boost/optional.hpp>
#include <optional>
#include <variant>
@@ -100,13 +99,13 @@ public:
    LedgerIndex
    getLedger() const
    {
        return mLedgerIndex;
        return mInLedger;
    }

    bool
    isValidated() const
    {
        return mLedgerIndex != 0;
        return mInLedger != 0;
    }

    TransStatus
@@ -143,7 +142,7 @@ public:
    void
    setLedger(LedgerIndex ledger)
    {
        mLedgerIndex = ledger;
        mInLedger = ledger;
    }

    /**
@@ -391,7 +390,7 @@ private:

    uint256 mTransactionID;

    LedgerIndex mLedgerIndex = 0;
    LedgerIndex mInLedger = 0;
    std::optional<uint32_t> mTxnSeq;
    std::optional<uint16_t> mNetworkID;
@@ -504,7 +504,7 @@ private:
        @param seriesSize Total number of transactions in the series to be
        processed.

        @return A `std::pair` indicating
        @return A `std::pair` as returned from @ref `mulDiv` indicating
        whether the calculation result overflows.
    */
    static std::pair<bool, FeeLevel64>
@@ -875,15 +875,20 @@ template <class T>
XRPAmount
toDrops(FeeLevel<T> const& level, XRPAmount baseFee)
{
    return mulDiv(level, baseFee, TxQ::baseLevel)
        .value_or(XRPAmount(STAmount::cMaxNativeN));
    if (auto const drops = mulDiv(level, baseFee, TxQ::baseLevel); drops.first)
        return drops.second;

    return XRPAmount(STAmount::cMaxNativeN);
}

inline FeeLevel64
toFeeLevel(XRPAmount const& drops, XRPAmount const& baseFee)
{
    return mulDiv(drops, TxQ::baseLevel, baseFee)
        .value_or(FeeLevel64(std::numeric_limits<std::uint64_t>::max()));
    if (auto const feeLevel = mulDiv(drops, TxQ::baseLevel, baseFee);
        feeLevel.first)
        return feeLevel.second;

    return FeeLevel64(std::numeric_limits<std::uint64_t>::max());
}

} // namespace ripple
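The toDrops/toFeeLevel hunks above switch between the two mulDiv calling conventions that appear throughout this diff: one returns std::optional and is consumed with value_or, the other returns std::pair<bool, T> with first signalling success. A rough, self-contained sketch of both shapes over plain 64-bit integers; the helpers below are illustrations, not the ripple::mulDiv implementations.

// Illustrative sketch only, not part of the diff.
#include <cstdint>
#include <limits>
#include <optional>
#include <utility>

// optional-style: an empty result signals overflow.
// div is assumed non-zero; uses the __int128 extension (GCC/Clang).
std::optional<std::uint64_t>
mulDivOpt(std::uint64_t value, std::uint64_t mul, std::uint64_t div)
{
    unsigned __int128 const result =
        static_cast<unsigned __int128>(value) * mul / div;
    if (result > std::numeric_limits<std::uint64_t>::max())
        return std::nullopt;
    return static_cast<std::uint64_t>(result);
}

// pair-style: first == false signals overflow.
std::pair<bool, std::uint64_t>
mulDivPair(std::uint64_t value, std::uint64_t mul, std::uint64_t div)
{
    if (auto const r = mulDivOpt(value, mul, div))
        return {true, *r};
    return {false, std::numeric_limits<std::uint64_t>::max()};
}

// The two call-site shapes seen in the hunks above, side by side.
std::uint64_t
toDropsOptStyle(std::uint64_t level, std::uint64_t baseFee, std::uint64_t baseLevel)
{
    return mulDivOpt(level, baseFee, baseLevel)
        .value_or(std::numeric_limits<std::uint64_t>::max());
}

std::uint64_t
toDropsPairStyle(std::uint64_t level, std::uint64_t baseFee, std::uint64_t baseLevel)
{
    if (auto const drops = mulDivPair(level, baseFee, baseLevel); drops.first)
        return drops.second;
    return std::numeric_limits<std::uint64_t>::max();
}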
@@ -20,10 +20,6 @@
#ifndef RIPPLE_APP_MISC_DETAIL_WORK_H_INCLUDED
#define RIPPLE_APP_MISC_DETAIL_WORK_H_INCLUDED

// TODO: This include <cstdint> is a workaround for beast compilation bug.
// Remove when fix https://github.com/boostorg/beast/pull/2682/ is available.
#include <cstdint>

#include <boost/beast/http/message.hpp>
#include <boost/beast/http/string_body.hpp>
@@ -1,206 +0,0 @@
//------------------------------------------------------------------------------
/*
    This file is part of rippled: https://github.com/ripple/rippled
    Copyright (c) 2023 Ripple Labs Inc.

    Permission to use, copy, modify, and/or distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#include <ripple/app/misc/AMMHelpers.h>

namespace ripple {

STAmount
ammLPTokens(
    STAmount const& asset1,
    STAmount const& asset2,
    Issue const& lptIssue)
{
    auto const tokens = root2(asset1 * asset2);
    return toSTAmount(lptIssue, tokens);
}

/*
 * Equation 3:
 * t = T * [(b/B - (sqrt(f2**2 + b/(B*f1)) - f2)) /
 *     (1 + sqrt(f2**2 + b/(B*f1)) - f2)]
 * where f1 = 1 - tfee, f2 = (1 - tfee/2)/f1
 */
STAmount
lpTokensIn(
    STAmount const& asset1Balance,
    STAmount const& asset1Deposit,
    STAmount const& lptAMMBalance,
    std::uint16_t tfee)
{
    auto const f1 = feeMult(tfee);
    auto const f2 = feeMultHalf(tfee) / f1;
    Number const r = asset1Deposit / asset1Balance;
    auto const c = root2(f2 * f2 + r / f1) - f2;
    auto const t = lptAMMBalance * (r - c) / (1 + c);
    return toSTAmount(lptAMMBalance.issue(), t);
}
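As a quick sanity check of Equation 3 above, here is a double-precision evaluation of the lpTokensIn formula; the 1/100000 trading-fee scale is an assumption about how tfee is expressed, and double merely stands in for Number.

// Illustrative sketch only, not part of the diff.
#include <cmath>
#include <iostream>

int
main()
{
    double const B = 10000;   // asset1Balance (pool side being deposited into)
    double const b = 100;     // asset1Deposit
    double const T = 5000;    // lptAMMBalance (outstanding LP tokens)
    double const tfee = 500;  // 0.5%, assuming fee units of 1/100000

    double const f1 = 1 - tfee / 100000;         // feeMult
    double const f2 = (1 - tfee / 200000) / f1;  // feeMultHalf / f1
    double const r = b / B;
    double const c = std::sqrt(f2 * f2 + r / f1) - f2;
    double const t = T * (r - c) / (1 + c);

    // With tfee = 0 this reduces to T * (sqrt(1 + r) - 1), i.e. the LP token
    // growth implied by the sqrt(asset1 * asset2) invariant.
    std::cout << "LP tokens issued for the deposit: " << t << '\n';
}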
/* Equation 4 solves equation 3 for b:
|
||||
* Let f1 = 1 - tfee, f2 = (1 - tfee/2)/f1, t1 = t/T, t2 = 1 + t1, R = b/B
|
||||
* then
|
||||
* t1 = [R - sqrt(f2**2 + R/f1) + f2] / [1 + sqrt(f2**2 + R/f1] - f2] =>
|
||||
* sqrt(f2**2 + R/f1)*(t1 + 1) = R + f2 + t1*f2 - t1 =>
|
||||
* sqrt(f2**2 + R/f1)*t2 = R + t2*f2 - t1 =>
|
||||
* sqrt(f2**2 + R/f1) = R/t2 + f2 - t1/t2, let d = f2 - t1/t2 =>
|
||||
* sqrt(f2**2 + R/f1) = R/t2 + d =>
|
||||
* f2**2 + R/f1 = (R/t2)**2 +2*d*R/t2 + d**2 =>
|
||||
* (R/t2)**2 + R*(2*d/t2 - 1/f1) + d**2 - f2**2 = 0
|
||||
*/
|
||||
STAmount
|
||||
ammAssetIn(
|
||||
STAmount const& asset1Balance,
|
||||
STAmount const& lptAMMBalance,
|
||||
STAmount const& lpTokens,
|
||||
std::uint16_t tfee)
|
||||
{
|
||||
auto const f1 = feeMult(tfee);
|
||||
auto const f2 = feeMultHalf(tfee) / f1;
|
||||
auto const t1 = lpTokens / lptAMMBalance;
|
||||
auto const t2 = 1 + t1;
|
||||
auto const d = f2 - t1 / t2;
|
||||
auto const a = 1 / (t2 * t2);
|
||||
auto const b = 2 * d / t2 - 1 / f1;
|
||||
auto const c = d * d - f2 * f2;
|
||||
return toSTAmount(
|
||||
asset1Balance.issue(), asset1Balance * solveQuadraticEq(a, b, c));
|
||||
}
|
||||
|
||||
/* Equation 7:
|
||||
* t = T * (c - sqrt(c**2 - 4*R))/2
|
||||
* where R = b/B, c = R*fee + 2 - fee
|
||||
*/
|
||||
STAmount
|
||||
lpTokensOut(
|
||||
STAmount const& asset1Balance,
|
||||
STAmount const& asset1Withdraw,
|
||||
STAmount const& lptAMMBalance,
|
||||
std::uint16_t tfee)
|
||||
{
|
||||
Number const fr = asset1Withdraw / asset1Balance;
|
||||
auto const f1 = getFee(tfee);
|
||||
auto const c = fr * f1 + 2 - f1;
|
||||
auto const t = lptAMMBalance * (c - root2(c * c - 4 * fr)) / 2;
|
||||
return toSTAmount(lptAMMBalance.issue(), t);
|
||||
}
|
||||
|
||||
/* Equation 8 solves equation 7 for b:
|
||||
* c - 2*t/T = sqrt(c**2 - 4*R) =>
|
||||
* c**2 - 4*c*t/T + 4*t**2/T**2 = c**2 - 4*R =>
|
||||
* -4*c*t/T + 4*t**2/T**2 = -4*R =>
|
||||
* -c*t/T + t**2/T**2 = -R -=>
|
||||
* substitute c = R*f + 2 - f =>
|
||||
* -(t/T)*(R*f + 2 - f) + (t/T)**2 = -R, let t1 = t/T =>
|
||||
* -t1*R*f -2*t1 +t1*f +t1**2 = -R =>
|
||||
* R = (t1**2 + t1*(f - 2)) / (t1*f - 1)
|
||||
*/
|
||||
STAmount
|
||||
withdrawByTokens(
|
||||
STAmount const& assetBalance,
|
||||
STAmount const& lptAMMBalance,
|
||||
STAmount const& lpTokens,
|
||||
std::uint16_t tfee)
|
||||
{
|
||||
auto const f = getFee(tfee);
|
||||
Number const t1 = lpTokens / lptAMMBalance;
|
||||
auto const b = assetBalance * (t1 * t1 - t1 * (2 - f)) / (t1 * f - 1);
|
||||
return toSTAmount(assetBalance.issue(), b);
|
||||
}
|
||||
|
||||
Number
|
||||
square(Number const& n)
|
||||
{
|
||||
return n * n;
|
||||
}
|
||||
|
||||
STAmount
|
||||
adjustLPTokens(
|
||||
STAmount const& lptAMMBalance,
|
||||
STAmount const& lpTokens,
|
||||
bool isDeposit)
|
||||
{
|
||||
// Force rounding downward to ensure adjusted tokens are less or equal
|
||||
// to requested tokens.
|
||||
saveNumberRoundMode rm(Number::setround(Number::rounding_mode::downward));
|
||||
if (isDeposit)
|
||||
return (lptAMMBalance + lpTokens) - lptAMMBalance;
|
||||
return (lpTokens - lptAMMBalance) + lptAMMBalance;
|
||||
}
|
||||
|
||||
std::tuple<STAmount, std::optional<STAmount>, STAmount>
|
||||
adjustAmountsByLPTokens(
|
||||
STAmount const& amountBalance,
|
||||
STAmount const& amount,
|
||||
std::optional<STAmount> const& amount2,
|
||||
STAmount const& lptAMMBalance,
|
||||
STAmount const& lpTokens,
|
||||
std::uint16_t tfee,
|
||||
bool isDeposit)
|
||||
{
|
||||
auto const lpTokensActual =
|
||||
adjustLPTokens(lptAMMBalance, lpTokens, isDeposit);
|
||||
|
||||
if (lpTokensActual == beast::zero)
|
||||
{
|
||||
auto const amount2Opt =
|
||||
amount2 ? std::make_optional(STAmount{}) : std::nullopt;
|
||||
return std::make_tuple(STAmount{}, amount2Opt, lpTokensActual);
|
||||
}
|
||||
|
||||
if (lpTokensActual < lpTokens)
|
||||
{
|
||||
// Equal trade
|
||||
if (amount2)
|
||||
{
|
||||
Number const fr = lpTokensActual / lpTokens;
|
||||
auto const amountActual = toSTAmount(amount.issue(), fr * amount);
|
||||
auto const amount2Actual =
|
||||
toSTAmount(amount2->issue(), fr * *amount2);
|
||||
return std::make_tuple(
|
||||
amountActual < amount ? amountActual : amount,
|
||||
amount2Actual < amount2 ? amount2Actual : amount2,
|
||||
lpTokensActual);
|
||||
}
|
||||
|
||||
// Single trade
|
||||
auto const amountActual = [&]() {
|
||||
if (isDeposit)
|
||||
return ammAssetIn(
|
||||
amountBalance, lptAMMBalance, lpTokensActual, tfee);
|
||||
else
|
||||
return withdrawByTokens(
|
||||
amountBalance, lptAMMBalance, lpTokens, tfee);
|
||||
}();
|
||||
return amountActual < amount
|
||||
? std::make_tuple(amountActual, std::nullopt, lpTokensActual)
|
||||
: std::make_tuple(amount, std::nullopt, lpTokensActual);
|
||||
}
|
||||
|
||||
assert(lpTokensActual == lpTokens);
|
||||
|
||||
return {amount, amount2, lpTokensActual};
|
||||
}
|
||||
|
||||
Number
|
||||
solveQuadraticEq(Number const& a, Number const& b, Number const& c)
|
||||
{
|
||||
return (-b + root2(b * b - 4 * a * c)) / (2 * a);
|
||||
}
|
||||
|
||||
} // namespace ripple
|
||||
@@ -1,332 +0,0 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2023 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
#include <ripple/app/misc/AMMUtils.h>
|
||||
#include <ripple/basics/Log.h>
|
||||
#include <ripple/ledger/Sandbox.h>
|
||||
#include <ripple/protocol/AMMCore.h>
|
||||
#include <ripple/protocol/STAccount.h>
|
||||
#include <ripple/protocol/STObject.h>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
std::pair<STAmount, STAmount>
|
||||
ammPoolHolds(
|
||||
ReadView const& view,
|
||||
AccountID const& ammAccountID,
|
||||
Issue const& issue1,
|
||||
Issue const& issue2,
|
||||
FreezeHandling freezeHandling,
|
||||
beast::Journal const j)
|
||||
{
|
||||
auto const assetInBalance =
|
||||
accountHolds(view, ammAccountID, issue1, freezeHandling, j);
|
||||
auto const assetOutBalance =
|
||||
accountHolds(view, ammAccountID, issue2, freezeHandling, j);
|
||||
return std::make_pair(assetInBalance, assetOutBalance);
|
||||
}
|
||||
|
||||
Expected<std::tuple<STAmount, STAmount, STAmount>, TER>
|
||||
ammHolds(
|
||||
ReadView const& view,
|
||||
SLE const& ammSle,
|
||||
std::optional<Issue> const& optIssue1,
|
||||
std::optional<Issue> const& optIssue2,
|
||||
FreezeHandling freezeHandling,
|
||||
beast::Journal const j)
|
||||
{
|
||||
auto const issues = [&]() -> std::optional<std::pair<Issue, Issue>> {
|
||||
auto const issue1 = ammSle[sfAsset];
|
||||
auto const issue2 = ammSle[sfAsset2];
|
||||
if (optIssue1 && optIssue2)
|
||||
{
|
||||
if (invalidAMMAssetPair(
|
||||
*optIssue1,
|
||||
*optIssue2,
|
||||
std::make_optional(std::make_pair(issue1, issue2))))
|
||||
{
|
||||
JLOG(j.debug()) << "ammHolds: Invalid optIssue1 or optIssue2 "
|
||||
<< *optIssue1 << " " << *optIssue2;
|
||||
return std::nullopt;
|
||||
}
|
||||
return std::make_optional(std::make_pair(*optIssue1, *optIssue2));
|
||||
}
|
||||
auto const singleIssue =
|
||||
[&issue1, &issue2, &j](
|
||||
Issue checkIssue,
|
||||
const char* label) -> std::optional<std::pair<Issue, Issue>> {
|
||||
if (checkIssue == issue1)
|
||||
return std::make_optional(std::make_pair(issue1, issue2));
|
||||
else if (checkIssue == issue2)
|
||||
return std::make_optional(std::make_pair(issue2, issue1));
|
||||
JLOG(j.debug())
|
||||
<< "ammHolds: Invalid " << label << " " << checkIssue;
|
||||
return std::nullopt;
|
||||
};
|
||||
if (optIssue1)
|
||||
{
|
||||
return singleIssue(*optIssue1, "optIssue1");
|
||||
}
|
||||
else if (optIssue2)
|
||||
{
|
||||
return singleIssue(*optIssue2, "optIssue2");
|
||||
}
|
||||
return std::make_optional(std::make_pair(issue1, issue2));
|
||||
}();
|
||||
if (!issues)
|
||||
return Unexpected(tecAMM_INVALID_TOKENS);
|
||||
auto const [asset1, asset2] = ammPoolHolds(
|
||||
view,
|
||||
ammSle.getAccountID(sfAccount),
|
||||
issues->first,
|
||||
issues->second,
|
||||
freezeHandling,
|
||||
j);
|
||||
return std::make_tuple(asset1, asset2, ammSle[sfLPTokenBalance]);
|
||||
}
|
||||
|
||||
STAmount
|
||||
ammLPHolds(
|
||||
ReadView const& view,
|
||||
Currency const& cur1,
|
||||
Currency const& cur2,
|
||||
AccountID const& ammAccount,
|
||||
AccountID const& lpAccount,
|
||||
beast::Journal const j)
|
||||
{
|
||||
return accountHolds(
|
||||
view,
|
||||
lpAccount,
|
||||
ammLPTCurrency(cur1, cur2),
|
||||
ammAccount,
|
||||
FreezeHandling::fhZERO_IF_FROZEN,
|
||||
j);
|
||||
}
|
||||
|
||||
STAmount
|
||||
ammLPHolds(
|
||||
ReadView const& view,
|
||||
SLE const& ammSle,
|
||||
AccountID const& lpAccount,
|
||||
beast::Journal const j)
|
||||
{
|
||||
return ammLPHolds(
|
||||
view,
|
||||
ammSle[sfAsset].currency,
|
||||
ammSle[sfAsset2].currency,
|
||||
ammSle[sfAccount],
|
||||
lpAccount,
|
||||
j);
|
||||
}
|
||||
|
||||
std::uint16_t
|
||||
getTradingFee(ReadView const& view, SLE const& ammSle, AccountID const& account)
|
||||
{
|
||||
using namespace std::chrono;
|
||||
assert(
|
||||
!view.rules().enabled(fixInnerObjTemplate) ||
|
||||
ammSle.isFieldPresent(sfAuctionSlot));
|
||||
if (ammSle.isFieldPresent(sfAuctionSlot))
|
||||
{
|
||||
auto const& auctionSlot =
|
||||
static_cast<STObject const&>(ammSle.peekAtField(sfAuctionSlot));
|
||||
// Not expired
|
||||
if (auto const expiration = auctionSlot[~sfExpiration];
|
||||
duration_cast<seconds>(
|
||||
view.info().parentCloseTime.time_since_epoch())
|
||||
.count() < expiration)
|
||||
{
|
||||
if (auctionSlot[~sfAccount] == account)
|
||||
return auctionSlot[sfDiscountedFee];
|
||||
if (auctionSlot.isFieldPresent(sfAuthAccounts))
|
||||
{
|
||||
for (auto const& acct :
|
||||
auctionSlot.getFieldArray(sfAuthAccounts))
|
||||
if (acct[~sfAccount] == account)
|
||||
return auctionSlot[sfDiscountedFee];
|
||||
}
|
||||
}
|
||||
}
|
||||
return ammSle[sfTradingFee];
|
||||
}
|
||||
|
||||
STAmount
|
||||
ammAccountHolds(
|
||||
ReadView const& view,
|
||||
AccountID const& ammAccountID,
|
||||
Issue const& issue)
|
||||
{
|
||||
if (isXRP(issue))
|
||||
{
|
||||
if (auto const sle = view.read(keylet::account(ammAccountID)))
|
||||
return (*sle)[sfBalance];
|
||||
}
|
||||
else if (auto const sle = view.read(
|
||||
keylet::line(ammAccountID, issue.account, issue.currency));
|
||||
sle &&
|
||||
!isFrozen(view, ammAccountID, issue.currency, issue.account))
|
||||
{
|
||||
auto amount = (*sle)[sfBalance];
|
||||
if (ammAccountID > issue.account)
|
||||
amount.negate();
|
||||
amount.setIssuer(issue.account);
|
||||
return amount;
|
||||
}
|
||||
|
||||
return STAmount{issue};
|
||||
}
|
||||
|
||||
static TER
|
||||
deleteAMMTrustLines(
|
||||
Sandbox& sb,
|
||||
AccountID const& ammAccountID,
|
||||
std::uint16_t maxTrustlinesToDelete,
|
||||
beast::Journal j)
|
||||
{
|
||||
return cleanupOnAccountDelete(
|
||||
sb,
|
||||
keylet::ownerDir(ammAccountID),
|
||||
[&](LedgerEntryType nodeType,
|
||||
uint256 const&,
|
||||
std::shared_ptr<SLE>& sleItem) -> std::pair<TER, SkipEntry> {
|
||||
// Skip AMM
|
||||
if (nodeType == LedgerEntryType::ltAMM)
|
||||
return {tesSUCCESS, SkipEntry::Yes};
|
||||
// Should only have the trustlines
|
||||
if (nodeType != LedgerEntryType::ltRIPPLE_STATE)
|
||||
{
|
||||
JLOG(j.error())
|
||||
<< "deleteAMMTrustLines: deleting non-trustline "
|
||||
<< nodeType;
|
||||
return {tecINTERNAL, SkipEntry::No};
|
||||
}
|
||||
|
||||
// Trustlines must have zero balance
|
||||
if (sleItem->getFieldAmount(sfBalance) != beast::zero)
|
||||
{
|
||||
JLOG(j.error())
|
||||
<< "deleteAMMTrustLines: deleting trustline with "
|
||||
"non-zero balance.";
|
||||
return {tecINTERNAL, SkipEntry::No};
|
||||
}
|
||||
|
||||
return {
|
||||
deleteAMMTrustLine(sb, sleItem, ammAccountID, j),
|
||||
SkipEntry::No};
|
||||
},
|
||||
j,
|
||||
maxTrustlinesToDelete);
|
||||
}
|
||||
|
||||
TER
|
||||
deleteAMMAccount(
|
||||
Sandbox& sb,
|
||||
Issue const& asset,
|
||||
Issue const& asset2,
|
||||
beast::Journal j)
|
||||
{
|
||||
auto ammSle = sb.peek(keylet::amm(asset, asset2));
|
||||
if (!ammSle)
|
||||
{
|
||||
JLOG(j.error()) << "deleteAMMAccount: AMM object does not exist "
|
||||
<< asset << " " << asset2;
|
||||
return tecINTERNAL;
|
||||
}
|
||||
|
||||
auto const ammAccountID = (*ammSle)[sfAccount];
|
||||
auto sleAMMRoot = sb.peek(keylet::account(ammAccountID));
|
||||
if (!sleAMMRoot)
|
||||
{
|
||||
JLOG(j.error()) << "deleteAMMAccount: AMM account does not exist "
|
||||
<< to_string(ammAccountID);
|
||||
return tecINTERNAL;
|
||||
}
|
||||
|
||||
if (auto const ter =
|
||||
deleteAMMTrustLines(sb, ammAccountID, maxDeletableAMMTrustLines, j);
|
||||
ter != tesSUCCESS)
|
||||
return ter;
|
||||
|
||||
auto const ownerDirKeylet = keylet::ownerDir(ammAccountID);
|
||||
if (!sb.dirRemove(
|
||||
ownerDirKeylet, (*ammSle)[sfOwnerNode], ammSle->key(), false))
|
||||
{
|
||||
JLOG(j.error()) << "deleteAMMAccount: failed to remove dir link";
|
||||
return tecINTERNAL;
|
||||
}
|
||||
if (sb.exists(ownerDirKeylet) && !sb.emptyDirDelete(ownerDirKeylet))
|
||||
{
|
||||
JLOG(j.error()) << "deleteAMMAccount: cannot delete root dir node of "
|
||||
<< toBase58(ammAccountID);
|
||||
return tecINTERNAL;
|
||||
}
|
||||
|
||||
sb.erase(ammSle);
|
||||
sb.erase(sleAMMRoot);
|
||||
|
||||
return tesSUCCESS;
|
||||
}
|
||||
|
||||
void
|
||||
initializeFeeAuctionVote(
|
||||
ApplyView& view,
|
||||
std::shared_ptr<SLE>& ammSle,
|
||||
AccountID const& account,
|
||||
Issue const& lptIssue,
|
||||
std::uint16_t tfee)
|
||||
{
|
||||
auto const& rules = view.rules();
|
||||
// AMM creator gets the voting slot.
|
||||
STArray voteSlots;
|
||||
STObject voteEntry = STObject::makeInnerObject(sfVoteEntry, rules);
|
||||
if (tfee != 0)
|
||||
voteEntry.setFieldU16(sfTradingFee, tfee);
|
||||
voteEntry.setFieldU32(sfVoteWeight, VOTE_WEIGHT_SCALE_FACTOR);
|
||||
voteEntry.setAccountID(sfAccount, account);
|
||||
voteSlots.push_back(voteEntry);
|
||||
ammSle->setFieldArray(sfVoteSlots, voteSlots);
|
||||
// AMM creator gets the auction slot for free.
|
||||
// AuctionSlot is created on AMMCreate and updated on AMMDeposit
|
||||
// when AMM is in an empty state
|
||||
if (rules.enabled(fixInnerObjTemplate) &&
|
||||
!ammSle->isFieldPresent(sfAuctionSlot))
|
||||
{
|
||||
STObject auctionSlot = STObject::makeInnerObject(sfAuctionSlot, rules);
|
||||
ammSle->set(std::move(auctionSlot));
|
||||
}
|
||||
STObject& auctionSlot = ammSle->peekFieldObject(sfAuctionSlot);
|
||||
auctionSlot.setAccountID(sfAccount, account);
|
||||
// current + sec in 24h
|
||||
auto const expiration = std::chrono::duration_cast<std::chrono::seconds>(
|
||||
view.info().parentCloseTime.time_since_epoch())
|
||||
.count() +
|
||||
TOTAL_TIME_SLOT_SECS;
|
||||
auctionSlot.setFieldU32(sfExpiration, expiration);
|
||||
auctionSlot.setFieldAmount(sfPrice, STAmount{lptIssue, 0});
|
||||
// Set the fee
|
||||
if (tfee != 0)
|
||||
ammSle->setFieldU16(sfTradingFee, tfee);
|
||||
else if (ammSle->isFieldPresent(sfTradingFee))
|
||||
ammSle->makeFieldAbsent(sfTradingFee);
|
||||
if (auto const dfee = tfee / AUCTION_SLOT_DISCOUNTED_FEE_FRACTION)
|
||||
auctionSlot.setFieldU16(sfDiscountedFee, dfee);
|
||||
else if (auctionSlot.isFieldPresent(sfDiscountedFee))
|
||||
auctionSlot.makeFieldAbsent(sfDiscountedFee);
|
||||
}
|
||||
|
||||
} // namespace ripple
|
||||
@@ -67,155 +67,6 @@ parseSection(Section const& section)
    return names;
}

/** TrustedVotes records the most recent votes from trusted validators.
    We keep a record in an effort to avoid "flapping" while amendment voting
    is in process.

    If a trusted validator loses synchronization near a flag ledger their
    amendment votes may be lost during that round.  If the validator is a
    bit flaky, then this can cause an amendment to appear to repeatedly
    gain and lose support.

    TrustedVotes addresses the problem by holding on to the last vote seen
    from every trusted validator.  So if any given validator is off line near
    a flag ledger we can assume that they did not change their vote.

    If we haven't seen any STValidations from a validator for several hours we
    lose confidence that the validator hasn't changed their position.  So
    there's a timeout.  We remove upVotes if they haven't been updated in
    several hours.
*/
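A self-contained sketch of the bookkeeping the comment above describes: remember the last up-votes seen per trusted validator, stamp each record with an expiry, and clear any record that has not been refreshed within the timeout. The 24h value mirrors the expiresAfter constant used later in recordVotes; the key and vote types here are simplified stand-ins for the rippled ones.

// Illustrative sketch only, not part of the diff.
#include <chrono>
#include <string>
#include <unordered_map>
#include <vector>

using Clock = std::chrono::system_clock;

struct UpvotesAndTimeout
{
    std::vector<std::string> upVotes;  // amendment hashes (stand-in type)
    Clock::time_point timeout = Clock::time_point::max();
};

class TrustedVotesSketch
{
    std::unordered_map<std::string, UpvotesAndTimeout> recordedVotes_;
    static constexpr std::chrono::hours expiresAfter{24};

public:
    // Replace a validator's recorded votes and push its expiry forward.
    void
    recordVotes(
        std::string const& validator,
        std::vector<std::string> upVotes,
        Clock::time_point closeTime)
    {
        auto& rec = recordedVotes_[validator];
        rec.upVotes = std::move(upVotes);
        rec.timeout = closeTime + expiresAfter;
    }

    // Treat a validator whose record expired as voting "no" on everything.
    void
    expire(Clock::time_point closeTime)
    {
        for (auto& entry : recordedVotes_)
        {
            auto& rec = entry.second;
            if (closeTime > rec.timeout)
            {
                rec.upVotes.clear();
                rec.timeout = Clock::time_point::max();
            }
        }
    }
};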
class TrustedVotes
|
||||
{
|
||||
private:
|
||||
static constexpr NetClock::time_point maxTimeout =
|
||||
NetClock::time_point::max();
|
||||
|
||||
// Associates each trusted validator with the last votes we saw from them
|
||||
// and an expiration for that record.
|
||||
struct UpvotesAndTimeout
|
||||
{
|
||||
std::vector<uint256> upVotes;
|
||||
NetClock::time_point timeout = maxTimeout;
|
||||
};
|
||||
hash_map<PublicKey, UpvotesAndTimeout> recordedVotes_;
|
||||
|
||||
public:
|
||||
TrustedVotes() = default;
|
||||
TrustedVotes(TrustedVotes const& rhs) = delete;
|
||||
TrustedVotes&
|
||||
operator=(TrustedVotes const& rhs) = delete;
|
||||
|
||||
// Called when the list of trusted validators changes.
|
||||
//
|
||||
// Call with AmendmentTable::mutex_ locked.
|
||||
void
|
||||
trustChanged(
|
||||
hash_set<PublicKey> const& allTrusted,
|
||||
std::lock_guard<std::mutex> const& lock)
|
||||
{
|
||||
decltype(recordedVotes_) newRecordedVotes;
|
||||
newRecordedVotes.reserve(allTrusted.size());
|
||||
|
||||
// Make sure every PublicKey in allTrusted is represented in
|
||||
// recordedVotes_. Also make sure recordedVotes_ contains
|
||||
// no additional PublicKeys.
|
||||
for (auto& trusted : allTrusted)
|
||||
{
|
||||
if (recordedVotes_.contains(trusted))
|
||||
{
|
||||
// Preserve this validator's previously saved voting state.
|
||||
newRecordedVotes.insert(recordedVotes_.extract(trusted));
|
||||
}
|
||||
else
|
||||
{
|
||||
// New validators have a starting position of no on everything.
|
||||
// Add the entry with an empty vector and maxTimeout.
|
||||
newRecordedVotes[trusted];
|
||||
}
|
||||
}
|
||||
// The votes of any no-longer-trusted validators will be destroyed
|
||||
// when changedTrustedVotes goes out of scope.
|
||||
recordedVotes_.swap(newRecordedVotes);
|
||||
}
|
||||
|
||||
// Called when we receive the latest votes.
|
||||
//
|
||||
// Call with AmendmentTable::mutex_ locked.
|
||||
void
|
||||
recordVotes(
|
||||
Rules const& rules,
|
||||
std::vector<std::shared_ptr<STValidation>> const& valSet,
|
||||
NetClock::time_point const closeTime,
|
||||
std::lock_guard<std::mutex> const& lock)
|
||||
{
|
||||
// When we get an STValidation we save the upVotes it contains, but
|
||||
// we also set an expiration for those upVotes. The following constant
|
||||
// controls the timeout.
|
||||
//
|
||||
// There really is no "best" timeout to choose for when we finally
|
||||
// lose confidence that we know how a validator is voting. But part
|
||||
// of the point of recording validator votes is to avoid flapping of
|
||||
// amendment votes. A 24h timeout says that we will change the local
|
||||
// record of a validator's vote to "no" 24h after the last vote seen
|
||||
// from that validator. So flapping due to that validator being off
|
||||
// line will happen less frequently than every 24 hours.
|
||||
using namespace std::chrono_literals;
|
||||
static constexpr NetClock::duration expiresAfter = 24h;
|
||||
|
||||
// Walk all validations and replace previous votes from trusted
|
||||
// validators with these newest votes.
|
||||
for (auto const& val : valSet)
|
||||
{
|
||||
// If this validation comes from one of our trusted validators...
|
||||
if (auto const iter = recordedVotes_.find(val->getSignerPublic());
|
||||
iter != recordedVotes_.end())
|
||||
{
|
||||
iter->second.timeout = closeTime + expiresAfter;
|
||||
if (val->isFieldPresent(sfAmendments))
|
||||
{
|
||||
auto const& choices = val->getFieldV256(sfAmendments);
|
||||
iter->second.upVotes.assign(choices.begin(), choices.end());
|
||||
}
|
||||
else
|
||||
{
|
||||
// This validator does not upVote any amendments right now.
|
||||
iter->second.upVotes.clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Now remove any expired records from recordedVotes_.
|
||||
std::for_each(
|
||||
recordedVotes_.begin(),
|
||||
recordedVotes_.end(),
|
||||
[&closeTime](decltype(recordedVotes_)::value_type& votes) {
|
||||
if (closeTime > votes.second.timeout)
|
||||
{
|
||||
votes.second.timeout = maxTimeout;
|
||||
votes.second.upVotes.clear();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Return the information needed by AmendmentSet to determine votes.
|
||||
//
|
||||
// Call with AmendmentTable::mutex_ locked.
|
||||
[[nodiscard]] std::pair<int, hash_map<uint256, int>>
|
||||
getVotes(Rules const& rules, std::lock_guard<std::mutex> const& lock) const
|
||||
{
|
||||
hash_map<uint256, int> ret;
|
||||
for (auto& validatorVotes : recordedVotes_)
|
||||
{
|
||||
for (uint256 const& amendment : validatorVotes.second.upVotes)
|
||||
{
|
||||
ret[amendment] += 1;
|
||||
}
|
||||
}
|
||||
return {recordedVotes_.size(), ret};
|
||||
}
|
||||
};
|
||||
|
||||
/** Current state of an amendment.
|
||||
Tells if a amendment is supported, enabled or vetoed. A vetoed amendment
|
||||
means the node will never announce its support.
|
||||
@@ -253,9 +104,30 @@ private:
|
||||
// number of votes needed
|
||||
int threshold_ = 0;
|
||||
|
||||
void
|
||||
computeThreshold(int trustedValidations, Rules const& rules)
|
||||
public:
|
||||
AmendmentSet(
|
||||
Rules const& rules,
|
||||
std::vector<std::shared_ptr<STValidation>> const& valSet)
|
||||
: rules_(rules)
|
||||
{
|
||||
// process validations for ledger before flag ledger
|
||||
for (auto const& val : valSet)
|
||||
{
|
||||
if (val->isTrusted())
|
||||
{
|
||||
if (val->isFieldPresent(sfAmendments))
|
||||
{
|
||||
auto const choices = val->getFieldV256(sfAmendments);
|
||||
std::for_each(
|
||||
choices.begin(),
|
||||
choices.end(),
|
||||
[&](auto const& amendment) { ++votes_[amendment]; });
|
||||
}
|
||||
|
||||
++trustedValidations_;
|
||||
}
|
||||
}
|
||||
|
||||
threshold_ = !rules_.enabled(fixAmendmentMajorityCalc)
|
||||
? std::max(
|
||||
1L,
|
||||
@@ -271,22 +143,6 @@ private:
|
||||
postFixAmendmentMajorityCalcThreshold.den));
|
||||
}
|
||||
|
||||
public:
|
||||
AmendmentSet(
|
||||
Rules const& rules,
|
||||
TrustedVotes const& trustedVotes,
|
||||
std::lock_guard<std::mutex> const& lock)
|
||||
: rules_(rules)
|
||||
{
|
||||
// process validations for ledger before flag ledger.
|
||||
auto [trustedCount, newVotes] = trustedVotes.getVotes(rules, lock);
|
||||
|
||||
trustedValidations_ = trustedCount;
|
||||
votes_.swap(newVotes);
|
||||
|
||||
computeThreshold(trustedValidations_, rules);
|
||||
}
|
||||
|
||||
bool
|
||||
passes(uint256 const& amendment) const
|
||||
{
|
||||
@@ -347,9 +203,6 @@ private:
|
||||
hash_map<uint256, AmendmentState> amendmentMap_;
|
||||
std::uint32_t lastUpdateSeq_;
|
||||
|
||||
// Record of the last votes seen from trusted validators.
|
||||
TrustedVotes previousTrustedVotes_;
|
||||
|
||||
// Time that an amendment must hold a majority for
|
||||
std::chrono::seconds const majorityTime_;
|
||||
|
||||
@@ -441,9 +294,6 @@ public:
|
||||
std::set<uint256> const& enabled,
|
||||
majorityAmendments_t const& majority) override;
|
||||
|
||||
void
|
||||
trustChanged(hash_set<PublicKey> const& allTrusted) override;
|
||||
|
||||
std::vector<uint256>
|
||||
doValidation(std::set<uint256> const& enabledAmendments) const override;
|
||||
|
||||
@@ -783,14 +633,8 @@ AmendmentTableImpl::doVoting(
|
||||
<< ": " << enabledAmendments.size() << ", "
|
||||
<< majorityAmendments.size() << ", " << valSet.size();
|
||||
|
||||
std::lock_guard lock(mutex_);
|
||||
auto vote = std::make_unique<AmendmentSet>(rules, valSet);
|
||||
|
||||
// Keep a record of the votes we received.
|
||||
previousTrustedVotes_.recordVotes(rules, valSet, closeTime, lock);
|
||||
|
||||
// Tally the most recent votes.
|
||||
auto vote =
|
||||
std::make_unique<AmendmentSet>(rules, previousTrustedVotes_, lock);
|
||||
JLOG(j_.debug()) << "Received " << vote->trustedValidations()
|
||||
<< " trusted validations, threshold is: "
|
||||
<< vote->threshold();
|
||||
@@ -799,6 +643,8 @@ AmendmentTableImpl::doVoting(
|
||||
// the value of the flags in the pseudo-transaction
|
||||
std::map<uint256, std::uint32_t> actions;
|
||||
|
||||
std::lock_guard lock(mutex_);
|
||||
|
||||
// process all amendments we know of
|
||||
for (auto const& entry : amendmentMap_)
|
||||
{
|
||||
@@ -894,13 +740,6 @@ AmendmentTableImpl::doValidatedLedger(
|
||||
firstUnsupportedExpected_ = *firstUnsupportedExpected_ + majorityTime_;
|
||||
}
|
||||
|
||||
void
|
||||
AmendmentTableImpl::trustChanged(hash_set<PublicKey> const& allTrusted)
|
||||
{
|
||||
std::lock_guard lock(mutex_);
|
||||
previousTrustedVotes_.trustChanged(allTrusted, lock);
|
||||
}
|
||||
|
||||
void
|
||||
AmendmentTableImpl::injectJson(
|
||||
Json::Value& v,
|
||||
|
||||
@@ -1,42 +0,0 @@
//------------------------------------------------------------------------------
/*
    This file is part of rippled: https://github.com/ripple/rippled
    Copyright (c) 2023 Ripple Labs Inc.

    Permission to use, copy, modify, and/or distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#include <ripple/app/misc/DeliverMax.h>

#include <ripple/protocol/jss.h>

namespace ripple {
namespace RPC {

void
insertDeliverMax(Json::Value& tx_json, TxType txnType, unsigned int apiVersion)
{
    if (tx_json.isMember(jss::Amount))
    {
        if (txnType == ttPAYMENT)
        {
            tx_json[jss::DeliverMax] = tx_json[jss::Amount];
            if (apiVersion > 1)
                tx_json.removeMember(jss::Amount);
        }
    }
}

} // namespace RPC
} // namespace ripple
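The effect of insertDeliverMax above is easiest to see on a concrete transaction object: for a Payment, Amount is mirrored into DeliverMax, and API version 2 and later additionally drop the deprecated Amount field. A rough sketch of that behaviour with a string map standing in for Json::Value:

// Illustrative sketch only, not part of the diff.
#include <iostream>
#include <map>
#include <string>

using FakeJson = std::map<std::string, std::string>;

void
insertDeliverMaxSketch(FakeJson& tx, bool isPayment, unsigned apiVersion)
{
    auto const it = tx.find("Amount");
    if (it != tx.end() && isPayment)
    {
        tx["DeliverMax"] = it->second;  // mirror Amount into DeliverMax
        if (apiVersion > 1)
            tx.erase("Amount");         // v2+ drops the deprecated field
    }
}

int
main()
{
    FakeJson v1{{"Amount", "10"}, {"TransactionType", "Payment"}};
    FakeJson v2 = v1;
    insertDeliverMaxSketch(v1, true, 1);  // keeps Amount, adds DeliverMax
    insertDeliverMaxSketch(v2, true, 2);  // replaces Amount with DeliverMax
    std::cout << "v1 has Amount: " << v1.count("Amount")
              << ", v2 has Amount: " << v2.count("Amount") << '\n';
}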
@@ -109,9 +109,9 @@ scaleFeeLoad(

    auto const result = mulDiv(
        fee, feeFactor, safe_cast<std::uint64_t>(feeTrack.getLoadBase()));
    if (!result)
    if (!result.first)
        Throw<std::overflow_error>("scaleFeeLoad");
    return *result;
    return result.second;
}

} // namespace ripple
@@ -67,7 +67,7 @@ Transaction::setStatus(
    std::optional<std::uint16_t> netID)
{
    mStatus = ts;
    mLedgerIndex = lseq;
    mInLedger = lseq;
    if (tseq)
        mTxnSeq = tseq;
    if (netID)
@@ -176,26 +176,16 @@ Transaction::load(
Json::Value
Transaction::getJson(JsonOptions options, bool binary) const
{
    // Note, we explicitly suppress `include_date` option here
    Json::Value ret(
        mTransaction->getJson(options & ~JsonOptions::include_date, binary));
    Json::Value ret(mTransaction->getJson(JsonOptions::none, binary));

    // NOTE Binary STTx::getJson output might not be a JSON object
    if (ret.isObject() && mLedgerIndex)
    if (mInLedger)
    {
        if (!(options & JsonOptions::disable_API_prior_V2))
        {
            // Behaviour before API version 2
            ret[jss::inLedger] = mLedgerIndex;
        }
        ret[jss::inLedger] = mInLedger;  // Deprecated.
        ret[jss::ledger_index] = mInLedger;

        // TODO: disable_API_prior_V3 to disable output of both `date` and
        // `ledger_index` elements (taking precedence over include_date)
        ret[jss::ledger_index] = mLedgerIndex;

        if (options & JsonOptions::include_date)
        if (options == JsonOptions::include_date)
        {
            auto ct = mApp.getLedgerMaster().getCloseTimeBySeq(mLedgerIndex);
            auto ct = mApp.getLedgerMaster().getCloseTimeBySeq(mInLedger);
            if (ct)
                ret[jss::date] = ct->time_since_epoch().count();
        }
@@ -207,10 +197,10 @@ Transaction::getJson(JsonOptions options, bool binary) const
        netID = mTransaction->getFieldU32(sfNetworkID);

        if (mTxnSeq && netID && *mTxnSeq <= 0xFFFFU && *netID < 0xFFFFU &&
            mLedgerIndex < 0xFFFFFFFUL)
            mInLedger < 0xFFFFFFFUL)
        {
            std::optional<std::string> ctid =
                RPC::encodeCTID(mLedgerIndex, *mTxnSeq, *netID);
                RPC::encodeCTID(mInLedger, *mTxnSeq, *netID);
            if (ctid)
                ret[jss::ctid] = *ctid;
        }
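A behavioural nuance in the getJson hunks above: testing the option with a bitwise AND (options & JsonOptions::include_date) fires whenever that bit is set, whereas comparing with == only fires when include_date is the sole flag present. A small stand-alone illustration with a hypothetical flags enum:

// Illustrative sketch only, not part of the diff; JsonOptionsSketch is a
// hypothetical stand-in for the real JsonOptions flag type.
#include <iostream>

enum JsonOptionsSketch : unsigned {
    none = 0,
    include_date = 1,
    disable_API_prior_V2 = 2,
};

int
main()
{
    unsigned const options = include_date | disable_API_prior_V2;

    bool const bitwiseHit = (options & include_date) != 0;  // true
    bool const equalityHit = (options == include_date);     // false

    std::cout << "bitwise test: " << bitwiseHit
              << ", equality test: " << equalityHit << '\n';
}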
@@ -61,8 +61,12 @@ getFeeLevelPaid(ReadView const& view, STTx const& tx)
        return FeeLevel64(0);
    }

    return mulDiv(effectiveFeePaid, TxQ::baseLevel, baseFee)
        .value_or(FeeLevel64(std::numeric_limits<std::uint64_t>::max()));
    if (std::pair<bool, FeeLevel64> const feeLevelPaid =
            mulDiv(effectiveFeePaid, TxQ::baseLevel, baseFee);
        feeLevelPaid.first)
        return feeLevelPaid.second;

    return FeeLevel64(std::numeric_limits<std::uint64_t>::max());
}

static std::optional<LedgerIndex>
@@ -84,8 +88,7 @@ getFirstLedgerSequence(STTx const& tx)
static FeeLevel64
increase(FeeLevel64 level, std::uint32_t increasePercent)
{
    return mulDiv(level, 100 + increasePercent, 100)
        .value_or(static_cast<FeeLevel64>(ripple::muldiv_max));
    return mulDiv(level, 100 + increasePercent, 100).second;
}

//////////////////////////////////////////////////////////////////////////
@@ -129,19 +132,16 @@ TxQ::FeeMetrics::update(
        // upperLimit must be >= minimumTxnCount_ or std::clamp can give
        // unexpected results
        auto const upperLimit = std::max<std::uint64_t>(
            mulDiv(txnsExpected_, cutPct, 100).value_or(ripple::muldiv_max),
            minimumTxnCount_);
            mulDiv(txnsExpected_, cutPct, 100).second, minimumTxnCount_);
        txnsExpected_ = std::clamp<std::uint64_t>(
            mulDiv(size, cutPct, 100).value_or(ripple::muldiv_max),
            minimumTxnCount_,
            upperLimit);
            mulDiv(size, cutPct, 100).second, minimumTxnCount_, upperLimit);
        recentTxnCounts_.clear();
    }
    else if (size > txnsExpected_ || size > targetTxnCount_)
    {
        recentTxnCounts_.push_back(
            mulDiv(size, 100 + setup.normalConsensusIncreasePercent, 100)
                .value_or(ripple::muldiv_max));
                .second);
        auto const iter =
            std::max_element(recentTxnCounts_.begin(), recentTxnCounts_.end());
        BOOST_ASSERT(iter != recentTxnCounts_.end());
@@ -199,8 +199,7 @@ TxQ::FeeMetrics::scaleFeeLevel(Snapshot const& snapshot, OpenView const& view)
    {
        // Compute escalated fee level
        // Don't care about the overflow flag
        return mulDiv(multiplier, current * current, target * target)
            .value_or(static_cast<FeeLevel64>(ripple::muldiv_max));
        return mulDiv(multiplier, current * current, target * target).second;
    }

    return baseLevel;
@@ -283,7 +282,7 @@ TxQ::FeeMetrics::escalatedSeriesFeeLevel(
    auto const totalFeeLevel = mulDiv(
        multiplier, sumNlast.second - sumNcurrent.second, target * target);

    return {totalFeeLevel.has_value(), *totalFeeLevel};
    return totalFeeLevel;
}

LedgerHash TxQ::MaybeTx::parentHashComp{};
@@ -1996,11 +1995,8 @@ TxQ::getTxRequiredFeeAndSeq(

    std::uint32_t const accountSeq = sle ? (*sle)[sfSequence] : 0;
    std::uint32_t const availableSeq = nextQueuableSeqImpl(sle, lock).value();
    return {
        mulDiv(fee, baseFee, baseLevel)
            .value_or(XRPAmount(std::numeric_limits<std::int64_t>::max())),
        accountSeq,
        availableSeq};

    return {mulDiv(fee, baseFee, baseLevel).second, accountSeq, availableSeq};
}

std::vector<TxQ::TxDetails>
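The scaleFeeLevel hunk above preserves the escalation rule itself and only changes how overflow is reported: once the open ledger exceeds the target size, the required fee level grows as multiplier * current * current / (target * target). A quick numeric illustration; the multiplier and target values below are placeholders, not rippled defaults.

// Illustrative sketch only, not part of the diff. Values are placeholders.
#include <cstdint>
#include <iostream>

int
main()
{
    std::uint64_t const baseLevel = 256;   // fee level of a minimum-fee txn
    std::uint64_t const multiplier = 256;  // escalation multiplier (placeholder)
    std::uint64_t const target = 50;       // target txns per open ledger

    for (std::uint64_t current : {50, 75, 100, 200})
    {
        // Quadratic escalation once the open ledger is past the target.
        std::uint64_t const level = current <= target
            ? baseLevel
            : multiplier * current * current / (target * target);
        std::cout << current << " txns in the open ledger -> fee level "
                  << level << '\n';
    }
}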
@@ -1,119 +0,0 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2023 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#ifndef RIPPLE_APP_PATHS_AMMCONTEXT_H_INCLUDED
|
||||
#define RIPPLE_APP_PATHS_AMMCONTEXT_H_INCLUDED
|
||||
|
||||
#include <ripple/protocol/AccountID.h>
|
||||
|
||||
#include <cstdint>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
/** Maintains AMM info per overall payment engine execution and
|
||||
* individual iteration.
|
||||
* Only one instance of this class is created in Flow.cpp::flow().
|
||||
* The reference is percolated through calls to AMMLiquidity class,
|
||||
* which handles AMM offer generation.
|
||||
*/
|
||||
class AMMContext
|
||||
{
|
||||
public:
|
||||
// Restrict number of AMM offers. If this restriction is removed
|
||||
// then need to restrict in some other way because AMM offers are
|
||||
// not counted in the BookStep offer counter.
|
||||
constexpr static std::uint8_t MaxIterations = 30;
|
||||
|
||||
private:
|
||||
// Tx account owner is required to get the AMM trading fee in BookStep
|
||||
AccountID account_;
|
||||
// true if payment has multiple paths
|
||||
bool multiPath_{false};
|
||||
// Is true if AMM offer is consumed during a payment engine iteration.
|
||||
bool ammUsed_{false};
|
||||
// Counter of payment engine iterations with consumed AMM
|
||||
std::uint16_t ammIters_{0};
|
||||
|
||||
public:
|
||||
AMMContext(AccountID const& account, bool multiPath)
|
||||
: account_(account), multiPath_(multiPath)
|
||||
{
|
||||
}
|
||||
~AMMContext() = default;
|
||||
AMMContext(AMMContext const&) = delete;
|
||||
AMMContext&
|
||||
operator=(AMMContext const&) = delete;
|
||||
|
||||
bool
|
||||
multiPath() const
|
||||
{
|
||||
return multiPath_;
|
||||
}
|
||||
|
||||
void
|
||||
setMultiPath(bool fs)
|
||||
{
|
||||
multiPath_ = fs;
|
||||
}
|
||||
|
||||
void
|
||||
setAMMUsed()
|
||||
{
|
||||
ammUsed_ = true;
|
||||
}
|
||||
|
||||
void
|
||||
update()
|
||||
{
|
||||
if (ammUsed_)
|
||||
++ammIters_;
|
||||
ammUsed_ = false;
|
||||
}
|
||||
|
||||
bool
|
||||
maxItersReached() const
|
||||
{
|
||||
return ammIters_ >= MaxIterations;
|
||||
}
|
||||
|
||||
std::uint16_t
|
||||
curIters() const
|
||||
{
|
||||
return ammIters_;
|
||||
}
|
||||
|
||||
AccountID
|
||||
account() const
|
||||
{
|
||||
return account_;
|
||||
}
|
||||
|
||||
/** Strand execution may fail. Reset the flag at the start
|
||||
* of each payment engine iteration.
|
||||
*/
|
||||
void
|
||||
clear()
|
||||
{
|
||||
ammUsed_ = false;
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
#endif // RIPPLE_APP_PATHS_AMMCONTEXT_H_INCLUDED
|
||||
@@ -1,148 +0,0 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2023 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#ifndef RIPPLE_APP_TX_AMMLIQUIDITY_H_INCLUDED
|
||||
#define RIPPLE_APP_TX_AMMLIQUIDITY_H_INCLUDED
|
||||
|
||||
#include "ripple/app/misc/AMMHelpers.h"
|
||||
#include "ripple/app/misc/AMMUtils.h"
|
||||
#include "ripple/app/paths/AMMContext.h"
|
||||
#include "ripple/basics/Log.h"
|
||||
#include "ripple/ledger/ReadView.h"
|
||||
#include "ripple/ledger/View.h"
|
||||
#include "ripple/protocol/Quality.h"
|
||||
#include "ripple/protocol/STLedgerEntry.h"
|
||||
|
||||
namespace ripple {
|
||||
|
||||
template <typename TIn, typename TOut>
|
||||
class AMMOffer;
|
||||
|
||||
/** AMMLiquidity class provides AMM offers to BookStep class.
|
||||
* The offers are generated in two ways. If there are multiple
|
||||
* paths specified to the payment transaction then the offers
|
||||
* are generated based on the Fibonacci sequence with
|
||||
* a limited number of payment engine iterations consuming AMM offers.
|
||||
* These offers behave the same way as CLOB offers in that if
|
||||
* there is a limiting step, then the offers are adjusted
|
||||
* based on their quality.
|
||||
* If there is only one path specified in the payment transaction
|
||||
* then the offers are generated based on the competing CLOB offer
|
||||
* quality. In this case the offer's size is set in such a way
|
||||
* that the new AMM's pool spot price quality is equal to the CLOB's
|
||||
* offer quality.
|
||||
*/
|
||||
template <typename TIn, typename TOut>
|
||||
class AMMLiquidity
|
||||
{
|
||||
private:
|
||||
inline static const Number InitialFibSeqPct = Number(5) / 20000;
|
||||
AMMContext& ammContext_;
|
||||
AccountID const ammAccountID_;
|
||||
std::uint32_t const tradingFee_;
|
||||
Issue const issueIn_;
|
||||
Issue const issueOut_;
|
||||
// Initial AMM pool balances
|
||||
TAmounts<TIn, TOut> const initialBalances_;
|
||||
beast::Journal const j_;
|
||||
|
||||
public:
|
||||
AMMLiquidity(
|
||||
ReadView const& view,
|
||||
AccountID const& ammAccountID,
|
||||
std::uint32_t tradingFee,
|
||||
Issue const& in,
|
||||
Issue const& out,
|
||||
AMMContext& ammContext,
|
||||
beast::Journal j);
|
||||
~AMMLiquidity() = default;
|
||||
AMMLiquidity(AMMLiquidity const&) = delete;
|
||||
AMMLiquidity&
|
||||
operator=(AMMLiquidity const&) = delete;
|
||||
|
||||
/** Generate AMM offer. Returns nullopt if clobQuality is provided
|
||||
* and it is better than AMM offer quality. Otherwise returns AMM offer.
|
||||
* If clobQuality is provided then AMM offer size is set based on the
|
||||
* quality.
|
||||
*/
|
||||
std::optional<AMMOffer<TIn, TOut>>
|
||||
getOffer(ReadView const& view, std::optional<Quality> const& clobQuality)
|
||||
const;
|
||||
|
||||
AccountID const&
|
||||
ammAccount() const
|
||||
{
|
||||
return ammAccountID_;
|
||||
}
|
||||
|
||||
bool
|
||||
multiPath() const
|
||||
{
|
||||
return ammContext_.multiPath();
|
||||
}
|
||||
|
||||
std::uint32_t
|
||||
tradingFee() const
|
||||
{
|
||||
return tradingFee_;
|
||||
}
|
||||
|
||||
AMMContext&
|
||||
context() const
|
||||
{
|
||||
return ammContext_;
|
||||
}
|
||||
|
||||
Issue const&
|
||||
issueIn() const
|
||||
{
|
||||
return issueIn_;
|
||||
}
|
||||
|
||||
Issue const&
|
||||
issueOut() const
|
||||
{
|
||||
return issueOut_;
|
||||
}
|
||||
|
||||
private:
|
||||
/** Fetches current AMM balances.
|
||||
*/
|
||||
TAmounts<TIn, TOut>
|
||||
fetchBalances(ReadView const& view) const;
|
||||
|
||||
/** Generate AMM offers with the offer size based on Fibonacci sequence.
|
||||
* The sequence corresponds to the payment engine iterations with AMM
|
||||
* liquidity. Iterations that don't consume AMM offers don't count.
|
||||
* The number of iterations with AMM offers is limited.
|
||||
* If the generated offer exceeds the pool balance then the function
|
||||
* throws overflow exception.
|
||||
*/
|
||||
TAmounts<TIn, TOut>
|
||||
generateFibSeqOffer(TAmounts<TIn, TOut> const& balances) const;
|
||||
|
||||
/** Generate max offer
|
||||
*/
|
||||
AMMOffer<TIn, TOut>
|
||||
maxOffer(TAmounts<TIn, TOut> const& balances) const;
|
||||
};
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
#endif // RIPPLE_APP_TX_AMMLIQUIDITY_H_INCLUDED
|
||||
@@ -1,149 +0,0 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of rippled: https://github.com/ripple/rippled
|
||||
Copyright (c) 2023 Ripple Labs Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#ifndef RIPPLE_APP_AMMOFFER_H_INCLUDED
|
||||
#define RIPPLE_APP_AMMOFFER_H_INCLUDED
|
||||
|
||||
#include <ripple/ledger/ApplyView.h>
|
||||
#include <ripple/ledger/View.h>
|
||||
#include <ripple/protocol/Quality.h>
|
||||
#include <ripple/protocol/TER.h>
|
||||
|
||||
namespace ripple {
|
||||
|
||||
template <typename TIn, typename TOut>
|
||||
class AMMLiquidity;
|
||||
class QualityFunction;
|
||||
|
||||
/** Represents synthetic AMM offer in BookStep. AMMOffer mirrors TOffer
|
||||
* methods for use in generic BookStep methods. AMMOffer amounts
|
||||
* are changed indirectly in BookStep limiting steps.
|
||||
*/
|
||||
template <typename TIn, typename TOut>
|
||||
class AMMOffer
|
||||
{
|
||||
private:
|
||||
AMMLiquidity<TIn, TOut> const& ammLiquidity_;
|
||||
// Initial offer amounts. It is fibonacci seq generated for multi-path.
|
||||
// If the offer size is set based on the competing CLOB offer then
|
||||
// the AMM offer size is such that if the offer is consumed then
|
||||
// the updated AMM pool SP quality is going to be equal to competing
|
||||
// CLOB offer quality. If there is no competing CLOB offer then
|
||||
// the initial size is set to in=cMax[Native,Value],balances.out.
|
||||
// While this is not a "real" offer it simulates the case of
|
||||
// the swap out of the entire side of the pool, in which case
|
||||
// the swap in amount is infinite.
|
||||
TAmounts<TIn, TOut> const amounts_;
|
||||
// If seated then current pool balances. Used in one-path limiting steps
|
||||
// to swap in/out.
|
||||
std::optional<TAmounts<TIn, TOut>> const balances_;
|
||||
// The Spot Price quality if balances != amounts
|
||||
// else the amounts quality
|
||||
Quality const quality_;
|
||||
// AMM offer can be consumed once at a given iteration
|
||||
bool consumed_;
|
||||
|
||||
public:
|
||||
AMMOffer(
|
||||
AMMLiquidity<TIn, TOut> const& ammLiquidity,
|
||||
TAmounts<TIn, TOut> const& amounts,
|
||||
std::optional<TAmounts<TIn, TOut>> const& balances,
|
||||
Quality const& quality);
|
||||
|
||||
Quality
|
||||
quality() const noexcept
|
||||
{
|
||||
return quality_;
|
||||
}
|
||||
|
||||
Issue const&
|
||||
issueIn() const;
|
||||
|
||||
Issue const&
|
||||
issueOut() const;
|
||||
|
||||
AccountID const&
|
||||
owner() const;
|
||||
|
||||
std::optional<uint256>
|
||||
key() const
|
||||
{
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
TAmounts<TIn, TOut> const&
|
||||
amount() const;
|
||||
|
||||
void
|
||||
consume(ApplyView& view, TAmounts<TIn, TOut> const& consumed);
|
||||
|
||||
bool
|
||||
fully_consumed() const
|
||||
{
|
||||
return consumed_;
|
||||
}
|
||||
|
||||
/** Limit out of the provided offer. If one-path then swapOut
|
||||
* using current balances. If multi-path then ceil_out using
|
||||
* current quality.
|
||||
*/
|
||||
TAmounts<TIn, TOut>
|
||||
limitOut(
|
||||
TAmounts<TIn, TOut> const& offrAmt,
|
||||
TOut const& limit,
|
||||
bool fixReducedOffers,
|
||||
bool roundUp) const;
|
||||
|
||||
/** Limit in of the provided offer. If one-path then swapIn
|
||||
* using current balances. If multi-path then ceil_in using
|
||||
* current quality.
|
||||
*/
|
||||
TAmounts<TIn, TOut>
|
||||
limitIn(TAmounts<TIn, TOut> const& offrAmt, TIn const& limit) const;
|
||||
|
||||
QualityFunction
|
||||
getQualityFunc() const;
|
||||
|
||||
/** Send funds without incurring the transfer fee
|
||||
*/
|
||||
template <typename... Args>
|
||||
static TER
|
||||
send(Args&&... args)
|
||||
{
|
||||
return accountSend(std::forward<Args>(args)..., WaiveTransferFee::Yes);
|
||||
}
|
||||
|
||||
bool
|
||||
isFunded() const
|
||||
{
|
||||
// AMM offer is fully funded by the pool
|
||||
return true;
|
||||
}
|
||||
|
||||
static std::pair<std::uint32_t, std::uint32_t>
|
||||
adjustRates(std::uint32_t ofrInRate, std::uint32_t ofrOutRate)
|
||||
{
|
||||
// AMM doesn't pay transfer fee on Payment tx
|
||||
return {ofrInRate, QUALITY_ONE};
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace ripple
|
||||
|
||||
#endif // RIPPLE_APP_AMMOFFER_H_INCLUDED
|
||||
@@ -17,7 +17,6 @@
*/
//==============================================================================

#include <ripple/app/paths/AMMContext.h>
#include <ripple/app/paths/Credit.h>
#include <ripple/app/paths/Flow.h>
#include <ripple/app/paths/impl/AmountSpec.h>
@@ -65,7 +64,7 @@ flow(
    bool defaultPaths,
    bool partialPayment,
    bool ownerPaysTransferFee,
    OfferCrossing offerCrossing,
    bool offerCrossing,
    std::optional<Quality> const& limitQuality,
    std::optional<STAmount> const& sendMax,
    beast::Journal j,
@@ -85,8 +84,6 @@ flow(
    if (sendMax)
        sendMaxIssue = sendMax->issue();

    AMMContext ammContext(src, false);

    // convert the paths to a collection of strands. Each strand is the
    // collection of account->account steps and book steps that may be used in
    // this payment.
@@ -101,7 +98,6 @@ flow(
        defaultPaths,
        ownerPaysTransferFee,
        offerCrossing,
        ammContext,
        j);

    if (!isTesSuccess(toStrandsTer))
@@ -111,8 +107,6 @@ flow(
        return result;
    }

    ammContext.setMultiPath(strands.size() > 1);

    if (j.trace())
    {
        j.trace() << "\nsrc: " << src << "\ndst: " << dst
@@ -151,7 +145,6 @@ flow(
            limitQuality,
            sendMax,
            j,
            ammContext,
            flowDebugInfo));
    }

@@ -170,7 +163,6 @@ flow(
            limitQuality,
            sendMax,
            j,
            ammContext,
            flowDebugInfo));
    }

@@ -189,7 +181,6 @@ flow(
            limitQuality,
            sendMax,
            j,
            ammContext,
            flowDebugInfo));
    }

@@ -207,7 +198,6 @@ flow(
            limitQuality,
            sendMax,
            j,
            ammContext,
            flowDebugInfo));
    }
Some files were not shown because too many files have changed in this diff.