Mirror of https://github.com/XRPLF/rippled.git (synced 2025-11-04 11:15:56 +00:00)

Compare commits: vlntb/refa...vlntb/tagg (67 commits)
67 commits (SHA1):

a24caba617, cf415e2a8a, 1226c92aa6, 2107b09445, 7775c725f3, c61096239c,
c5fe970646, c57cd8b23e, c14ce956ad, 095dc4d9cc, 2e255812ae, 896b8c3b54,
58dd07bbdf, b13370ac0d, f847e3287c, 56c1e078f2, afc05659ed, b04d239926,
dc1caa41b2, ceb0ce5634, fb89213d4d, d8628d481d, a14551b151, de33a6a241,
28eec6ce1b, c9a723128a, da82e52613, c9d73b6135, b7ed99426b, 97f0747e10,
abf12db788, bdfc376951, b40a3684ae, 86ef16dbeb, 39b5031ab5, 94decc753b,
991891625a, 69314e6832, dbeb841b5a, 4eae037fee, b5a63b39d3, 6419f9a253,
31c99caa65, d835e97490, baf4b8381f, 9b45b6888b, 7179ce9c58, 921aef9934,
e7a7bb83c1, 5c2a3a2779, b2960b9e7f, 5713f9782a, 60e340d356, 80d82c5b2b,
433eeabfa5, faa781b71f, c233df720a, 7ff4f79d30, 60909655d3, 03e46cd026,
e95683a0fb, 13353ae36d, 1a40f18bdd, 90e6380383, 8bfaa7fe0a, c9135a63cd,
452263eaa5
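Assuming both branches are fetched from the mirror, the same range can be listed locally; note the branch names in the page header appear truncated, so substitute the full names:

    # List the commits on the second branch that are not on the first,
    # mirroring the three-dot compare view above.
    git fetch origin
    git log --oneline origin/vlntb/refa..origin/vlntb/tagg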
.clang-format

@@ -1,5 +1,21 @@
 ---
 Language: Cpp
+BreakBeforeBraces: Custom
+BraceWrapping:
+  AfterClass: true
+  AfterControlStatement: true
+  AfterEnum: false
+  AfterFunction: true
+  AfterNamespace: false
+  AfterObjCDeclaration: true
+  AfterStruct: true
+  AfterUnion: true
+  BeforeCatch: true
+  BeforeElse: true
+  IndentBraces: false
+KeepEmptyLinesAtTheStartOfBlocks: false
+MaxEmptyLinesToKeep: 1
+---
+Language: Cpp
 AccessModifierOffset: -4
 AlignAfterOpenBracket: AlwaysBreak
 AlignConsecutiveAssignments: false
@@ -18,56 +34,41 @@ AlwaysBreakBeforeMultilineStrings: true
 AlwaysBreakTemplateDeclarations: true
 BinPackArguments: false
 BinPackParameters: false
-BraceWrapping:
-  AfterClass: true
-  AfterControlStatement: true
-  AfterEnum: false
-  AfterFunction: true
-  AfterNamespace: false
-  AfterObjCDeclaration: true
-  AfterStruct: true
-  AfterUnion: true
-  BeforeCatch: true
-  BeforeElse: true
-  IndentBraces: false
 BreakBeforeBinaryOperators: false
-BreakBeforeBraces: Custom
 BreakBeforeTernaryOperators: true
 BreakConstructorInitializersBeforeComma: true
 ColumnLimit: 80
-CommentPragmas: '^ IWYU pragma:'
+CommentPragmas: "^ IWYU pragma:"
 ConstructorInitializerAllOnOneLineOrOnePerLine: true
 ConstructorInitializerIndentWidth: 4
 ContinuationIndentWidth: 4
 Cpp11BracedListStyle: true
 DerivePointerAlignment: false
 DisableFormat: false
 ExperimentalAutoDetectBinPacking: false
-ForEachMacros: [ Q_FOREACH, BOOST_FOREACH ]
+ForEachMacros: [Q_FOREACH, BOOST_FOREACH]
 IncludeBlocks: Regroup
 IncludeCategories:
-  - Regex: '^<(test)/'
+  - Regex: "^<(test)/"
     Priority: 0
-  - Regex: '^<(xrpld)/'
+  - Regex: "^<(xrpld)/"
    Priority: 1
-  - Regex: '^<(xrpl)/'
+  - Regex: "^<(xrpl)/"
     Priority: 2
-  - Regex: '^<(boost)/'
+  - Regex: "^<(boost)/"
     Priority: 3
-  - Regex: '^.*/'
+  - Regex: "^.*/"
     Priority: 4
   - Regex: '^.*\.h'
     Priority: 5
-  - Regex: '.*'
+  - Regex: ".*"
     Priority: 6
-IncludeIsMainRegex: '$'
+IncludeIsMainRegex: "$"
 IndentCaseLabels: true
 IndentFunctionDeclarationAfterType: false
 IndentRequiresClause: true
 IndentWidth: 4
 IndentWrappedFunctionNames: false
-KeepEmptyLinesAtTheStartOfBlocks: false
-MaxEmptyLinesToKeep: 1
 NamespaceIndentation: None
 ObjCSpaceAfterProperty: false
 ObjCSpaceBeforeProtocolList: false
@@ -78,20 +79,25 @@ PenaltyBreakString: 1000
 PenaltyExcessCharacter: 1000000
 PenaltyReturnTypeOnItsOwnLine: 200
 PointerAlignment: Left
 ReflowComments: true
 RequiresClausePosition: OwnLine
 SortIncludes: true
 SpaceAfterCStyleCast: false
 SpaceBeforeAssignmentOperators: true
 SpaceBeforeParens: ControlStatements
 SpaceInEmptyParentheses: false
 SpacesBeforeTrailingComments: 2
 SpacesInAngles: false
 SpacesInContainerLiterals: true
 SpacesInCStyleCastParentheses: false
 SpacesInParentheses: false
 SpacesInSquareBrackets: false
 Standard: Cpp11
 TabWidth: 8
 UseTab: Never
 QualifierAlignment: Right
+---
+Language: Proto
+BasedOnStyle: Google
+ColumnLimit: 0
+IndentWidth: 2
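To apply this configuration locally, clang-format can be pointed at the repository style; a minimal sketch, with the file path purely illustrative:

    # Reformat one file in place; --style=file picks up the nearest .clang-format.
    clang-format -i --style=file src/libxrpl/protocol/STObject.cpp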
.codecov.yml

@@ -27,7 +27,7 @@ github_checks:
 parsers:
   cobertura:
     partials_as_hits: true
-    handle_missing_conditions : true
+    handle_missing_conditions: true
 
 slack_app: false
 
.git-blame-ignore-revs

@@ -11,3 +11,6 @@ b9d007813378ad0ff45660dc07285b823c7e9855
 fe9a5365b8a52d4acc42eb27369247e6f238a4f9
 9a93577314e6a8d4b4a8368cc9d2b15a5d8303e8
 552377c76f55b403a1c876df873a23d780fcc81c
+97f0747e103f13e26e45b731731059b32f7679ac
+b13370ac0d207217354f1fc1c29aef87769fb8a1
+896b8c3b54a22b0497cb0d1ce95e1095f9a227ce
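These entries only take effect when git is told about the file; this is standard git configuration, not something the repository applies automatically:

    # Make git blame skip the revisions listed in .git-blame-ignore-revs.
    git config blame.ignoreRevsFile .git-blame-ignore-revs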
.github/ISSUE_TEMPLATE/bug_report.md (13 changes, vendored)
@@ -2,30 +2,35 @@
 name: Bug Report
 about: Create a report to help us improve rippled
 title: "[Title with short description] (Version: [rippled version])"
-labels: ''
-assignees: ''
-
+labels: ""
+assignees: ""
 ---
 
 <!-- Please search existing issues to avoid creating duplicates.-->
 
 ## Issue Description
+
 <!--Provide a summary for your issue/bug.-->
 
 ## Steps to Reproduce
+
 <!--List in detail the exact steps to reproduce the unexpected behavior of the software.-->
 
 ## Expected Result
+
 <!--Explain in detail what behavior you expected to happen.-->
 
 ## Actual Result
+
 <!--Explain in detail what behavior actually happened.-->
 
 ## Environment
+
 <!--Please describe your environment setup (such as Ubuntu 18.04 with Boost 1.70).-->
 <!-- If you are using a formal release, please use the version returned by './rippled --version' as the version number-->
 <!-- If you are working off of develop, please add the git hash via 'git rev-parse HEAD'-->
 
 ## Supporting Files
+
 <!--If you have supporting files such as a log, feel free to post a link here using Github Gist.-->
 <!--Consider adding configuration files with private information removed via Github Gist. -->
.github/ISSUE_TEMPLATE/feature_request.md (8 changes, vendored)
@@ -3,19 +3,23 @@ name: Feature Request
 about: Suggest a new feature for the rippled project
 title: "[Title with short description] (Version: [rippled version])"
 labels: Feature Request
-assignees: ''
-
+assignees: ""
 ---
 
 <!-- Please search existing issues to avoid creating duplicates.-->
 
 ## Summary
+
 <!-- Provide a summary to the feature request-->
 
 ## Motivation
+
 <!-- Why do we need this feature?-->
 
 ## Solution
+
 <!-- What is the solution?-->
 
 ## Paths Not Taken
+
 <!-- What other alternatives have been considered?-->
+
.github/actions/build-deps/action.yml (new file, 62 lines, vendored)
@@ -0,0 +1,62 @@
# This action installs and optionally uploads Conan dependencies to a remote
# repository. The dependencies will only be uploaded if the credentials are
# provided.
name: Build Conan dependencies

# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
  build_dir:
    description: "The directory where to build."
    required: true
  build_type:
    description: 'The build type to use ("Debug", "Release").'
    required: true
  conan_remote_name:
    description: "The name of the Conan remote to use."
    required: true
  conan_remote_url:
    description: "The URL of the Conan endpoint to use."
    required: true
  conan_remote_username:
    description: "The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
    required: false
    default: ""
  conan_remote_password:
    description: "The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
    required: false
    default: ""
  force_build:
    description: 'Force building of all dependencies ("true", "false").'
    required: false
    default: "false"
  force_upload:
    description: 'Force uploading of all dependencies ("true", "false").'
    required: false
    default: "false"

runs:
  using: composite
  steps:
    - name: Install Conan dependencies
      shell: bash
      run: |
        echo 'Installing dependencies.'
        mkdir -p ${{ inputs.build_dir }}
        cd ${{ inputs.build_dir }}
        conan install \
          --output-folder . \
          --build ${{ inputs.force_build == 'true' && '"*"' || 'missing' }} \
          --options:host '&:tests=True' \
          --options:host '&:xrpld=True' \
          --settings:all build_type=${{ inputs.build_type }} \
          --format=json ..
    - name: Upload Conan dependencies
      if: ${{ inputs.conan_remote_username != '' && inputs.conan_remote_password != '' }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo "Logging into Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
        conan remote login ${{ inputs.conan_remote_name }} "${{ inputs.conan_remote_username }}" --password "${{ inputs.conan_remote_password }}"
        echo 'Uploading dependencies.'
        conan upload '*' --confirm --check ${{ inputs.force_upload == 'true' && '--force' || '' }} --remote=${{ inputs.conan_remote_name }}
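With the default build directory used by the build-test workflow further down (".build") and force_build left at "false", the install step above reduces to roughly these shell commands, with the ${{ }} expressions substituted for a Debug build:

    # Equivalent of the 'Install Conan dependencies' step; '--build missing'
    # is selected because force_build is 'false'.
    mkdir -p .build && cd .build
    conan install \
      --output-folder . \
      --build missing \
      --options:host '&:tests=True' \
      --options:host '&:xrpld=True' \
      --settings:all build_type=Debug \
      --format=json ..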
.github/actions/build-test/action.yml (new file, 95 lines, vendored)
@@ -0,0 +1,95 @@
# This action builds and tests the binary. The Conan dependencies must have
# already been installed (see the build-deps action).
name: Build and Test

# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
  build_dir:
    description: "The directory where to build."
    required: true
  build_only:
    description: 'Whether to only build or to build and test the code ("true", "false").'
    required: false
    default: "false"
  build_type:
    description: 'The build type to use ("Debug", "Release").'
    required: true
  cmake_args:
    description: "Additional arguments to pass to CMake."
    required: false
    default: ""
  cmake_target:
    description: "The CMake target to build."
    required: true
  codecov_token:
    description: "The Codecov token to use for uploading coverage reports."
    required: false
    default: ""
  os:
    description: 'The operating system to use for the build ("linux", "macos", "windows").'
    required: true

runs:
  using: composite
  steps:
    - name: Configure CMake
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Configuring CMake.'
        cmake \
          -G '${{ inputs.os == 'windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
          ${{ inputs.cmake_args }} \
          ..
    - name: Build the binary
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Building binary.'
        cmake \
          --build . \
          --config ${{ inputs.build_type }} \
          --parallel $(nproc) \
          --target ${{ inputs.cmake_target }}
    - name: Check linking
      if: ${{ inputs.os == 'linux' }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Checking linking.'
        ldd ./rippled
        if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
          echo 'The binary is statically linked.'
        else
          echo 'The binary is dynamically linked.'
          exit 1
        fi
    - name: Verify voidstar
      if: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Verifying presence of instrumentation.'
        ./rippled --version | grep libvoidstar
    - name: Test the binary
      if: ${{ inputs.build_only == 'false' }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'windows' && inputs.build_type || '' }}
      run: |
        echo 'Testing binary.'
        ./rippled --unittest --unittest-jobs $(nproc)
        ctest -j $(nproc) --output-on-failure
    - name: Upload coverage report
      if: ${{ inputs.cmake_target == 'coverage' }}
      uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
      with:
        disable_search: true
        disable_telem: true
        fail_ci_if_error: true
        files: ${{ inputs.build_dir }}/coverage.xml
        plugins: noop
        token: ${{ inputs.codecov_token }}
        verbose: true
.github/actions/build/action.yml (deleted, 34 lines, vendored)
@@ -1,34 +0,0 @@
name: build
inputs:
  generator:
    default: null
  configuration:
    required: true
  cmake-args:
    default: null
  cmake-target:
    default: all
# An implicit input is the environment variable `build_dir`.
runs:
  using: composite
  steps:
    - name: configure
      shell: bash
      run: |
        cd ${build_dir}
        cmake \
          ${{ inputs.generator && format('-G "{0}"', inputs.generator) || '' }} \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
          -Dtests=TRUE \
          -Dxrpld=TRUE \
          ${{ inputs.cmake-args }} \
          ..
    - name: build
      shell: bash
      run: |
        cmake \
          --build ${build_dir} \
          --config ${{ inputs.configuration }} \
          --parallel ${NUM_PROCESSORS:-$(nproc)} \
          --target ${{ inputs.cmake-target }}
.github/actions/dependencies/action.yml (deleted, 57 lines, vendored)
@@ -1,57 +0,0 @@
name: dependencies
inputs:
  configuration:
    required: true
# An implicit input is the environment variable `build_dir`.
runs:
  using: composite
  steps:
    - name: unlock Conan
      shell: bash
      run: conan remove --locks
    - name: export custom recipes
      shell: bash
      run: |
        conan config set general.revisions_enabled=1
        conan export external/snappy snappy/1.1.10@
        conan export external/rocksdb rocksdb/9.7.3@
        conan export external/soci soci/4.0.3@
        conan export external/nudb nudb/2.0.8@
    - name: add Ripple Conan remote
      shell: bash
      run: |
        conan remote list
        conan remote remove ripple || true
        # Do not quote the URL. An empty string will be accepted (with
        # a non-fatal warning), but a missing argument will not.
        conan remote add ripple ${{ env.CONAN_URL }} --insert 0
    - name: try to authenticate to Ripple Conan remote
      id: remote
      shell: bash
      run: |
        # `conan user` implicitly uses the environment variables
        # CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
        # https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
        # https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
        # https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
        echo outcome=$(conan user --remote ripple --password >&2 \
          && echo success || echo failure) | tee ${GITHUB_OUTPUT}
    - name: list missing binaries
      id: binaries
      shell: bash
      # Print the list of dependencies that would need to be built locally.
      # A non-empty list means we have "failed" to cache binaries remotely.
      run: |
        echo missing=$(conan info . --build missing --settings build_type=${{ inputs.configuration }} --json 2>/dev/null | grep '^\[') | tee ${GITHUB_OUTPUT}
    - name: install dependencies
      shell: bash
      run: |
        mkdir ${build_dir}
        cd ${build_dir}
        conan install \
          --output-folder . \
          --build missing \
          --options tests=True \
          --options xrpld=True \
          --settings build_type=${{ inputs.configuration }} \
          ..
.github/scripts/levelization/README.md (moved from Builds/levelization/README.md)

@@ -25,32 +25,32 @@ more dependencies listed later.
 **tl;dr:** The modules listed first are more independent than the modules
 listed later.
 
-| Level / Tier | Module(s) |
-|--------------|-----------------------------------------------|
-| 01 | ripple/beast ripple/unity
-| 02 | ripple/basics
-| 03 | ripple/json ripple/crypto
-| 04 | ripple/protocol
-| 05 | ripple/core ripple/conditions ripple/consensus ripple/resource ripple/server
-| 06 | ripple/peerfinder ripple/ledger ripple/nodestore ripple/net
-| 07 | ripple/shamap ripple/overlay
-| 08 | ripple/app
-| 09 | ripple/rpc
-| 10 | ripple/perflog
-| 11 | test/jtx test/beast test/csf
-| 12 | test/unit_test
-| 13 | test/crypto test/conditions test/json test/resource test/shamap test/peerfinder test/basics test/overlay
-| 14 | test
-| 15 | test/net test/protocol test/ledger test/consensus test/core test/server test/nodestore
-| 16 | test/rpc test/app
+| Level / Tier | Module(s) |
+| ------------ | -------------------------------------------------------------------------------------------------------- |
+| 01 | ripple/beast ripple/unity |
+| 02 | ripple/basics |
+| 03 | ripple/json ripple/crypto |
+| 04 | ripple/protocol |
+| 05 | ripple/core ripple/conditions ripple/consensus ripple/resource ripple/server |
+| 06 | ripple/peerfinder ripple/ledger ripple/nodestore ripple/net |
+| 07 | ripple/shamap ripple/overlay |
+| 08 | ripple/app |
+| 09 | ripple/rpc |
+| 10 | ripple/perflog |
+| 11 | test/jtx test/beast test/csf |
+| 12 | test/unit_test |
+| 13 | test/crypto test/conditions test/json test/resource test/shamap test/peerfinder test/basics test/overlay |
+| 14 | test |
+| 15 | test/net test/protocol test/ledger test/consensus test/core test/server test/nodestore |
+| 16 | test/rpc test/app |
 
-(Note that `test` levelization is *much* less important and *much* less
+(Note that `test` levelization is _much_ less important and _much_ less
 strictly enforced than `ripple` levelization, other than the requirement
-that `test` code should *never* be included in `ripple` code.)
+that `test` code should _never_ be included in `ripple` code.)
 
 ## Validation
 
-The [levelization.sh](levelization.sh) script takes no parameters,
+The [levelization](generate.sh) script takes no parameters,
 reads no environment variables, and can be run from any directory,
 as long as it is in the expected location in the rippled repo.
 It can be run at any time from within a checked out repo, and will
@@ -59,48 +59,48 @@ the rippled source. The only caveat is that it runs much slower
 under Windows than in Linux. It hasn't yet been tested under MacOS.
 It generates many files of [results](results):
 
-* `rawincludes.txt`: The raw dump of the `#includes`
-* `paths.txt`: A second dump grouping the source module
+- `rawincludes.txt`: The raw dump of the `#includes`
+- `paths.txt`: A second dump grouping the source module
   to the destination module, deduped, and with frequency counts.
-* `includes/`: A directory where each file represents a module and
+- `includes/`: A directory where each file represents a module and
   contains a list of modules and counts that the module _includes_.
-* `includedby/`: Similar to `includes/`, but the other way around. Each
+- `includedby/`: Similar to `includes/`, but the other way around. Each
   file represents a module and contains a list of modules and counts
   that _include_ the module.
-* [`loops.txt`](results/loops.txt): A list of direct loops detected
+- [`loops.txt`](results/loops.txt): A list of direct loops detected
   between modules as they actually exist, as opposed to how they are
  desired as described above. In a perfect repo, this file will be
  empty.
  This file is committed to the repo, and is used by the [levelization
-  Github workflow](../../.github/workflows/levelization.yml) to validate
+  Github workflow](../../workflows/check-levelization.yml) to validate
  that nothing changed.
-* [`ordering.txt`](results/ordering.txt): A list showing relationships
+- [`ordering.txt`](results/ordering.txt): A list showing relationships
  between modules where there are no loops as they actually exist, as
  opposed to how they are desired as described above.
  This file is committed to the repo, and is used by the [levelization
-  Github workflow](../../.github/workflows/levelization.yml) to validate
+  Github workflow](../../workflows/check-levelization.yml) to validate
  that nothing changed.
-* [`levelization.yml`](../../.github/workflows/levelization.yml)
+- [`levelization.yml`](../../workflows/check-levelization.yml)
  Github Actions workflow to test that levelization loops haven't
  changed. Unfortunately, if changes are detected, it can't tell if
  they are improvements or not, so if you have resolved any issues or
  done anything else to improve levelization, run `levelization.sh`,
  and commit the updated results.
 
 The `loops.txt` and `ordering.txt` files relate the modules
 using comparison signs, which indicate the number of times each
 module is included in the other.
 
-* `A > B` means that A should probably be at a higher level than B,
+- `A > B` means that A should probably be at a higher level than B,
  because B is included in A significantly more than A is included in B.
  These results can be included in both `loops.txt` and `ordering.txt`.
  Because `ordering.txt` only includes relationships where B is not
  included in A at all, it will only include these types of results.
-* `A ~= B` means that A and B are included in each other a different
+- `A ~= B` means that A and B are included in each other a different
  number of times, but the values are so close that the script can't
  definitively say that one should be above the other. These results
  will only be included in `loops.txt`.
-* `A == B` means that A and B include each other the same number of
+- `A == B` means that A and B include each other the same number of
  times, so the script has no clue which should be higher. These results
  will only be included in `loops.txt`.
@@ -110,5 +110,5 @@ get those details locally.
 
 1. Run `levelization.sh`
 2. Grep the modules in `paths.txt`.
-   * For example, if a cycle is found `A ~= B`, simply `grep -w
-     A Builds/levelization/results/paths.txt | grep -w B`
+   - For example, if a cycle is found `A ~= B`, simply `grep -w
+     A .github/scripts/levelization/results/paths.txt | grep -w B`
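As a concrete instance of the recipe above, with module names taken from the loops.txt changes later in this diff:

    # Find the include paths behind a reported cycle between two modules.
    cd .github/scripts/levelization
    ./generate.sh
    grep -w xrpld.app results/paths.txt | grep -w xrpld.overlay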
.github/scripts/levelization/generate.sh (renamed from Builds/levelization/levelization.sh)

@@ -1,6 +1,6 @@
 #!/bin/bash
 
-# Usage: levelization.sh
+# Usage: generate.sh
 # This script takes no parameters, reads no environment variables,
 # and can be run from any directory, as long as it is in the expected
 # location in the repo.
@@ -19,7 +19,7 @@ export LANG=C
 rm -rfv results
 mkdir results
 includes="$( pwd )/results/rawincludes.txt"
-pushd ../..
+pushd ../../..
 echo Raw includes:
 grep -r '^[ ]*#include.*/.*\.h' include src | \
   grep -v boost | tee ${includes}
.github/scripts/levelization/results/loops.txt

@@ -10,9 +10,6 @@ Loop: xrpld.app xrpld.core
 Loop: xrpld.app xrpld.ledger
  xrpld.app > xrpld.ledger
 
-Loop: xrpld.app xrpld.net
- xrpld.app > xrpld.net
-
 Loop: xrpld.app xrpld.overlay
  xrpld.overlay > xrpld.app
 
@@ -25,15 +22,9 @@ Loop: xrpld.app xrpld.rpc
 Loop: xrpld.app xrpld.shamap
  xrpld.app > xrpld.shamap
 
-Loop: xrpld.core xrpld.net
- xrpld.net > xrpld.core
-
 Loop: xrpld.core xrpld.perflog
  xrpld.perflog == xrpld.core
 
-Loop: xrpld.net xrpld.rpc
- xrpld.rpc ~= xrpld.net
-
 Loop: xrpld.overlay xrpld.rpc
  xrpld.rpc ~= xrpld.overlay
 
.github/scripts/levelization/results/ordering.txt

@@ -2,6 +2,8 @@ libxrpl.basics > xrpl.basics
 libxrpl.crypto > xrpl.basics
 libxrpl.json > xrpl.basics
 libxrpl.json > xrpl.json
+libxrpl.net > xrpl.basics
+libxrpl.net > xrpl.net
 libxrpl.protocol > xrpl.basics
 libxrpl.protocol > xrpl.json
 libxrpl.protocol > xrpl.protocol
@@ -62,9 +64,9 @@ test.jtx > xrpl.basics
 test.jtx > xrpld.app
 test.jtx > xrpld.core
 test.jtx > xrpld.ledger
-test.jtx > xrpld.net
 test.jtx > xrpld.rpc
 test.jtx > xrpl.json
+test.jtx > xrpl.net
 test.jtx > xrpl.protocol
 test.jtx > xrpl.resource
 test.jtx > xrpl.server
@@ -109,7 +111,6 @@ test.rpc > test.toplevel
 test.rpc > xrpl.basics
 test.rpc > xrpld.app
 test.rpc > xrpld.core
-test.rpc > xrpld.net
 test.rpc > xrpld.overlay
 test.rpc > xrpld.rpc
 test.rpc > xrpl.json
@@ -134,6 +135,7 @@ test.toplevel > xrpl.json
 test.unit_test > xrpl.basics
 tests.libxrpl > xrpl.basics
 xrpl.json > xrpl.basics
+xrpl.net > xrpl.basics
 xrpl.protocol > xrpl.basics
 xrpl.protocol > xrpl.json
 xrpl.resource > xrpl.basics
@@ -149,6 +151,7 @@ xrpld.app > xrpld.consensus
 xrpld.app > xrpld.nodestore
 xrpld.app > xrpld.perflog
 xrpld.app > xrpl.json
+xrpld.app > xrpl.net
 xrpld.app > xrpl.protocol
 xrpld.app > xrpl.resource
 xrpld.conditions > xrpl.basics
@@ -158,14 +161,11 @@ xrpld.consensus > xrpl.json
 xrpld.consensus > xrpl.protocol
 xrpld.core > xrpl.basics
 xrpld.core > xrpl.json
+xrpld.core > xrpl.net
 xrpld.core > xrpl.protocol
 xrpld.ledger > xrpl.basics
 xrpld.ledger > xrpl.json
 xrpld.ledger > xrpl.protocol
-xrpld.net > xrpl.basics
-xrpld.net > xrpl.json
-xrpld.net > xrpl.protocol
-xrpld.net > xrpl.resource
 xrpld.nodestore > xrpl.basics
 xrpld.nodestore > xrpld.core
 xrpld.nodestore > xrpld.unity
@@ -189,6 +189,7 @@ xrpld.rpc > xrpld.core
 xrpld.rpc > xrpld.ledger
 xrpld.rpc > xrpld.nodestore
 xrpld.rpc > xrpl.json
+xrpld.rpc > xrpl.net
 xrpld.rpc > xrpl.protocol
 xrpld.rpc > xrpl.resource
 xrpld.rpc > xrpl.server
.github/scripts/strategy-matrix/generate.py (new file, 174 lines, vendored)
@@ -0,0 +1,174 @@
#!/usr/bin/env python3
import argparse
import itertools
import json
import re

'''
Generate a strategy matrix for GitHub Actions CI.

On each PR commit we will build a selection of Debian, RHEL, Ubuntu, MacOS, and
Windows configurations, while upon merge into the develop, release, or master
branches, we will build all configurations, and test most of them.

We will further set additional CMake arguments as follows:
- All builds will have the `tests`, `werr`, and `xrpld` options.
- All builds will have the `wextra` option except for GCC 12 and Clang 16.
- All release builds will have the `assert` option.
- Certain Debian Bookworm configurations will change the reference fee, enable
  codecov, and enable voidstar in PRs.
'''
def generate_strategy_matrix(all: bool, architecture: list[dict], os: list[dict], build_type: list[str], cmake_args: list[str]) -> dict:
    configurations = []
    for architecture, os, build_type, cmake_args in itertools.product(architecture, os, build_type, cmake_args):
        # The default CMake target is 'all' for Linux and MacOS and 'install'
        # for Windows, but it can get overridden for certain configurations.
        cmake_target = 'install' if os["distro_name"] == 'windows' else 'all'

        # We build and test all configurations by default, except for Windows in
        # Debug, because it is too slow, as well as when code coverage is
        # enabled as that mode already runs the tests.
        build_only = False
        if os['distro_name'] == 'windows' and build_type == 'Debug':
            build_only = True

        # Only generate a subset of configurations in PRs.
        if not all:
            # Debian:
            # - Bookworm using GCC 13: Release and Unity on linux/arm64, set
            #   the reference fee to 500.
            # - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
            #   code coverage.
            # - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
            #   enable voidstar.
            # - Bookworm using Clang 17: Release and no Unity on linux/amd64,
            #   set the reference fee to 1000.
            # - Bookworm using Clang 20: Debug and Unity on linux/amd64.
            if os['distro_name'] == 'debian':
                skip = True
                if os['distro_version'] == 'bookworm':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-13' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/arm64':
                        cmake_args = f'-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}'
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                        cmake_args = f'-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}'
                        cmake_target = 'coverage'
                        build_only = True
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-16' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/arm64':
                        cmake_args = f'-Dvoidstar=ON {cmake_args}'
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-17' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        cmake_args = f'-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}'
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-20' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                if skip:
                    continue

            # RHEL:
            # - 9.4 using GCC 12: Debug and Unity on linux/amd64.
            # - 9.6 using Clang: Release and no Unity on linux/amd64.
            if os['distro_name'] == 'rhel':
                skip = True
                if os['distro_version'] == '9.4':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                elif os['distro_version'] == '9.6':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-any' and build_type == 'Release' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                if skip:
                    continue

            # Ubuntu:
            # - Jammy using GCC 12: Debug and no Unity on linux/arm64.
            # - Noble using GCC 14: Release and Unity on linux/amd64.
            # - Noble using Clang 18: Debug and no Unity on linux/amd64.
            # - Noble using Clang 19: Release and Unity on linux/arm64.
            if os['distro_name'] == 'ubuntu':
                skip = True
                if os['distro_version'] == 'jammy':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/arm64':
                        skip = False
                elif os['distro_version'] == 'noble':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-14' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-18' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-19' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/arm64':
                        skip = False
                if skip:
                    continue

            # MacOS:
            # - Debug and no Unity on macos/arm64.
            if os['distro_name'] == 'macos' and not (build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'macos/arm64'):
                continue

            # Windows:
            # - Release and Unity on windows/amd64.
            if os['distro_name'] == 'windows' and not (build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'windows/amd64'):
                continue

        # Additional CMake arguments.
        cmake_args = f'{cmake_args} -Dtests=ON -Dwerr=ON -Dxrpld=ON'
        if not f'{os['compiler_name']}-{os['compiler_version']}' in ['gcc-12', 'clang-16']:
            cmake_args = f'{cmake_args} -Dwextra=ON'
        if build_type == 'Release':
            cmake_args = f'{cmake_args} -Dassert=ON'

        # We skip all RHEL on arm64 due to a build failure that needs further
        # investigation.
        if os['distro_name'] == 'rhel' and architecture['platform'] == 'linux/arm64':
            continue

        # We skip all clang-20 on arm64 due to boost 1.86 build error
        if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-20' and architecture['platform'] == 'linux/arm64':
            continue

        # Generate a unique name for the configuration, e.g. macos-arm64-debug
        # or debian-bookworm-gcc-12-amd64-release-unity.
        config_name = os['distro_name']
        if (n := os['distro_version']) != '':
            config_name += f'-{n}'
        if (n := os['compiler_name']) != '':
            config_name += f'-{n}'
        if (n := os['compiler_version']) != '':
            config_name += f'-{n}'
        config_name += f'-{architecture['platform'][architecture['platform'].find('/')+1:]}'
        config_name += f'-{build_type.lower()}'
        if '-Dunity=ON' in cmake_args:
            config_name += '-unity'

        # Add the configuration to the list, with the most unique fields first,
        # so that they are easier to identify in the GitHub Actions UI, as long
        # names get truncated.
        configurations.append({
            'config_name': config_name,
            'cmake_args': cmake_args,
            'cmake_target': cmake_target,
            'build_only': 'true' if build_only else 'false',
            'build_type': build_type,
            'os': os,
            'architecture': architecture,
        })

    return {'include': configurations}


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-a', '--all', help='Set to generate all configurations (generally used when merging a PR) or leave unset to generate a subset of configurations (generally used when committing to a PR).', action="store_true")
    parser.add_argument('-c', '--config', help='Path to the JSON file containing the strategy matrix configurations.', required=True, type=str)
    args = parser.parse_args()

    # Load the JSON configuration file.
    config = None
    with open(args.config, 'r') as f:
        config = json.load(f)
    if config['architecture'] is None or config['os'] is None or config['build_type'] is None or config['cmake_args'] is None:
        raise Exception('Invalid configuration file.')

    # Generate the strategy matrix.
    print(f'matrix={json.dumps(generate_strategy_matrix(args.all, config['architecture'], config['os'], config['build_type'], config['cmake_args']))}')
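The build-test.yml workflow below runs this script from its own directory; the matrix can be generated the same way locally, printed as a single `matrix=...` line suitable for `$GITHUB_OUTPUT`:

    # Reduced PR matrix for Linux, then the full matrix used on merges.
    cd .github/scripts/strategy-matrix
    python generate.py --config=linux.json
    python generate.py --all --config=linux.json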
.github/scripts/strategy-matrix/linux.json (new file, 154 lines, vendored)
@@ -0,0 +1,154 @@
{
  "architecture": [
    {
      "platform": "linux/amd64",
      "runner": ["self-hosted", "Linux", "X64", "heavy"]
    },
    {
      "platform": "linux/arm64",
      "runner": ["self-hosted", "Linux", "ARM64", "heavy-arm64"]
    }
  ],
  "os": [
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "12"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "13"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "14"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "15"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "16"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "17"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "18"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "19"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "20"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.4",
      "compiler_name": "gcc",
      "compiler_version": "12"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.4",
      "compiler_name": "gcc",
      "compiler_version": "13"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.4",
      "compiler_name": "gcc",
      "compiler_version": "14"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.6",
      "compiler_name": "gcc",
      "compiler_version": "13"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.6",
      "compiler_name": "gcc",
      "compiler_version": "14"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.4",
      "compiler_name": "clang",
      "compiler_version": "any"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9.6",
      "compiler_name": "clang",
      "compiler_version": "any"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "jammy",
      "compiler_name": "gcc",
      "compiler_version": "12"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "gcc",
      "compiler_version": "13"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "gcc",
      "compiler_version": "14"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "16"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "17"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "18"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "19"
    }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
}
.github/scripts/strategy-matrix/macos.json (new file, 21 lines, vendored)
@@ -0,0 +1,21 @@
{
  "architecture": [
    {
      "platform": "macos/arm64",
      "runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"]
    }
  ],
  "os": [
    {
      "distro_name": "macos",
      "distro_version": "",
      "compiler_name": "",
      "compiler_version": ""
    }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": [
    "-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
    "-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
  ]
}
.github/scripts/strategy-matrix/windows.json (new file, 18 lines, vendored)
@@ -0,0 +1,18 @@
{
  "architecture": [
    {
      "platform": "windows/amd64",
      "runner": ["windows-latest"]
    }
  ],
  "os": [
    {
      "distro_name": "windows",
      "distro_version": "",
      "compiler_name": "",
      "compiler_version": ""
    }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
}
.github/workflows/build-test.yml (new file, 219 lines, vendored)
@@ -0,0 +1,219 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test

# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: false
        type: string
        default: ".build"
      conan_remote_name:
        description: "The name of the Conan remote to use."
        required: true
        type: string
      conan_remote_url:
        description: "The URL of the Conan endpoint to use."
        required: true
        type: string
      dependencies_force_build:
        description: "Force building of all dependencies."
        required: false
        type: boolean
        default: false
      dependencies_force_upload:
        description: "Force uploading of all dependencies."
        required: false
        type: boolean
        default: false
      os:
        description: 'The operating system to use for the build ("linux", "macos", "windows").'
        required: true
        type: string
      strategy_matrix:
        # TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
        description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
        required: false
        type: string
        default: "minimal"
    secrets:
      codecov_token:
        description: "The Codecov token to use for uploading coverage reports."
        required: false
      conan_remote_username:
        description: "The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
        required: false
      conan_remote_password:
        description: "The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
        required: false

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.os }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  # Generate the strategy matrix to be used by the following job.
  generate-matrix:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Set up Python
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: 3.13
      - name: Generate strategy matrix
        working-directory: .github/scripts/strategy-matrix
        id: generate
        run: python generate.py ${{ inputs.strategy_matrix == 'all' && '--all' || '' }} --config=${{ inputs.os }}.json >> "${GITHUB_OUTPUT}"
    outputs:
      matrix: ${{ steps.generate.outputs.matrix }}

  # Build and test the binary.
  build-test:
    needs:
      - generate-matrix
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
    runs-on: ${{ matrix.architecture.runner }}
    container: ${{ inputs.os == 'linux' && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}
    steps:
      - name: Check strategy matrix
        run: |
          echo 'Operating system distro name: ${{ matrix.os.distro_name }}'
          echo 'Operating system distro version: ${{ matrix.os.distro_version }}'
          echo 'Operating system compiler name: ${{ matrix.os.compiler_name }}'
          echo 'Operating system compiler version: ${{ matrix.os.compiler_version }}'
          echo 'Architecture platform: ${{ matrix.architecture.platform }}'
          echo 'Architecture runner: ${{ toJson(matrix.architecture.runner) }}'
          echo 'Build type: ${{ matrix.build_type }}'
          echo 'Build only: ${{ matrix.build_only }}'
          echo 'CMake arguments: ${{ matrix.cmake_args }}'
          echo 'CMake target: ${{ matrix.cmake_target }}'
          echo 'Config name: ${{ matrix.config_name }}'
      - name: Clean workspace (MacOS)
        if: ${{ inputs.os == 'macos' }}
        run: |
          WORKSPACE=${{ github.workspace }}
          echo "Cleaning workspace '${WORKSPACE}'."
          if [ -z "${WORKSPACE}" ] || [ "${WORKSPACE}" = "/" ]; then
            echo "Invalid working directory '${WORKSPACE}'."
            exit 1
          fi
          find "${WORKSPACE}" -depth 1 | xargs rm -rfv
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Set up Python (Windows)
        if: ${{ inputs.os == 'windows' }}
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: 3.13
      - name: Install build tools (Windows)
        if: ${{ inputs.os == 'windows' }}
        run: |
          echo 'Installing build tools.'
          pip install wheel conan
      - name: Check configuration (Windows)
        if: ${{ inputs.os == 'windows' }}
        run: |
          echo 'Checking environment variables.'
          set

          echo 'Checking CMake version.'
          cmake --version

          echo 'Checking Conan version.'
          conan --version
      - name: Install build tools (MacOS)
        if: ${{ inputs.os == 'macos' }}
        run: |
          echo 'Installing build tools.'
          brew install --quiet cmake conan ninja coreutils
      - name: Check configuration (Linux and MacOS)
        if: ${{ inputs.os == 'linux' || inputs.os == 'macos' }}
        run: |
          echo 'Checking path.'
          echo ${PATH} | tr ':' '\n'

          echo 'Checking environment variables.'
          env | sort

          echo 'Checking CMake version.'
          cmake --version

          echo 'Checking compiler version.'
          ${{ inputs.os == 'linux' && '${CC}' || 'clang' }} --version

          echo 'Checking Conan version.'
          conan --version

          echo 'Checking Ninja version.'
          ninja --version

          echo 'Checking nproc version.'
          nproc --version
      - name: Set up Conan home directory (MacOS)
        if: ${{ inputs.os == 'macos' }}
        run: |
          echo 'Setting up Conan home directory.'
          export CONAN_HOME=${{ github.workspace }}/.conan
          mkdir -p ${CONAN_HOME}
      - name: Set up Conan home directory (Windows)
        if: ${{ inputs.os == 'windows' }}
        run: |
          echo 'Setting up Conan home directory.'
          set CONAN_HOME=${{ github.workspace }}\.conan
          mkdir -p %CONAN_HOME%
      - name: Set up Conan configuration
        run: |
          echo 'Installing configuration.'
          cat conan/global.conf ${{ inputs.os == 'linux' && '>>' || '>' }} $(conan config home)/global.conf

          echo 'Conan configuration:'
          conan config show '*'
      - name: Set up Conan profile
        run: |
          echo 'Installing profile.'
          conan config install conan/profiles/default -tf $(conan config home)/profiles/

          echo 'Conan profile:'
          conan profile show
      - name: Set up Conan remote
        shell: bash
        run: |
          echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
          conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}

          echo 'Listing Conan remotes.'
          conan remote list
      - name: Build dependencies
        uses: ./.github/actions/build-deps
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
          conan_remote_url: ${{ inputs.conan_remote_url }}
          conan_remote_username: ${{ secrets.conan_remote_username }}
          conan_remote_password: ${{ secrets.conan_remote_password }}
          force_build: ${{ inputs.dependencies_force_build }}
          force_upload: ${{ inputs.dependencies_force_upload }}
      - name: Build and test binary
        uses: ./.github/actions/build-test
        with:
          build_dir: ${{ inputs.build_dir }}
          build_only: ${{ matrix.build_only }}
          build_type: ${{ matrix.build_type }}
          cmake_args: ${{ matrix.cmake_args }}
          cmake_target: ${{ matrix.cmake_target }}
          codecov_token: ${{ secrets.codecov_token }}
          os: ${{ inputs.os }}
.github/workflows/check-format.yml (new file, 75 lines, vendored)
@@ -0,0 +1,75 @@
# This workflow checks if the code is properly formatted.
name: Check format

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-format
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  pre-commit:
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/tools-rippled-pre-commit
    steps:
      # The $GITHUB_WORKSPACE and ${{ github.workspace }} might not point to the
      # same directory for jobs running in containers. The actions/checkout step
      # is *supposed* to checkout into $GITHUB_WORKSPACE and then add it to
      # safe.directory (see instructions at https://github.com/actions/checkout)
      # but that is apparently not happening for some container images. We
      # therefore preemptively add both directories to safe.directory. See also
      # https://github.com/actions/runner/issues/2058 for more details.
      - name: Configure git safe.directory
        run: |
          git config --global --add safe.directory $GITHUB_WORKSPACE
          git config --global --add safe.directory ${{ github.workspace }}
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Check configuration
        run: |
          echo 'Checking path.'
          echo ${PATH} | tr ':' '\n'

          echo 'Checking environment variables.'
          env | sort

          echo 'Checking pre-commit version.'
          pre-commit --version

          echo 'Checking clang-format version.'
          clang-format --version

          echo 'Checking NPM version.'
          npm --version

          echo 'Checking Node.js version.'
          node --version

          echo 'Checking prettier version.'
          prettier --version
      - name: Format code
        run: pre-commit run --show-diff-on-failure --color=always --all-files
      - name: Check for differences
        env:
          MESSAGE: |
            One or more files did not conform to the formatting. Maybe you did
            not run 'pre-commit' before committing, or your version of
            'clang-format' or 'prettier' has an incompatibility with the ones
            used here (see the "Check configuration" step above).

            Run 'pre-commit run --all-files' in your repo, and then commit and
            push the changes.
        run: |
          DIFF=$(git status --porcelain)
          if [ -n "${DIFF}" ]; then
            # Print the files that changed to give the contributor a hint about
            # what to expect when running pre-commit on their own machine.
            git status
            echo "${MESSAGE}"
            exit 1
          fi
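The failure message above can be acted on before pushing; pre-commit is a regular PyPI package, so a sketch of the local check is:

    # Reproduce the CI formatting check (same flags as the 'Format code' step).
    pip install pre-commit
    pre-commit run --show-diff-on-failure --color=always --all-files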
.github/workflows/check-levelization.yml (new file, 46 lines, vendored)
@@ -0,0 +1,46 @@
# This workflow checks if the dependencies between the modules are correctly
# indexed.
name: Check levelization

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-levelization
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  levelization:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Check levelization
        run: .github/scripts/levelization/generate.sh
      - name: Check for differences
        env:
          MESSAGE: |

            The dependency relationships between the modules in rippled have
            changed, which may be an improvement or a regression.

            A rule of thumb is that if your changes caused something to be
            removed from loops.txt, it's probably an improvement, while if
            something was added, it's probably a regression.

            Run '.github/scripts/levelization/generate.sh' in your repo, commit
            and push the changes. See .github/scripts/levelization/README.md for
            more info.
        run: |
          DIFF=$(git status --porcelain)
          if [ -n "${DIFF}" ]; then
            # Print the differences to give the contributor a hint about what to
            # expect when running levelization on their own machine.
            git diff
            echo "${MESSAGE}"
            exit 1
          fi
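To reproduce this check locally, a sketch using the same script the job runs:

```bash
# Regenerate the levelization results from the repository root.
.github/scripts/levelization/generate.sh

# The job fails if this shows differences; commit and push any changes.
git diff
```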
62 .github/workflows/check-missing-commits.yml vendored Normal file
@@ -0,0 +1,62 @@
# This workflow checks that all commits in the "master" branch are also in the
# "release" and "develop" branches, and that all commits in the "release" branch
# are also in the "develop" branch.
name: Check for missing commits

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-missing-commits
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
        with:
          fetch-depth: 0
      - name: Check for missing commits
        env:
          MESSAGE: |

            If you are reading this, then the commits indicated above are missing
            from the "develop" and/or "release" branch. Do a reverse-merge as soon
            as possible. See CONTRIBUTING.md for instructions.
        run: |
          set -o pipefail
          # Branches are ordered by how "canonical" they are. Every commit in one
          # branch should be in all the branches behind it.
          order=(master release develop)
          branches=()
          for branch in "${order[@]}"; do
            # Check that the branches exist so that this job will work on forked
            # repos, which don't necessarily have master and release branches.
            echo "Checking if ${branch} exists."
            if git ls-remote --exit-code --heads origin \
              refs/heads/${branch} > /dev/null; then
              branches+=(origin/${branch})
            fi
          done

          prior=()
          for branch in "${branches[@]}"; do
            if [[ ${#prior[@]} -ne 0 ]]; then
              echo "Checking ${prior[@]} for commits missing from ${branch}."
              git log --oneline --no-merges "${prior[@]}" \
                ^$branch | tee -a "missing-commits.txt"
              echo
            fi
            prior+=("${branch}")
          done

          if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
            echo "${MESSAGE}"
            exit 1
          fi
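The core comparison can also be run by hand. A sketch, assuming `origin` points at the upstream repository and the branches were fetched with full history:

```bash
# Commits on master that are missing from develop (should print nothing).
git log --oneline --no-merges origin/master ^origin/develop
```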
63 .github/workflows/clang-format.yml vendored
@@ -1,63 +0,0 @@
name: clang-format

on:
  push:
  pull_request:
    types: [opened, reopened, synchronize, ready_for_review]

jobs:
  check:
    if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
    runs-on: ubuntu-24.04
    env:
      CLANG_VERSION: 18
    steps:
      - uses: actions/checkout@v4
      - name: Install clang-format
        run: |
          codename=$( lsb_release --codename --short )
          sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
          deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
          deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
          EOF
          wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
          sudo apt-get update
          sudo apt-get install clang-format-${CLANG_VERSION}
      - name: Format first-party sources
        run: find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format-${CLANG_VERSION} -i {} +
      - name: Check for differences
        id: assert
        run: |
          set -o pipefail
          git diff --exit-code | tee "clang-format.patch"
      - name: Upload patch
        if: failure() && steps.assert.outcome == 'failure'
        uses: actions/upload-artifact@v4
        continue-on-error: true
        with:
          name: clang-format.patch
          if-no-files-found: ignore
          path: clang-format.patch
      - name: What happened?
        if: failure() && steps.assert.outcome == 'failure'
        env:
          PREAMBLE: |
            If you are reading this, you are looking at a failed Github Actions
            job. That means you pushed one or more files that did not conform
            to the formatting specified in .clang-format. That may be because
            you neglected to run 'git clang-format' or 'clang-format' before
            committing, or that your version of clang-format has an
            incompatibility with the one on this
            machine, which is:
          SUGGESTION: |

            To fix it, you can do one of two things:
            1. Download and apply the patch generated as an artifact of this
               job to your repo, commit, and push.
            2. Run 'git-clang-format --extensions cpp,h,hpp,ipp develop'
               in your repo, commit, and push.
        run: |
          echo "${PREAMBLE}"
          clang-format-${CLANG_VERSION} --version
          echo "${SUGGESTION}"
          exit 1
37 .github/workflows/doxygen.yml vendored
@@ -1,37 +0,0 @@
name: Build and publish Doxygen documentation
# To test this workflow, push your changes to your fork's `develop` branch.
on:
  push:
    branches:
      - develop
      - doxygen
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  documentation:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: check environment
        run: |
          echo ${PATH} | tr ':' '\n'
          cmake --version
          doxygen --version
          env | sort
      - name: build
        run: |
          mkdir build
          cd build
          cmake -Donly_docs=TRUE ..
          cmake --build . --target docs --parallel $(nproc)
      - name: publish
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: build/docs/html
53 .github/workflows/levelization.yml vendored
@@ -1,53 +0,0 @@
name: levelization

on:
  push:
  pull_request:
    types: [opened, reopened, synchronize, ready_for_review]

jobs:
  check:
    if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
    runs-on: ubuntu-latest
    env:
      CLANG_VERSION: 10
    steps:
      - uses: actions/checkout@v4
      - name: Check levelization
        run: Builds/levelization/levelization.sh
      - name: Check for differences
        id: assert
        run: |
          set -o pipefail
          git diff --exit-code | tee "levelization.patch"
      - name: Upload patch
        if: failure() && steps.assert.outcome == 'failure'
        uses: actions/upload-artifact@v4
        continue-on-error: true
        with:
          name: levelization.patch
          if-no-files-found: ignore
          path: levelization.patch
      - name: What happened?
        if: failure() && steps.assert.outcome == 'failure'
        env:
          MESSAGE: |
            If you are reading this, you are looking at a failed Github
            Actions job. That means you changed the dependency relationships
            between the modules in rippled. That may be an improvement or a
            regression. This check doesn't judge.

            A rule of thumb, though, is that if your changes caused
            something to be removed from loops.txt, that's probably an
            improvement. If something was added, it's probably a regression.

            To fix it, you can do one of two things:
            1. Download and apply the patch generated as an artifact of this
               job to your repo, commit, and push.
            2. Run './Builds/levelization/levelization.sh' in your repo,
               commit, and push.

            See Builds/levelization/README.md for more info.
        run: |
          echo "${MESSAGE}"
          exit 1
91 .github/workflows/libxrpl.yml vendored
@@ -1,91 +0,0 @@
name: Check libXRPL compatibility with Clio
env:
  CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
  CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
  CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
on:
  pull_request:
    paths:
      - 'src/libxrpl/protocol/BuildInfo.cpp'
      - '.github/workflows/libxrpl.yml'
    types: [opened, reopened, synchronize, ready_for_review]
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  publish:
    if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
    name: Publish libXRPL
    outputs:
      outcome: ${{ steps.upload.outputs.outcome }}
      version: ${{ steps.version.outputs.version }}
      channel: ${{ steps.channel.outputs.channel }}
    runs-on: [self-hosted, heavy]
    container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
    steps:
      - name: Wait for essential checks to succeed
        uses: lewagon/wait-on-check-action@v1.3.4
        with:
          ref: ${{ github.event.pull_request.head.sha || github.sha }}
          running-workflow-name: wait-for-check-regexp
          check-regexp: '(dependencies|test).*linux.*' # Ignore windows and mac tests but make sure linux passes
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          wait-interval: 10
      - name: Checkout
        uses: actions/checkout@v4
      - name: Generate channel
        id: channel
        shell: bash
        run: |
          echo channel="clio/pr_${{ github.event.pull_request.number }}" | tee ${GITHUB_OUTPUT}
      - name: Export new package
        shell: bash
        run: |
          conan export . ${{ steps.channel.outputs.channel }}
      - name: Add Ripple Conan remote
        shell: bash
        run: |
          conan remote list
          conan remote remove ripple || true
          # Do not quote the URL. An empty string will be accepted (with a non-fatal warning), but a missing argument will not.
          conan remote add ripple ${{ env.CONAN_URL }} --insert 0
      - name: Parse new version
        id: version
        shell: bash
        run: |
          echo version="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" \
            | awk -F '"' '{print $2}')" | tee ${GITHUB_OUTPUT}
      - name: Try to authenticate to Ripple Conan remote
        id: remote
        shell: bash
        run: |
          # `conan user` implicitly uses the environment variables CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
          # https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
          # https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
          # https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
          echo outcome=$(conan user --remote ripple --password >&2 \
            && echo success || echo failure) | tee ${GITHUB_OUTPUT}
      - name: Upload new package
        id: upload
        if: (steps.remote.outputs.outcome == 'success')
        shell: bash
        run: |
          echo "conan upload version ${{ steps.version.outputs.version }} on channel ${{ steps.channel.outputs.channel }}"
          echo outcome=$(conan upload xrpl/${{ steps.version.outputs.version }}@${{ steps.channel.outputs.channel }} --remote ripple --confirm >&2 \
            && echo success || echo failure) | tee ${GITHUB_OUTPUT}
  notify_clio:
    name: Notify Clio
    runs-on: ubuntu-latest
    needs: publish
    env:
      GH_TOKEN: ${{ secrets.CLIO_NOTIFY_TOKEN }}
    steps:
      - name: Notify Clio about new version
        if: (needs.publish.outputs.outcome == 'success')
        shell: bash
        run: |
          gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
            /repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
            -F "client_payload[version]=${{ needs.publish.outputs.version }}@${{ needs.publish.outputs.channel }}" \
            -F "client_payload[pr]=${{ github.event.pull_request.number }}"
102 .github/workflows/macos.yml vendored
@@ -1,102 +0,0 @@
name: macos
on:
  pull_request:
    types: [opened, reopened, synchronize, ready_for_review]
  push:
    # If the branches list is ever changed, be sure to change it on all
    # build/test jobs (nix, macos, windows, instrumentation)
    branches:
      # Always build the package branches
      - develop
      - release
      - master
      # Branches that opt-in to running
      - 'ci/**'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:

  test:
    if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
    strategy:
      matrix:
        platform:
          - macos
        generator:
          - Ninja
        configuration:
          - Release
    runs-on: [self-hosted, macOS]
    env:
      # The `build` action requires these variables.
      build_dir: .build
      NUM_PROCESSORS: 12
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: install Conan
        run: |
          brew install conan@1
          echo '/opt/homebrew/opt/conan@1/bin' >> $GITHUB_PATH
      - name: install Ninja
        if: matrix.generator == 'Ninja'
        run: brew install ninja
      - name: install python
        run: |
          if which python > /dev/null 2>&1; then
            echo "Python executable exists"
          else
            brew install python@3.13
            ln -s /opt/homebrew/bin/python3 /opt/homebrew/bin/python
          fi
      - name: install cmake
        run: |
          if which cmake > /dev/null 2>&1; then
            echo "cmake executable exists"
          else
            brew install cmake
          fi
      - name: install nproc
        run: |
          brew install coreutils
      - name: check environment
        run: |
          env | sort
          echo ${PATH} | tr ':' '\n'
          python --version
          conan --version
          cmake --version
          nproc --version
          echo -n "nproc returns: "
          nproc
          system_profiler SPHardwareDataType
          sysctl -n hw.logicalcpu
          clang --version
      - name: configure Conan
        run: |
          conan profile new default --detect || true
          conan profile update settings.compiler.cppstd=20 default
      - name: build dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
          CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
          CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
        with:
          configuration: ${{ matrix.configuration }}
      - name: build
        uses: ./.github/actions/build
        with:
          generator: ${{ matrix.generator }}
          configuration: ${{ matrix.configuration }}
          cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
      - name: test
        run: |
          n=$(nproc)
          echo "Using $n test jobs"

          cd ${build_dir}
          ./rippled --unittest --unittest-jobs $n
          ctest -j $n --output-on-failure
60 .github/workflows/missing-commits.yml vendored
@@ -1,60 +0,0 @@
name: missing-commits

on:
  push:
    branches:
      # Only check that the branches are up to date when updating the
      # relevant branches.
      - develop
      - release

jobs:
  up_to_date:
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Check for missing commits
        id: commits
        env:
          SUGGESTION: |

            If you are reading this, then the commits indicated above are
            missing from "develop" and/or "release". Do a reverse-merge
            as soon as possible. See CONTRIBUTING.md for instructions.
        run: |
          set -o pipefail
          # Branches ordered by how "canonical" they are. Every commit in
          # one branch should be in all the branches behind it
          order=( master release develop )
          branches=()
          for branch in "${order[@]}"
          do
            # Check that the branches exist so that this job will work on
            # forked repos, which don't necessarily have master and
            # release branches.
            if git ls-remote --exit-code --heads origin \
              refs/heads/${branch} > /dev/null
            then
              branches+=( origin/${branch} )
            fi
          done

          prior=()
          for branch in "${branches[@]}"
          do
            if [[ ${#prior[@]} -ne 0 ]]
            then
              echo "Checking ${prior[@]} for commits missing from ${branch}"
              git log --oneline --no-merges "${prior[@]}" \
                ^$branch | tee -a "missing-commits.txt"
              echo
            fi
            prior+=( "${branch}" )
          done
          if [[ $( cat missing-commits.txt | wc -l ) -ne 0 ]]
          then
            echo "${SUGGESTION}"
            exit 1
          fi
447 .github/workflows/nix.yml vendored
@@ -1,447 +0,0 @@
name: nix
on:
  pull_request:
    types: [opened, reopened, synchronize, ready_for_review]
  push:
    # If the branches list is ever changed, be sure to change it on all
    # build/test jobs (nix, macos, windows)
    branches:
      # Always build the package branches
      - develop
      - release
      - master
      # Branches that opt-in to running
      - "ci/**"
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

# This workflow has multiple job matrixes.
# They can be considered phases because most of the matrices ("test",
# "coverage", "conan", ) depend on the first ("dependencies").
#
# The first phase has a job in the matrix for each combination of
# variables that affects dependency ABI:
# platform, compiler, and configuration.
# It creates a GitHub artifact holding the Conan profile,
# and builds and caches binaries for all the dependencies.
# If an Artifactory remote is configured, they are cached there.
# If not, they are added to the GitHub artifact.
# GitHub's "cache" action has a size limit (10 GB) that is too small
# to hold the binaries if they are built locally.
# We must use the "{upload,download}-artifact" actions instead.
#
# The remaining phases have a job in the matrix for each test
# configuration. They install dependency binaries from the cache,
# whichever was used, and build and test rippled.
#
# "instrumentation" is independent, but is included here because it also
# builds on linux in the same "on:" conditions.

jobs:
  dependencies:
    if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux
        compiler:
          - gcc
          - clang
        configuration:
          - Debug
          - Release
        include:
          - compiler: gcc
            profile:
              version: 11
              cc: /usr/bin/gcc
              cxx: /usr/bin/g++
          - compiler: clang
            profile:
              version: 14
              cc: /usr/bin/clang-14
              cxx: /usr/bin/clang++-14
    runs-on: [self-hosted, heavy]
    container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
    env:
      build_dir: .build
    steps:
      - name: upgrade conan
        run: |
          pip install --upgrade "conan<2"
      - name: checkout
        uses: actions/checkout@v4
      - name: check environment
        run: |
          echo ${PATH} | tr ':' '\n'
          lsb_release -a || true
          ${{ matrix.profile.cc }} --version
          conan --version
          cmake --version
          env | sort
      - name: configure Conan
        run: |
          conan profile new default --detect
          conan profile update settings.compiler.cppstd=20 default
          conan profile update settings.compiler=${{ matrix.compiler }} default
          conan profile update settings.compiler.version=${{ matrix.profile.version }} default
          conan profile update settings.compiler.libcxx=libstdc++11 default
          conan profile update env.CC=${{ matrix.profile.cc }} default
          conan profile update env.CXX=${{ matrix.profile.cxx }} default
          conan profile update conf.tools.build:compiler_executables='{"c": "${{ matrix.profile.cc }}", "cpp": "${{ matrix.profile.cxx }}"}' default
      - name: archive profile
        # Create this archive before dependencies are added to the local cache.
        run: tar -czf conan.tar -C ~/.conan .
      - name: build dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
          CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
          CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
        with:
          configuration: ${{ matrix.configuration }}
      - name: upload archive
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
          path: conan.tar
          if-no-files-found: error

  test:
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux
        compiler:
          - gcc
          - clang
        configuration:
          - Debug
          - Release
        cmake-args:
          -
          - "-Dunity=ON"
    needs: dependencies
    runs-on: [self-hosted, heavy]
    container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
    env:
      build_dir: .build
    steps:
      - name: upgrade conan
        run: |
          pip install --upgrade "conan<2"
      - name: download cache
        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
      - name: extract cache
        run: |
          mkdir -p ~/.conan
          tar -xzf conan.tar -C ~/.conan
      - name: check environment
        run: |
          env | sort
          echo ${PATH} | tr ':' '\n'
          conan --version
          cmake --version
      - name: checkout
        uses: actions/checkout@v4
      - name: dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
        with:
          configuration: ${{ matrix.configuration }}
      - name: build
        uses: ./.github/actions/build
        with:
          generator: Ninja
          configuration: ${{ matrix.configuration }}
          cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
      - name: test
        run: |
          cd ${build_dir}
          ./rippled --unittest --unittest-jobs $(nproc)
          ctest -j $(nproc) --output-on-failure

  reference-fee-test:
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux
        compiler:
          - gcc
        configuration:
          - Debug
        cmake-args:
          - "-DUNIT_TEST_REFERENCE_FEE=200"
          - "-DUNIT_TEST_REFERENCE_FEE=1000"
    needs: dependencies
    runs-on: [self-hosted, heavy]
    container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
    env:
      build_dir: .build
    steps:
      - name: upgrade conan
        run: |
          pip install --upgrade "conan<2"
      - name: download cache
        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
      - name: extract cache
        run: |
          mkdir -p ~/.conan
          tar -xzf conan.tar -C ~/.conan
      - name: check environment
        run: |
          env | sort
          echo ${PATH} | tr ':' '\n'
          conan --version
          cmake --version
      - name: checkout
        uses: actions/checkout@v4
      - name: dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
        with:
          configuration: ${{ matrix.configuration }}
      - name: build
        uses: ./.github/actions/build
        with:
          generator: Ninja
          configuration: ${{ matrix.configuration }}
          cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
      - name: test
        run: |
          cd ${build_dir}
          ./rippled --unittest --unittest-jobs $(nproc)
          ctest -j $(nproc) --output-on-failure
  coverage:
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux
        compiler:
          - gcc
        configuration:
          - Debug
    needs: dependencies
    runs-on: [self-hosted, heavy]
    container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
    env:
      build_dir: .build
    steps:
      - name: upgrade conan
        run: |
          pip install --upgrade "conan<2"
      - name: download cache
        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
      - name: extract cache
        run: |
          mkdir -p ~/.conan
          tar -xzf conan.tar -C ~/.conan
      - name: install gcovr
        run: pip install "gcovr>=7,<9"
      - name: check environment
        run: |
          echo ${PATH} | tr ':' '\n'
          conan --version
          cmake --version
          gcovr --version
          env | sort
          ls ~/.conan
      - name: checkout
        uses: actions/checkout@v4
      - name: dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
        with:
          configuration: ${{ matrix.configuration }}
      - name: build
        uses: ./.github/actions/build
        with:
          generator: Ninja
          configuration: ${{ matrix.configuration }}
          cmake-args: >-
            -Dassert=TRUE
            -Dwerr=TRUE
            -Dcoverage=ON
            -Dcoverage_format=xml
            -DCODE_COVERAGE_VERBOSE=ON
            -DCMAKE_CXX_FLAGS="-O0"
            -DCMAKE_C_FLAGS="-O0"
          cmake-target: coverage
      - name: move coverage report
        shell: bash
        run: |
          mv "${build_dir}/coverage.xml" ./
      - name: archive coverage report
        uses: actions/upload-artifact@v4
        with:
          name: coverage.xml
          path: coverage.xml
          retention-days: 30
      - name: upload coverage report
        uses: wandalen/wretry.action@v1.4.10
        with:
          action: codecov/codecov-action@v4.5.0
          with: |
            files: coverage.xml
            fail_ci_if_error: true
            disable_search: true
            verbose: true
            plugin: noop
            token: ${{ secrets.CODECOV_TOKEN }}
          attempt_limit: 5
          attempt_delay: 210000 # in milliseconds

  conan:
    needs: dependencies
    runs-on: [self-hosted, heavy]
    container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
    env:
      build_dir: .build
      configuration: Release
    steps:
      - name: upgrade conan
        run: |
          pip install --upgrade "conan<2"
      - name: download cache
        uses: actions/download-artifact@v4
        with:
          name: linux-gcc-${{ env.configuration }}
      - name: extract cache
        run: |
          mkdir -p ~/.conan
          tar -xzf conan.tar -C ~/.conan
      - name: check environment
        run: |
          env | sort
          echo ${PATH} | tr ':' '\n'
          conan --version
          cmake --version
      - name: checkout
        uses: actions/checkout@v4
      - name: dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
        with:
          configuration: ${{ env.configuration }}
      - name: export
        run: |
          version=$(conan inspect --raw version .)
          reference="xrpl/${version}@local/test"
          conan remove -f ${reference} || true
          conan export . local/test
          echo "reference=${reference}" >> "${GITHUB_ENV}"
      - name: build
        run: |
          cd tests/conan
          mkdir ${build_dir}
          cd ${build_dir}
          conan install .. --output-folder . \
            --require-override ${reference} --build missing
          cmake .. \
            -DCMAKE_TOOLCHAIN_FILE:FILEPATH=./build/${configuration}/generators/conan_toolchain.cmake \
            -DCMAKE_BUILD_TYPE=${configuration}
          cmake --build .
          ./example | grep '^[[:digit:]]\+\.[[:digit:]]\+\.[[:digit:]]\+'

  # NOTE we are not using dependencies built above because it lags with
  # compiler versions. Instrumentation requires clang version 16 or
  # later

  instrumentation-build:
    if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
    env:
      CLANG_RELEASE: 16
    strategy:
      fail-fast: false
    runs-on: [self-hosted, heavy]
    container: debian:bookworm
    steps:
      - name: install prerequisites
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          apt-get update
          apt-get install --yes --no-install-recommends \
            clang-${CLANG_RELEASE} clang++-${CLANG_RELEASE} \
            python3-pip python-is-python3 make cmake git wget
          apt-get clean
          update-alternatives --install \
            /usr/bin/clang clang /usr/bin/clang-${CLANG_RELEASE} 100 \
            --slave /usr/bin/clang++ clang++ /usr/bin/clang++-${CLANG_RELEASE}
          update-alternatives --auto clang
          pip install --no-cache --break-system-packages "conan<2"

      - name: checkout
        uses: actions/checkout@v4

      - name: prepare environment
        run: |
          mkdir ${GITHUB_WORKSPACE}/.build
          echo "SOURCE_DIR=$GITHUB_WORKSPACE" >> $GITHUB_ENV
          echo "BUILD_DIR=$GITHUB_WORKSPACE/.build" >> $GITHUB_ENV
          echo "CC=/usr/bin/clang" >> $GITHUB_ENV
          echo "CXX=/usr/bin/clang++" >> $GITHUB_ENV

      - name: configure Conan
        run: |
          conan profile new --detect default
          conan profile update settings.compiler=clang default
          conan profile update settings.compiler.version=${CLANG_RELEASE} default
          conan profile update settings.compiler.libcxx=libstdc++11 default
          conan profile update settings.compiler.cppstd=20 default
          conan profile update options.rocksdb=False default
          conan profile update \
            'conf.tools.build:compiler_executables={"c": "/usr/bin/clang", "cpp": "/usr/bin/clang++"}' default
          conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
          conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
          conan export external/snappy snappy/1.1.10@
          conan export external/soci soci/4.0.3@

      - name: build dependencies
        run: |
          cd ${BUILD_DIR}
          conan install ${SOURCE_DIR} \
            --output-folder ${BUILD_DIR} \
            --install-folder ${BUILD_DIR} \
            --build missing \
            --settings build_type=Debug

      - name: build with instrumentation
        run: |
          cd ${BUILD_DIR}
          cmake -S ${SOURCE_DIR} -B ${BUILD_DIR} \
            -Dvoidstar=ON \
            -Dtests=ON \
            -Dxrpld=ON \
            -DCMAKE_BUILD_TYPE=Debug \
            -DSECP256K1_BUILD_BENCHMARK=OFF \
            -DSECP256K1_BUILD_TESTS=OFF \
            -DSECP256K1_BUILD_EXHAUSTIVE_TESTS=OFF \
            -DCMAKE_TOOLCHAIN_FILE=${BUILD_DIR}/build/generators/conan_toolchain.cmake
          cmake --build . --parallel $(nproc)

      - name: verify instrumentation enabled
        run: |
          cd ${BUILD_DIR}
          ./rippled --version | grep libvoidstar

      - name: run unit tests
        run: |
          cd ${BUILD_DIR}
          ./rippled -u --unittest-jobs $(( $(nproc)/4 ))
          ctest -j $(nproc) --output-on-failure
80 .github/workflows/notify-clio.yml vendored Normal file
@@ -0,0 +1,80 @@
# This workflow exports the built libxrpl package to the Conan remote on a
# channel named after the pull request, and notifies the Clio repository about
# the new version so it can check for compatibility.
name: Notify Clio

# This workflow can only be triggered by other workflows.
on:
  workflow_call:
    inputs:
      conan_remote_name:
        description: "The name of the Conan remote to use."
        required: true
        type: string
      conan_remote_url:
        description: "The URL of the Conan endpoint to use."
        required: true
        type: string
    secrets:
      clio_notify_token:
        description: "The GitHub token to notify Clio about new versions."
        required: true
      conan_remote_username:
        description: "The username for logging into the Conan remote."
        required: true
      conan_remote_password:
        description: "The password for logging into the Conan remote."
        required: true

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-clio
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  upload:
    if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Generate outputs
        id: generate
        run: |
          echo 'Generating user and channel.'
          echo "user=clio" >> "${GITHUB_OUTPUT}"
          echo "channel=pr_${{ github.event.pull_request.number }}" >> "${GITHUB_OUTPUT}"
          echo 'Extracting version.'
          echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
      - name: Add Conan remote
        run: |
          echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
          conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
          echo 'Listing Conan remotes.'
          conan remote list
      - name: Log into Conan remote
        run: conan remote login ${{ inputs.conan_remote_name }} "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
      - name: Upload package
        run: |
          conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
          conan upload --confirm --check --remote=${{ inputs.conan_remote_name }} xrpl/${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}
    outputs:
      channel: ${{ steps.generate.outputs.channel }}
      version: ${{ steps.generate.outputs.version }}

  notify:
    needs: upload
    runs-on: ubuntu-latest
    env:
      GH_TOKEN: ${{ secrets.clio_notify_token }}
    steps:
      - name: Notify Clio
        run: |
          gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
            /repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
            -F "client_payload[version]=${{ needs.upload.outputs.version }}@${{ needs.upload.outputs.user }}/${{ needs.upload.outputs.channel }}" \
            -F "client_payload[pr]=${{ github.event.pull_request.number }}"
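Run by hand, the upload step reduces to an export and an upload under the PR-specific user/channel. A sketch, where `pr_1234` stands in for the real pull request number and `xrplf` for the configured remote:

```bash
# The version is parsed from BuildInfo.cpp, as in the 'Generate outputs' step.
VERSION=$(grep 'versionString =' src/libxrpl/protocol/BuildInfo.cpp | awk -F '"' '{print $2}')
conan export --user=clio --channel=pr_1234 .
conan upload --confirm --check --remote=xrplf "xrpl/${VERSION}@clio/pr_1234"
```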
118 .github/workflows/on-pr.yml vendored Normal file
@@ -0,0 +1,118 @@
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as on macOS and Windows, on every push to a
# user branch. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label.
name: PR

on:
  pull_request:
    paths:
      - ".github/actions/build-deps/**"
      - ".github/actions/build-test/**"
      - ".github/scripts/levelization/**"
      - ".github/scripts/strategy-matrix/**"
      - ".github/workflows/build-test.yml"
      - ".github/workflows/check-format.yml"
      - ".github/workflows/check-levelization.yml"
      - ".github/workflows/notify-clio.yml"
      - ".github/workflows/on-pr.yml"
      # Keep the list of paths below in sync with those in the `on-trigger.yml`
      # file.
      - "cmake/**"
      - "conan/**"
      - "external/**"
      - "include/**"
      - "src/**"
      - "tests/**"
      - ".clang-format"
      - ".codecov.yml"
      - ".pre-commit-config.yaml"
      - "CMakeLists.txt"
      - "conanfile.py"
    types:
      - opened
      - synchronize
      - labeled
      - unlabeled

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  CONAN_REMOTE_NAME: xrplf
  CONAN_REMOTE_URL: https://conan.ripplex.io

jobs:
  # This job determines whether the workflow should run. It runs when:
  # * Opened as a non-draft PR.
  # * A commit is added to a non-draft PR or the PR has the 'DraftRunCI' label.
  # * A draft PR has the 'DraftRunCI' label added.
  # * A non-draft PR has the 'DraftRunCI' label removed.
  # These checks are in part to ensure the workflow won't run needlessly while
  # also allowing it to be triggered without having to add a no-op commit. A new
  # workflow execution can be triggered by adding and then removing the label on
  # a non-draft PR, or conversely by removing it and then adding it back on a
  # draft PR; this can be useful in certain cases.
  should-run:
    if: >-
      ${{
        (github.event.action == 'opened' && !github.event.pull_request.draft) ||
        (github.event.action == 'synchronize' && (!github.event.pull_request.draft || contains(github.event.pull_request.labels.*.name, 'DraftRunCI'))) ||
        (github.event.action == 'labeled' && github.event.pull_request.draft && github.event.label.name == 'DraftRunCI') ||
        (github.event.action == 'unlabeled' && !github.event.pull_request.draft && github.event.label.name == 'DraftRunCI')
      }}
    runs-on: ubuntu-latest
    steps:
      - name: No-op
        run: true

  check-format:
    needs: should-run
    uses: ./.github/workflows/check-format.yml

  check-levelization:
    needs: should-run
    uses: ./.github/workflows/check-levelization.yml

  # This job works around the limitation that GitHub Actions does not support
  # using environment variables as inputs for reusable workflows.
  generate-outputs:
    needs: should-run
    runs-on: ubuntu-latest
    steps:
      - name: No-op
        run: true
    outputs:
      conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
      conan_remote_url: ${{ env.CONAN_REMOTE_URL }}

  build-test:
    needs: generate-outputs
    uses: ./.github/workflows/build-test.yml
    strategy:
      matrix:
        os: [linux, macos, windows]
    with:
      conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
      conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
      os: ${{ matrix.os }}
    secrets:
      codecov_token: ${{ secrets.CODECOV_TOKEN }}

  notify-clio:
    needs:
      - generate-outputs
      - build-test
    uses: ./.github/workflows/notify-clio.yml
    with:
      conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
      conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
    secrets:
      clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
      conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
      conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
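As the comment on `should-run` notes, a new run can be forced without a no-op commit by toggling the label. With the GitHub CLI that might look like the following (a sketch; `1234` is a hypothetical PR number):

```bash
gh pr edit 1234 --add-label DraftRunCI
gh pr edit 1234 --remove-label DraftRunCI
```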
115 .github/workflows/on-trigger.yml vendored Normal file
@@ -0,0 +1,115 @@
# This workflow runs all workflows to build the dependencies required for the
# project on various Linux flavors, as well as on macOS and Windows, on a
# scheduled basis, on merge into the 'develop', 'release', or 'master' branches,
# or manually. The missing commits check is only run when the code is merged
# into the 'develop' or 'release' branches, and the documentation is built when
# the code is merged into the 'develop' branch.
name: Trigger

on:
  push:
    branches:
      - develop
      - release
      - master
    paths:
      - ".github/actions/build-deps/**"
      - ".github/actions/build-test/**"
      - ".github/scripts/strategy-matrix/**"
      - ".github/workflows/build-test.yml"
      - ".github/workflows/check-missing-commits.yml"
      - ".github/workflows/on-trigger.yml"
      - ".github/workflows/publish-docs.yml"
      # Keep the list of paths below in sync with those in `on-pr.yml`.
      - "cmake/**"
      - "conan/**"
      - "external/**"
      - "include/**"
      - "src/**"
      - "tests/**"
      - ".clang-format"
      - ".codecov.yml"
      - ".pre-commit-config.yaml"
      - "CMakeLists.txt"
      - "conanfile.py"
  # Run at 06:32 UTC on every day of the week from Monday through Friday. This
  # will force all dependencies to be rebuilt, which is useful to verify that
  # all dependencies can be built successfully. Only the dependencies that
  # are actually missing from the remote will be uploaded.
  schedule:
    - cron: "32 6 * * 1-5"
  # Run when manually triggered via the GitHub UI or API. If `force_upload` is
  # true, then the dependencies that were missing (`force_rebuild` is false) or
  # rebuilt (`force_rebuild` is true) will be uploaded, overwriting existing
  # dependencies if needed.
  workflow_dispatch:
    inputs:
      dependencies_force_build:
        description: "Force building of all dependencies."
        required: false
        type: boolean
        default: false
      dependencies_force_upload:
        description: "Force uploading of all dependencies."
        required: false
        type: boolean
        default: false

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  CONAN_REMOTE_NAME: xrplf
  CONAN_REMOTE_URL: https://conan.ripplex.io

jobs:
  check-missing-commits:
    if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
    uses: ./.github/workflows/check-missing-commits.yml

  # This job works around the limitation that GitHub Actions does not support
  # using environment variables as inputs for reusable workflows. It also sets
  # outputs that depend on the event that triggered the workflow.
  generate-outputs:
    runs-on: ubuntu-latest
    steps:
      - name: Check inputs and set outputs
        id: generate
        run: |
          if [[ '${{ github.event_name }}' == 'push' ]]; then
            echo 'dependencies_force_build=false' >> "${GITHUB_OUTPUT}"
            echo 'dependencies_force_upload=false' >> "${GITHUB_OUTPUT}"
          elif [[ '${{ github.event_name }}' == 'schedule' ]]; then
            echo 'dependencies_force_build=true' >> "${GITHUB_OUTPUT}"
            echo 'dependencies_force_upload=false' >> "${GITHUB_OUTPUT}"
          else
            echo 'dependencies_force_build=${{ inputs.dependencies_force_build }}' >> "${GITHUB_OUTPUT}"
            echo 'dependencies_force_upload=${{ inputs.dependencies_force_upload }}' >> "${GITHUB_OUTPUT}"
          fi
    outputs:
      conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
      conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
      dependencies_force_build: ${{ steps.generate.outputs.dependencies_force_build }}
      dependencies_force_upload: ${{ steps.generate.outputs.dependencies_force_upload }}

  build-test:
    needs: generate-outputs
    uses: ./.github/workflows/build-test.yml
    strategy:
      matrix:
        os: [linux, macos, windows]
    with:
      conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
      conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
      dependencies_force_build: ${{ needs.generate-outputs.outputs.dependencies_force_build == 'true' }}
      dependencies_force_upload: ${{ needs.generate-outputs.outputs.dependencies_force_upload == 'true' }}
      os: ${{ matrix.os }}
      strategy_matrix: "all"
    secrets:
      conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
      conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
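A manual run with the dispatch inputs can be started from the GitHub CLI. A sketch, assuming the workflow is addressed by its file name:

```bash
gh workflow run on-trigger.yml \
  --field dependencies_force_build=true \
  --field dependencies_force_upload=false
```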
60 .github/workflows/publish-docs.yml vendored Normal file
@@ -0,0 +1,60 @@
# This workflow builds the documentation for the repository, and publishes it to
# GitHub Pages when changes are merged into the default branch.
name: Build and publish documentation

on:
  push:
    paths:
      - ".github/workflows/publish-docs.yml"
      - "*.md"
      - "**/*.md"
      - "docs/**"
      - "include/**"
      - "src/libxrpl/**"
      - "src/xrpld/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  BUILD_DIR: .build

jobs:
  publish:
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/tools-rippled-documentation
    permissions:
      contents: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Check configuration
        run: |
          echo 'Checking path.'
          echo ${PATH} | tr ':' '\n'

          echo 'Checking environment variables.'
          env | sort

          echo 'Checking CMake version.'
          cmake --version

          echo 'Checking Doxygen version.'
          doxygen --version
      - name: Build documentation
        run: |
          mkdir -p ${{ env.BUILD_DIR }}
          cd ${{ env.BUILD_DIR }}
          cmake -Donly_docs=ON ..
          cmake --build . --target docs --parallel $(nproc)
      - name: Publish documentation
        if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ${{ env.BUILD_DIR }}/docs/html
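The documentation builds locally with the same commands the job runs (assuming CMake and Doxygen are installed):

```bash
mkdir -p .build && cd .build
cmake -Donly_docs=ON ..
cmake --build . --target docs --parallel $(nproc)
# The generated HTML lands in .build/docs/html.
```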
100 .github/workflows/windows.yml vendored
@@ -1,100 +0,0 @@
name: windows

on:
  pull_request:
    types: [opened, reopened, synchronize, ready_for_review]
  push:
    # If the branches list is ever changed, be sure to change it on all
    # build/test jobs (nix, macos, windows, instrumentation)
    branches:
      # Always build the package branches
      - develop
      - release
      - master
      # Branches that opt-in to running
      - 'ci/**'

# https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:

  test:
    if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
    strategy:
      fail-fast: false
      matrix:
        version:
          - generator: Visual Studio 17 2022
            runs-on: windows-2022
        configuration:
          - type: Release
            tests: true
          - type: Debug
            # Skip running unit tests on debug builds, because they
            # take an unreasonable amount of time
            tests: false
            runtime: d
    runs-on: ${{ matrix.version.runs-on }}
    env:
      build_dir: .build
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: choose Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.9
      - name: learn Python cache directory
        id: pip-cache
        shell: bash
        run: |
          python -m pip install --upgrade pip
          echo "dir=$(pip cache dir)" | tee ${GITHUB_OUTPUT}
      - name: restore Python cache directory
        uses: actions/cache@v4
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-${{ hashFiles('.github/workflows/windows.yml') }}
      - name: install Conan
        run: pip install wheel 'conan<2'
      - name: check environment
        run: |
          dir env:
          $env:PATH -split ';'
          python --version
          conan --version
          cmake --version
      - name: configure Conan
        shell: bash
        run: |
          conan profile new default --detect
          conan profile update settings.compiler.cppstd=20 default
          conan profile update \
            settings.compiler.runtime=MT${{ matrix.configuration.runtime }} \
            default
      - name: build dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
          CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
          CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
        with:
          configuration: ${{ matrix.configuration.type }}
      - name: build
        uses: ./.github/actions/build
        with:
          generator: '${{ matrix.version.generator }}'
          configuration: ${{ matrix.configuration.type }}
          # Hard code for now. Move to the matrix if varied options are needed
          cmake-args: '-Dassert=TRUE -Dwerr=TRUE -Dreporting=OFF -Dunity=ON'
          cmake-target: install
      - name: test
        shell: bash
        if: ${{ matrix.configuration.tests }}
        run: |
          cd ${build_dir}/${{ matrix.configuration.type }}
          ./rippled --unittest --unittest-jobs $(nproc)
          ctest -j $(nproc) --output-on-failure
9 .gitignore vendored
@@ -37,10 +37,9 @@ Release/*.*
*.gcov

# Levelization checking
Builds/levelization/results/rawincludes.txt
Builds/levelization/results/paths.txt
Builds/levelization/results/includes/
Builds/levelization/results/includedby/
.github/scripts/levelization/results/*
!.github/scripts/levelization/results/loops.txt
!.github/scripts/levelization/results/ordering.txt

# Ignore tmp directory.
tmp
@@ -111,4 +110,4 @@ bld.rippled/
.vscode

# Suggested in-tree build directory
/.build/
/.build*/
@@ -1,6 +1,64 @@
# .pre-commit-config.yaml
# To run pre-commit hooks, first install pre-commit:
# - `pip install pre-commit==${PRE_COMMIT_VERSION}`
# - `pip install pre-commit-hooks==${PRE_COMMIT_HOOKS_VERSION}`
#
# Depending on your system, you can use `brew install` or `apt install` as well
# for installing the pre-commit package, but `pip` is needed to install the
# hooks; you can also use `pipx` if you prefer.
# Next, install the required formatters:
# - `pip install clang-format==${CLANG_VERSION}`
# - `npm install prettier@${PRETTIER_VERSION}`
#
# See https://github.com/XRPLF/ci/blob/main/.github/workflows/tools-rippled.yml
# for the versions used in the CI pipeline. You will need to have the exact same
# versions of the tools installed on your system to produce the same results as
# the pipeline.
#
# Then, run the following command to install the git hook scripts:
# - `pre-commit install`
# You can run all configured hooks against all files with:
# - `pre-commit run --all-files`
# To manually run a specific hook, use:
# - `pre-commit run <hook_id> --all-files`
# To run the hooks against only the files changed in the current commit, use:
# - `pre-commit run`
repos:
  - repo: https://github.com/pre-commit/mirrors-clang-format
    rev: v18.1.3
    hooks:
      - id: clang-format
  - repo: local
    hooks:
      - id: clang-format
        name: clang-format
        language: system
        entry: clang-format -i
        files: '\.(cpp|hpp|h|ipp|proto)$'
      - id: trailing-whitespace
        name: trailing-whitespace
        entry: trailing-whitespace-fixer
        language: system
        types: [text]
      - id: end-of-file
        name: end-of-file
        entry: end-of-file-fixer
        language: system
        types: [text]
      - id: mixed-line-ending
        name: mixed-line-ending
        entry: mixed-line-ending
        language: system
        types: [text]
      - id: check-merge-conflict
        name: check-merge-conflict
        entry: check-merge-conflict --assume-in-merge
        language: system
        types: [text]
  - repo: local
    hooks:
      - id: prettier
        name: prettier
        language: system
        entry: prettier --ignore-unknown --write

        exclude: |
          (?x)^(
            external/.*|
            .github/scripts/levelization/results/.*\.txt
          )$
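Following the comments at the top of this file, a typical local setup is:

```bash
pip install pre-commit
pre-commit install          # install the git hook scripts
pre-commit run --all-files  # run every configured hook against the whole tree
```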
1 .prettierignore Normal file
@@ -0,0 +1 @@
external
609 BUILD.md
@@ -3,29 +3,29 @@
|
||||
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |
|
||||
|
||||
> These instructions also assume a basic familiarity with Conan and CMake.
|
||||
> If you are unfamiliar with Conan,
|
||||
> you can read our [crash course](./docs/build/conan.md)
|
||||
> or the official [Getting Started][3] walkthrough.
|
||||
> If you are unfamiliar with Conan, you can read our
|
||||
> [crash course](./docs/build/conan.md) or the official [Getting Started][3]
|
||||
> walkthrough.
|
||||
|
||||
## Branches
|
||||
|
||||
For a stable release, choose the `master` branch or one of the [tagged
|
||||
releases](https://github.com/ripple/rippled/releases).
|
||||
|
||||
```
|
||||
```bash
|
||||
git checkout master
|
||||
```
|
||||
|
||||
For the latest release candidate, choose the `release` branch.
|
||||
|
||||
```
|
||||
```bash
|
||||
git checkout release
|
||||
```
|
||||
|
||||
For the latest set of untested features, or to contribute, choose the `develop`
|
||||
branch.
|
||||
|
||||
```
|
||||
```bash
|
||||
git checkout develop
|
||||
```
|
||||
|
||||
@@ -33,176 +33,323 @@ git checkout develop

See [System Requirements](https://xrpl.org/system-requirements.html).

Building rippled generally requires git, Python, Conan, CMake, and a C++
compiler. Some guidance on setting up such a [C++ development environment can be
found here](./docs/build/environment.md).

- [Python 3.11](https://www.python.org/downloads/), or higher
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
- [CMake 3.22](https://cmake.org/download/)[^2], or higher

[^1]:
    It is possible to build with Conan 1.60+, but the instructions are
    significantly different, which is why we are not recommending it.

[^2]:
    CMake 4 is not yet supported by all dependencies required by this project.
    If you are affected by this issue, follow the [Conan workaround for CMake
    4](#workaround-for-cmake-4).
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:

| Compiler    | Version   |
| ----------- | --------- |
| GCC         | 12        |
| Clang       | 16        |
| Apple Clang | 16        |
| MSVC        | 19.44[^3] |
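
Before configuring anything, it may help to confirm that the toolchain on your `PATH` meets these minimums. These version probes are generic commands, not rippled-specific:

```bash
g++ --version        # or: clang++ --version
cmake --version
conan --version
python3 --version
```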
### Linux

The Ubuntu Linux distribution has received the highest level of quality
assurance, testing, and support. We also support Red Hat and use Debian
internally.

Here are [sample instructions for setting up a C++ development environment on
Linux](./docs/build/environment.md#linux).

### Mac

Many rippled engineers use macOS for development.

Here are [sample instructions for setting up a C++ development environment on
macOS](./docs/build/environment.md#macos).
### Windows

Windows is used by some engineers for development only.

- Additionally, 32-bit Windows development is not supported.

[^3]: Windows is not recommended for production use.
## Steps

### Set Up Conan

After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python,
Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.

These instructions assume a basic familiarity with Conan and CMake. If you are
unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official
[Getting Started][3] walkthrough.
#### Default profile

We recommend that you import the provided `conan/profiles/default` profile:

```bash
conan config install conan/profiles/ -tf $(conan config home)/profiles/
```

You can check your Conan profile by running:

```bash
conan profile show
```
#### Custom profile

If the default profile does not work for you and you do not yet have a Conan
profile, you can create one by running:

```bash
conan profile detect
```

You may need to make changes to the profile to suit your environment. You can
refer to the provided `conan/profiles/default` profile for inspiration, and you
may also need to apply the required [tweaks](#conan-profile-tweaks) to this
default profile.

### Patched recipes

The recipes in Conan Center occasionally need to be patched for compatibility
with the latest version of `rippled`. We maintain a fork of the Conan Center
[here](https://github.com/XRPLF/conan-center-index/) containing the patches.

To ensure our patched recipes are used, you must add our Conan remote at a
higher index than the default Conan Center remote, so it is consulted first. You
can do this by running:

```bash
conan remote add --index 0 xrplf "https://conan.ripplex.io"
```
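
You can then verify the ordering, since Conan consults remotes in index order:

```bash
conan remote list   # the xrplf remote should appear before conancenter
```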

Alternatively, you can pull the patched recipes into the repository and use them
locally:

```bash
cd external
git init
git remote add origin git@github.com:XRPLF/conan-center-index.git
git sparse-checkout init
git sparse-checkout set recipes/snappy
git sparse-checkout add recipes/soci
git fetch origin master
git checkout master
conan export --version 1.1.10 recipes/snappy/all
conan export --version 4.0.3 recipes/soci/all
rm -rf .git
```

In the case we switch to a newer version of a dependency that still requires a
patch, it will be necessary for you to pull in the changes and re-export the
updated dependencies with the newer version. However, if we switch to a newer
version that no longer requires a patch, no action is required on your part, as
the new recipe will be automatically pulled from the official Conan Center.
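
For example, if the Snappy patch were updated, a re-export using the local checkout approach above might look like this sketch (the version number is illustrative; use whatever `conanfile.py` actually requires):

```bash
cd external
git fetch origin master && git checkout master    # pull in the updated recipes
conan export --version 1.1.10 recipes/snappy/all  # re-export the patched recipe
```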

### Conan profile tweaks

#### Missing compiler version

If you see an error similar to the following after running `conan profile show`:

```text
ERROR: Invalid setting '17' is not a valid 'settings.compiler.version' value.
Possible values are ['5.0', '5.1', '6.0', '6.1', '7.0', '7.3', '8.0', '8.1',
'9.0', '9.1', '10.0', '11.0', '12.0', '13', '13.0', '13.1', '14', '14.0', '15',
'15.0', '16', '16.0']
Read "http://docs.conan.io/2/knowledge/faq.html#error-invalid-setting"
```
you need to amend the list of compiler versions in
`$(conan config home)/settings.yml`, by appending the required version number(s)
to the `version` array specific for your compiler. For example:

```yaml
apple-clang:
  version:
    [
      "5.0",
      "5.1",
      "6.0",
      "6.1",
      "7.0",
      "7.3",
      "8.0",
      "8.1",
      "9.0",
      "9.1",
      "10.0",
      "11.0",
      "12.0",
      "13",
      "13.0",
      "13.1",
      "14",
      "14.0",
      "15",
      "15.0",
      "16",
      "16.0",
      "17",
      "17.0",
    ]
```
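
After editing the file, repeating the document's own check should confirm the fix:

```bash
conan profile show   # the "Invalid setting" error should be gone
```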
#### Multiple compilers

If you have multiple compilers installed, make sure to select the one to use in
your default Conan configuration **before** running `conan profile detect`, by
setting the `CC` and `CXX` environment variables.

For example, if you are running MacOS and have [homebrew
LLVM@18](https://formulae.brew.sh/formula/llvm@18), and want to use it as a
compiler in the new Conan profile:

```bash
export CC=$(brew --prefix llvm@18)/bin/clang
export CXX=$(brew --prefix llvm@18)/bin/clang++
conan profile detect
```

You should also explicitly set the path to the compiler in the profile file,
which helps to avoid errors when `CC` and/or `CXX` are set and disagree with the
selected Conan profile. For example:

```text
[conf]
tools.build:compiler_executables={'c':'/usr/bin/gcc','cpp':'/usr/bin/g++'}
```

#### Multiple profiles

You can manage multiple Conan profiles in the directory
`$(conan config home)/profiles`, for example renaming `default` to a different
name and then creating a new `default` profile for a different compiler.
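
A minimal sketch of that renaming flow (the profile names are arbitrary):

```bash
cd "$(conan config home)/profiles"
mv default default.gcc   # keep the old profile under another name
conan profile detect     # generate a fresh default, e.g. after changing CC/CXX
```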
#### Select language

The default profile created by Conan will typically select a different C++
dialect than the C++20 used by this project. You should set `20` in the profile
line starting with `compiler.cppstd=`. For example:

```bash
sed -i.bak -e 's|^compiler\.cppstd=.*$|compiler.cppstd=20|' $(conan config home)/profiles/default
```

#### Select standard library in Linux

**Linux** developers will commonly have a default Conan [profile][] that
compiles with GCC and links with libstdc++. If you are linking with libstdc++
(see profile setting `compiler.libcxx`), then you will need to choose the
`libstdc++11` ABI:

```bash
sed -i.bak -e 's|^compiler\.libcxx=.*$|compiler.libcxx=libstdc++11|' $(conan config home)/profiles/default
```

#### Select architecture and runtime in Windows

**Windows** developers may need to use the x64 native build tools. An easy way
to do that is to run the shortcut "x64 Native Tools Command Prompt" for the
version of Visual Studio that you have installed.

Windows developers must also build `rippled` and its dependencies for the x64
architecture:

```bash
sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
```

**Windows** developers also must select static runtime:

```bash
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
```

#### Workaround for CMake 4

If your system CMake is version 4 rather than 3, you may have to configure the
Conan profile to use CMake version 3 for dependencies, by adding the following
two lines to your profile:

```text
[tool_requires]
!cmake/*: cmake/[>=3 <4]
```

This will force Conan to download and use a locally cached CMake 3 version, and
is needed because some of the dependencies used by this project do not support
CMake 4.
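
One way to apply this, assuming your profile does not already contain a `[tool_requires]` section, is to append it from the shell:

```bash
cat >> "$(conan config home)/profiles/default" <<'EOF'

[tool_requires]
!cmake/*: cmake/[>=3 <4]
EOF
```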

#### Clang workaround for grpc

If your compiler is clang, version 19 or later, or apple-clang, version 17 or
later, you may encounter a compilation error while building the `grpc`
dependency:

```text
In file included from .../lib/promise/try_seq.h:26:
.../lib/promise/detail/basic_seq.h:499:38: error: a template argument list is expected after a name prefixed by the template keyword [-Wmissing-template-arg-list-after-template-kw]
  499 |             Traits::template CallSeqFactory(f_, *cur_, std::move(arg)));
      |                                      ^
```

The workaround for this error is to add two lines to your profile:

```text
[conf]
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
```

#### Workaround for gcc 12

If your compiler is gcc, version 12, and you have enabled the `werr` option,
you may encounter a compilation error such as:

```text
/usr/include/c++/12/bits/char_traits.h:435:56: error: 'void* __builtin_memcpy(void*, const void*, long unsigned int)' accessing 9223372036854775810 or more bytes at offsets [2, 9223372036854775807] and 1 may overlap up to 9223372036854775813 bytes at offset -3 [-Werror=restrict]
  435 |         return static_cast<char_type*>(__builtin_memcpy(__s1, __s2, __n));
      |                ~~~~~~~~~~~~~~~~^~~~~~~~~~~~~~~~~
cc1plus: all warnings being treated as errors
```

The workaround for this error is to add two lines to your profile:

```text
[conf]
tools.build:cxxflags=['-Wno-restrict']
```

#### Workaround for clang 16

If your compiler is clang, version 16, you may encounter a compilation error
such as:

```text
In file included from .../boost/beast/websocket/stream.hpp:2857:
.../boost/beast/websocket/impl/read.hpp:695:17: error: call to 'async_teardown' is ambiguous
        async_teardown(impl.role, impl.stream(),
        ^~~~~~~~~~~~~~
```

The workaround for this error is to add two lines to your profile:

```text
[conf]
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
```
### Build and Test
@@ -224,71 +371,70 @@ It fixes some source files to add missing `#include`s.

2. Use conan to generate CMake files for every configuration you want to build:

   ```
   conan install .. --output-folder . --build missing --settings build_type=Release
   conan install .. --output-folder . --build missing --settings build_type=Debug
   ```

   To build Debug, in the next step, be sure to set `-DCMAKE_BUILD_TYPE=Debug`

   For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
   you only need to run this command once.
   For a multi-configuration generator, e.g. `Visual Studio`, you may want to
   run it more than once.

   Each of these commands should also have a different `build_type` setting.
   A second command with the same `build_type` setting will overwrite the files
   generated by the first. You can pass the build type on the command line with
   `--settings build_type=$BUILD_TYPE` or in the profile itself,
   under the section `[settings]` with the key `build_type`.

   If you are using a Microsoft Visual C++ compiler,
   then you will need to ensure consistency between the `build_type` setting
   and the `compiler.runtime` setting.

   When `build_type` is `Release`, `compiler.runtime` should be `MT`.

   When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.

   ```
   conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
   conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
   ```
3. Configure CMake and pass the toolchain file generated by Conan, located at
   `$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.

   Single-config generators:

   Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
   and make sure it matches one of the `build_type` settings
   you chose in the previous step.

   For example, to build Debug, in the next command, replace "Release" with "Debug"

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON ..
   ```

   Multi-config generators:

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
   ```

   **Note:** You can pass build options for `rippled` in this step.

4. Build `rippled`.

   For a single-configuration generator, it will build whatever configuration
   you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator, you
   must pass the option `--config` to select the build configuration.

   Single-config generators:

   ```
   cmake --build .
   ```
   Multi-config generators:

@@ -298,24 +444,27 @@ It fixes some source files to add missing `#include`s.

   ```
   cmake --build . --config Debug
   ```

5. Test rippled.

   Single-config generators:

   ```
   ./rippled --unittest --unittest-jobs N
   ```

   Multi-config generators:

   ```
   ./Release/rippled --unittest --unittest-jobs N
   ./Debug/rippled --unittest --unittest-jobs N
   ```

   Replace the `--unittest-jobs` parameter N with the desired unit test
   concurrency. The recommended setting is half of the number of available CPU
   cores.
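
   For example, on Linux (on macOS, `sysctl -n hw.ncpu` plays the role of `nproc`):

   ```
   ./rippled --unittest --unittest-jobs $(( $(nproc) / 2 ))
   ```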

   The location of the `rippled` binary in your build directory depends on your
   CMake generator. Pass `--help` to see the rest of the command line options.
## Coverage report

@@ -356,7 +505,7 @@ variable in `cmake`. The specific command line used to run the `gcovr` tool will
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.

By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.

@@ -372,45 +521,56 @@ cmake --build . --target coverage

After the `coverage` target is completed, the generated coverage report will be
stored inside the build directory, as either of:

- file named `coverage.`_extension_, with a suitable extension for the report format, or
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
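
For reference, the full sequence that produces the report is the one described above; a condensed sketch (options as defined in the table below, paths assumed to match the earlier build steps):

```
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dtests=ON -Dxrpld=ON ..
cmake --build . --target coverage
```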

## Options

| Option     | Default Value | Description                                                                 |
| ---------- | ------------- | --------------------------------------------------------------------------- |
| `assert`   | OFF           | Enable assertions.                                                          |
| `coverage` | OFF           | Prepare the coverage report.                                                |
| `san`      | N/A           | Enable a sanitizer with Clang. Choices are `thread` and `address`.          |
| `tests`    | OFF           | Build tests.                                                                |
| `unity`    | OFF           | Configure a unity build.                                                    |
| `xrpld`    | OFF           | Build the xrpld (`rippled`) application, and not just the libxrpl library.  |
| `werr`     | OFF           | Treat compilation warnings as errors.                                       |
| `wextra`   | OFF           | Enable additional compilation warnings.                                     |

[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
translation units. Non-unity builds may be faster for incremental builds,
and can be helpful for detecting `#include` omissions.
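
These options are ordinary CMake cache variables passed at configure time; for example, a sketch of a non-unity Debug configuration with assertions enabled (paths assumed to match the build steps above):

```
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Debug -Dassert=ON -Dunity=OFF -Dtests=ON -Dxrpld=ON ..
```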
## Troubleshooting

### Conan

After any updates or changes to dependencies, you may need to do the following:

1. Remove your build directory.
2. Remove individual libraries from the Conan cache, e.g.

   ```bash
   conan remove 'grpc/*'
   ```

   **or**

   Remove all libraries from the Conan cache:

   ```bash
   conan remove '*'
   ```

3. Re-run [conan export](#patched-recipes) if needed.
4. Re-run [conan install](#build-and-test).

### 'protobuf/port_def.inc' file not found

If `cmake --build .` results in an error due to a missing protobuf file, then
you might have generated CMake files for a different `build_type` than the
`CMAKE_BUILD_TYPE` you passed to Conan.

```
/rippled/.build/pb-xrpl.libpb/xrpl/proto/ripple.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
```

@@ -424,70 +584,21 @@ For example, if you want to build Debug:

1. For conan install, pass `--settings build_type=Debug`
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
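
Put together, a matched Debug pair looks like this (mirroring the commands from the build steps above):

```
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Debug -Dxrpld=ON -Dtests=ON ..
```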

### no std::result_of

If your compiler version is recent enough to have removed `std::result_of` as
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
definition to your build.

```
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
```
### call to 'async_teardown' is ambiguous

If you are compiling with an early version of Clang 16, then you might hit
a [regression][6] when compiling C++20 that manifests as an [error in a Boost
header][7]. You can work around it by adding this preprocessor definition:

```
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
```

### recompile with -fPIC

If you get a linker error suggesting that you recompile Boost with
position-independent code, such as:

```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
requires unsupported dynamic reloc 11; recompile with -fPIC
```

Conan most likely downloaded a bad binary distribution of the dependency.
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
for Linux. The solution is to build the dependency locally by passing
`--build boost` when calling `conan install`.

```
conan install --build boost ...
```
## Add a Dependency

If you want to experiment with a new package, follow these steps:

1. Search for the package on [Conan Center](https://conan.io/center/).
2. Modify [`conanfile.py`](./conanfile.py):
   - Add a version of the package to the `requires` property.
   - Change any default options for the package by adding them to the
     `default_options` property (with syntax `'$package:$option': $value`).
3. Modify [`CMakeLists.txt`](./CMakeLists.txt):
   - Add a call to `find_package($package REQUIRED)`.
   - Link a library from the package to the target `ripple_libs`
     (search for the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`).
4. Start coding! Don't forget to include whatever headers you need from the package.

[1]: https://github.com/conan-io/conan-center-index/issues/13168
[2]: https://en.cppreference.com/w/cpp/compiler_support/20
[3]: https://docs.conan.io/en/latest/getting_started.html
290 CONTRIBUTING.md
@@ -8,13 +8,12 @@ We assume you are familiar with the general practice of [making
contributions on GitHub][contrib]. This file includes only special
instructions specific to this project.

## Before you start

The following branches exist in the main project repository:

- `develop`: The latest set of unreleased features, and the most common
  starting point for contributions.
- `release`: The latest beta release or release candidate.
- `master`: The latest stable release.
- `gh-pages`: The documentation for this project, built by Doxygen.
@@ -27,18 +26,18 @@ In general, external contributions should be developed in your personal
[fork][forking]. Contributions from developers with write permissions
should be done in [the main repository][rippled] in a branch with
a permitted prefix. Permitted prefixes are:

- XLS-[a-zA-Z0-9]+/.+
  - e.g. XLS-0033d/mpt-clarify-STEitherAmount
- [GitHub username]/.+
  - e.g. JoelKatz/fix-rpc-webhook-queue
- [Organization name]/.+
  - e.g. ripple/antithesis

Regardless of where the branch is created, please open a _draft_ pull
request as soon as possible after pushing the branch to Github, to
increase visibility, and ease feedback during the development process.
## Major contributions

If your contribution is a major feature or breaking change, then you

@@ -55,8 +54,8 @@ responsibility of the XLS author to update the draft to match the final
implementation when its corresponding pull request is merged, unless the
author delegates that responsibility to others.

## Before making a pull request

(Or marking a draft pull request as ready.)

Changes that alter transaction processing must be guarded by an
@@ -73,30 +72,32 @@ automatic test run by `rippled --unittest`.
Otherwise, it must be a manual test.

If you create new source files, they must be organized as follows:

- If the files are in any of the `libxrpl` modules, the headers (`.h`) must go
  under `include/xrpl`, and source (`.cpp`) files must go under
  `src/libxrpl`.
- All other non-test files must go under `src/xrpld`.
- All test source files must go under `src/test`.

The source must be formatted according to the style guide below.

Header includes must be [levelized](.github/scripts/levelization).

Changes should usually be squashed down into a single commit.
Some larger or more complicated change sets make more sense,
and are easier to review if organized into multiple logical commits.
Either way, all commits should fit the following criteria:

- Changes should be presented in a single commit or a logical
  sequence of commits.
  Specifically, chronological commits that simply
  reflect the history of how the author implemented
  the change, "warts and all", are not useful to
  reviewers.
- Every commit should have a [good message](#good-commit-messages)
  to explain a specific aspect of the change.
- Every commit should be signed.
- Every commit should be well-formed (builds successfully,
  unit tests passing), as this helps to resolve merge
  conflicts, and makes it easier to use `git bisect`
  to find bugs.
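
For instance, signing happens at commit time (this assumes a signing key is already configured in git, as described in the maintainer instructions below):

```
git commit -S -m "fix: Example subject in the imperative mood"
```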
@@ -108,13 +109,14 @@ Refer to
for general rules on writing a good commit message.

tl;dr

> 1. Separate subject from body with a blank line.
> 2. Limit the subject line to 50 characters.
>    - [...]shoot for 50 characters, but consider 72 the hard limit.
> 3. Capitalize the subject line.
> 4. Do not end the subject line with a period.
> 5. Use the imperative mood in the subject line.
>    - A properly formed Git commit subject line should always be able
>      to complete the following sentence: "If applied, this commit will
>      _your subject line here_".
> 6. Wrap the body at 72 characters.
@@ -122,16 +124,17 @@ tl;dr

In addition to those guidelines, please add one of the following
prefixes to the subject line if appropriate.

- `fix:` - The primary purpose is to fix an existing bug.
- `perf:` - The primary purpose is performance improvements.
- `refactor:` - The changes refactor code without affecting
  functionality.
- `test:` - The changes _only_ affect unit tests.
- `docs:` - The changes _only_ affect documentation. This can
  include code comments in addition to `.md` files like this one.
- `build:` - The changes _only_ affect the build process,
  including CMake and/or Conan settings.
- `chore:` - Other tasks that don't affect the binary, but don't fit
  any of the other cases. e.g. formatting, git settings, updating
  Github Actions jobs.
@@ -143,9 +146,10 @@ unit tests for Feature X (#1234)`.

In general, pull requests use `develop` as the base branch.
The exceptions are:

- Fixes and improvements to a release candidate use `release` as the
  base.
- Hotfixes use `master` as the base.

If your changes are not quite ready, but you want to make them easily available
for preliminary examination or review, you can create a "Draft" pull request.
@@ -182,11 +186,11 @@ meets a few criteria:
2. All CI checks must be complete and passed. (One-off failures may
   be acceptable if they are related to a known issue.)
3. The PR must have a [good commit message](#good-commit-messages).
   - If the PR started with a good commit message, and it doesn't
     need to be updated, the author can indicate that in a comment.
   - Any contributor, preferably the author, can leave a comment
     suggesting a commit message.
   - If the author squashes and rebases the code in preparation for
     merge, they should also ensure the commit message(s) are updated
     as well.
4. The PR branch must be up to date with the base branch (usually
@@ -208,7 +212,6 @@ This is a non-exhaustive list of recommended style guidelines. These are
not always strictly enforced and serve as a way to keep the codebase
coherent rather than a set of _thou shalt not_ commandments.

## Formatting

All code must conform to `clang-format` version 18,

@@ -237,6 +240,7 @@ To download the patch file:

5. Commit and push.

You can install a pre-commit hook to automatically run `clang-format` before every commit:

```
pip3 install pre-commit
pre-commit install
```
@@ -267,49 +271,51 @@ locations, where the reporting of contract violations on the Antithesis
platform is either not possible or not useful.

For this reason:

- The locations where `assert` or `assert(false)` contracts should continue to be used:
  - `constexpr` functions
  - unit tests i.e. files under `src/test`
  - unit tests-related modules (files under `beast/test` and `beast/unit_test`)
- Outside of the listed locations, do not use `assert`; use `XRPL_ASSERT` instead,
  giving it a unique name, with a short description of the contract.
- Outside of the listed locations, do not use `assert(false)`; use
  `UNREACHABLE` instead, giving it a unique name, with a description of the
  condition being violated.
- The contract name should start with a full name (including scope) of the
  function, optionally a named lambda, followed by a colon `:` and a brief
  (typically at most five words) description. `UNREACHABLE` contracts
  can use slightly longer descriptions. If there are multiple overloads of the
  function, use common sense to balance both brevity and unambiguity of the
  function name. NOTE: the purpose of the name is to provide a stable means of
  unique identification of every contract; for this reason try to avoid elements
  which can change in some obvious refactors or when reinforcing the condition.
- Contract description typically (except for `UNREACHABLE`) should describe the
  _expected_ condition, as in "I assert that _expected_ is true".
- Contract description for `UNREACHABLE` should describe the _unexpected_
  situation which caused the line to have been reached.
- Example good name for an
  `UNREACHABLE` macro: `"Json::operator==(Value, Value) : invalid type"`; example
  good name for an `XRPL_ASSERT` macro: `"Json::Value::asCString : valid type"`.
- Example **bad** name:
  `"RFC1751::insert(char* s, int x, int start, int length) : length is greater than or equal zero"`
  (missing namespace, unnecessary full function signature, description too verbose).
  Good name: `"ripple::RFC1751::insert : minimum length"`.
- In **few** well-justified cases a non-standard name can be used, in which case a
  comment should be placed to explain the rationale (example in `contract.cpp`).
- Do **not** rename a contract without a good reason (e.g. the name no longer
  reflects the location or the condition being checked).
- Do not use `std::unreachable`.
- Do not put contracts where they can be violated by an external condition
  (e.g. timing, data payload before mandatory validation etc.) as this creates
  bogus bug reports (and causes crashes of Debug builds).
## Unit Tests

To execute all unit tests:

`rippled --unittest --unittest-jobs=<number of cores>`

(Note: Using multiple cores on a Mac M1 can cause spurious test failures. The
cause is still under investigation. If you observe this problem, try specifying fewer jobs.)

To run a specific set of test suites:

@@ -317,10 +323,11 @@ To run a specific set of test suites:

```
rippled --unittest TestSuiteName
```

Note: In this example, all tests with prefix `TestSuiteName` will be run, so if
`TestSuiteName1` and `TestSuiteName2` both exist, then both tests will run.
Alternatively, if the unit test name finds an exact match, it will stop
doing partial matches, i.e. if a unit test with a title of `TestSuiteName`
exists, then no other unit test will be executed, apart from `TestSuiteName`.
## Avoid

@@ -336,7 +343,6 @@ exists, then no other unit test will be executed, apart from `TestSuiteName`.

explanatory comments.
8. Importing new libraries unless there is a very good reason to do so.

## Seek to

9. Extend functionality of existing code rather than creating new code.

@@ -351,14 +357,12 @@ exists, then no other unit test will be executed, apart from `TestSuiteName`.

14. Provide as many comments as you feel that a competent programmer
    would need to understand what your code does.
# Maintainers

Maintainers are ecosystem participants with elevated access to the repository.
They are able to push new code, make decisions on when a release should be
made, etc.

## Adding and removing

New maintainers can be proposed by two existing maintainers, subject to a vote

@@ -373,47 +377,41 @@ A minimum of 60% agreement and 50% participation are required.

The XRP Ledger Foundation will have the ability, for cause, to remove an
existing maintainer without a vote.

## Current Maintainers

Maintainers are users with maintain or admin access to the repo.

- [bthomee](https://github.com/bthomee) (Ripple)
- [intelliot](https://github.com/intelliot) (Ripple)
- [JoelKatz](https://github.com/JoelKatz) (Ripple)
- [legleux](https://github.com/legleux) (Ripple)
- [mankins](https://github.com/mankins) (XRP Ledger Foundation)
- [WietseWind](https://github.com/WietseWind) (XRPL Labs + XRP Ledger Foundation)
- [ximinez](https://github.com/ximinez) (Ripple)
## Current Code Reviewers

Code Reviewers are developers who have the ability to review, approve, and
in some cases merge source code changes.

- [a1q123456](https://github.com/a1q123456) (Ripple)
- [Bronek](https://github.com/Bronek) (Ripple)
- [bthomee](https://github.com/bthomee) (Ripple)
- [ckeshava](https://github.com/ckeshava) (Ripple)
- [dangell7](https://github.com/dangell7) (XRPL Labs)
- [godexsoft](https://github.com/godexsoft) (Ripple)
- [gregtatcam](https://github.com/gregtatcam) (Ripple)
- [kuznetsss](https://github.com/kuznetsss) (Ripple)
- [lmaisons](https://github.com/lmaisons) (Ripple)
- [mathbunnyru](https://github.com/mathbunnyru) (Ripple)
- [mvadari](https://github.com/mvadari) (Ripple)
- [oleks-rip](https://github.com/oleks-rip) (Ripple)
- [PeterChen13579](https://github.com/PeterChen13579) (Ripple)
- [pwang200](https://github.com/pwang200) (Ripple)
- [q73zhao](https://github.com/q73zhao) (Ripple)
- [shawnxie999](https://github.com/shawnxie999) (Ripple)
- [Tapanito](https://github.com/Tapanito) (Ripple)
- [ximinez](https://github.com/ximinez) (Ripple)

Developers not on this list are able and encouraged to submit feedback
on pending code changes (open pull requests).
@@ -423,6 +421,7 @@ on pending code changes (open pull requests).

These instructions assume you have your git upstream remotes configured
to avoid accidental pushes to the main repo, and a remote group
specifying both of them. e.g.

```
$ git remote -v | grep upstream
upstream https://github.com/XRPLF/rippled.git (fetch)
```
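
One possible way to produce such a setup by hand (the remote names are the ones used throughout this section; the [setup-upstreams] script automates this):

```
git remote add upstream https://github.com/XRPLF/rippled.git
git remote add upstream-push git@github.com:XRPLF/rippled.git
git config remotes.upstreams "upstream upstream-push"
```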
@@ -437,6 +436,7 @@ upstream upstream-push

You can use the [setup-upstreams] script to set this up.

It also assumes you have a default gpg signing key set up in git. e.g.

```
$ git config user.signingkey
968479A1AFF927E37D1A566BB5690EEEBB952194
```
@@ -461,8 +461,8 @@ the suggested commit message, or modify it as needed.

#### Slightly more complicated pull requests

Some pull requests need to be pushed to `develop` as more than one
commit. A PR author may _request_ to merge as separate commits. They
must _justify_ why separate commits are needed, and _specify_ how they
would like the commits to be merged. If you disagree with the author,
discuss it with them directly.
@@ -471,20 +471,22 @@ fast forward only merge (`--ff-only`) on the command line and push to
`develop`.

Some examples of when separate commits are worthwhile are:

1. PRs where source files are reorganized in multiple steps.
2. PRs where the commits are mostly independent and _could_ be separate
   PRs, but are pulled together into one PR under a commit theme or
   issue.
3. PRs that are complicated enough that `git bisect` would not be much
   help if it determined this PR introduced a problem.

Either way, check that:

- The commits are based on the current tip of `develop`.
- The commits are clean: No merge commits (except when reverse
  merging), no "[FOLD]" or "fixup!" messages.
- All commits are signed. If the commits are not signed by the author, use
  `git commit --amend -S` to sign them yourself.
- At least one (but preferably all) of the commits has the PR number
  in the commit message.
The "Create a merge commit" and "Rebase and merge" options should be
@@ -502,13 +504,13 @@ Rippled uses a linear workflow model that can be summarized as:

1. In between releases, developers work against the `develop` branch.
2. Periodically, a maintainer will build and tag a beta version from
   `develop`, which is pushed to `release`.
   - Betas are usually released every two to three weeks, though that
     schedule can vary depending on progress, availability, and other
     factors.
3. When the changes in `develop` are considered stable and mature enough
   to be ready to release, a release candidate (RC) is built and tagged
   from `develop`, and merged to `release`.
   - Further development for that release (primarily fixes) then
     continues against `release`, while other development continues on
     `develop`. Effectively, `release` is forked from `develop`. Changes
     to `release` must be reverse merged to `develop`.
@@ -543,6 +545,7 @@ Rippled uses a linear workflow model that can be summarized as:
the version number, etc.

The workflow may look something like:

```
git fetch --multiple upstreams user1 user2 user3 [...]
git checkout -B release-next --no-track upstream/develop
```

@@ -581,8 +584,9 @@ This includes, betas, and the first release candidate (RC).

1. If you didn't create one [preparing the `develop`
   branch](#preparing-the-develop-branch), ensure there is no old
   `release-next` branch hanging around. Then make a `release-next`
   branch that only changes the version number. e.g.

   ```
   git fetch upstreams
   ```
@@ -603,25 +607,30 @@ git push upstream-push

   ```
   git fetch upstreams
   git branch --set-upstream-to=upstream/release-next
   ```

   You can also use the [update-version] script.

2. Create a Pull Request for `release-next` with **`develop`** as
   the base branch.

   1. Use the title "[TRIVIAL] Set version to X.X.X-bX".
   2. Instead of the default description template, use the following:

   ```
   ## High Level Overview of Change

   This PR only changes the version number. It will be merged as
   soon as Github CI actions successfully complete.
   ```

3. Wait for CI to successfully complete, and get someone to approve
   the PR. (It is safe to ignore known CI issues.)
4. Push the updated `develop` branch using your `release-next`
   branch. **Do not use the Github UI. It's important to preserve
   commit IDs.**

   ```
   git push upstream-push release-next:develop
   ```

5. In the unlikely event that the push fails because someone has merged
   something else in the meantime, rebase your branch onto the updated
   `develop` branch, push again, and go back to step 3.
@@ -630,22 +639,25 @@ git push upstream-push release-next:develop
|
||||
7. Once this is done, forward progress on `develop` can continue
|
||||
(other PRs may be merged).
|
||||
8. Now create a Pull Request for `release-next` with **`release`** as
|
||||
the base branch. Instead of the default template, reuse and update
|
||||
the base branch. Instead of the default template, reuse and update
|
||||
the message from the previous release. Include the following verbiage
|
||||
somewhere in the description:
|
||||
|
||||
```
|
||||
The base branch is `release`. [All releases (including
|
||||
betas)](https://github.com/XRPLF/rippled/blob/develop/CONTRIBUTING.md#before-you-start)
|
||||
go in `release`. This PR branch will be pushed directly to `release` (not
|
||||
squashed or rebased, and not using the GitHub UI).
|
||||
```
|

7. Sign-offs for the three platforms (Linux, Mac, Windows) usually occur
   offline, but at least one approval will be needed on the PR.
   - If issues are discovered during testing, simply abandon the
     release. It's easy to start a new release; it should be easy to
     abandon one. **DO NOT REUSE THE VERSION NUMBER.** e.g. If you
     abandon 2.4.0-b1, the next attempt will be 2.4.0-b2.
8. Once everything is ready to go, push to `release`.

   ```
   git fetch upstreams

@@ -666,23 +678,28 @@ git log -1 --oneline
# Other branches, including some from upstream-push, may also be
# present.
```

9. Tag the release, too.

   ```
   git tag <version number>
   git push upstream-push <version number>
   ```

10. Delete the `release-next` branch on the repo. Use the Github UI or:

    ```
    git push --delete upstream-push release-next
    ```

11. Finally [create a new release on
    Github](https://github.com/XRPLF/rippled/releases).

#### Release candidates after the first

Once the first release candidate is [merged into
release](#making-the-release), then `release` and `develop` _are allowed
to diverge_.

If a bug or issue is discovered in a version that has a release
candidate being tested, any fix and new version will need to be applied
@@ -690,7 +707,7 @@ against `release`, then reverse-merged to `develop`. This helps keep git
history as linear as possible.

A `release-next` branch will be created from `release`, and any further
work for that release must be based on `release-next`. Specifically,
PRs must use `release-next` as the base, and those PRs will be merged
directly to `release-next` when approved. Changes should be restricted
to bug fixes, but other changes may be necessary from time to time.

@@ -713,17 +730,21 @@ Once the RC is merged and tagged, it needs to be reverse merged into

1. Create a branch, based on `upstream/develop`.
   The branch name is not important, but could include "mergeNNNrcN".
   E.g. For release A.B.C-rcD, use `mergeABCrcD`.

   ```
   git fetch upstreams

   git checkout --no-track -b mergeABCrcD upstream/develop
   ```

2. Merge `release` into your branch.

   ```
   # I like the "--edit --log --verbose" parameters, but they are
   # not required.
   git merge upstream/release
   ```

3. `BuildInfo.cpp` will have a conflict with the version number.
   Resolve it with the version from `develop` - the higher version.
   (A sketch of this resolution appears at the end of this section.)

4. Push your branch to your repo (or `upstream` if you have permission),
@@ -731,22 +752,27 @@ git merge upstream/release
   simply indicate that this is a merge of the RC. The "Context" should
   summarize the changes from the RC. Include the following text
   prominently:

   ```
   This PR must be merged manually using a push. Do not use the Github UI.
   ```

5. Depending on the complexity of the changes, and/or merge conflicts,
   the PR may need a thorough review, or just a sign-off that the
   merge was done correctly.
6. If `develop` is updated before this PR is merged, do not merge
   `develop` back into your branch. Instead rebase preserving merges,
   or do the merge again. (See also the `rerere` git config setting.)

   ```
   git rebase --rebase-merges upstream/develop
   # OR
   git reset --hard upstream/develop
   git merge upstream/release
   ```

7. When the PR is ready, push it to `develop`.

   ```
   git fetch upstreams

@@ -757,8 +783,8 @@ git push upstream-push mergeABCrcD:develop

git fetch upstreams
```

Development on `develop` can proceed as normal.
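
As noted in step 3, the merge will stop on the `BuildInfo.cpp` version
conflict. A hedged sketch of the resolution; the path is an assumption,
so locate the conflicted file with `git status` first:

```
# Keep develop's (higher) version: on this develop-based branch it is "ours".
git checkout --ours src/libxrpl/protocol/BuildInfo.cpp   # hypothetical path
git add src/libxrpl/protocol/BuildInfo.cpp
git commit --no-edit

# Optional: remember the resolution in case the merge is redone (step 6).
git config rerere.enabled true
```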

#### Final releases

@@ -773,7 +799,7 @@ internally as if they were RCs (at minimum, ensuring unit tests pass,
and the app starts, syncs, and stops cleanly across all three
platforms.)

_If in doubt, make an RC first._

The process for building a final release is very similar to [the process
for building a beta](#making-the-release), except the code will be
@@ -785,20 +811,23 @@ moving from `release` to `master` instead of from `develop` to
   number. As above, or using the
   [update-version] script.
2. Create a Pull Request for `master-next` with **`master`** as
   the base branch. Instead of the default template, reuse and update
   the message from the previous final release. Include the following verbiage
   somewhere in the description:

   ```
   The base branch is `master`. This PR branch will be pushed directly to
   `release` and `master` (not squashed or rebased, and not using the
   GitHub UI).
   ```

7. Sign-offs for the three platforms (Linux, Mac, Windows) usually occur
   offline, but at least one approval will be needed on the PR.
   - If issues are discovered during testing, close the PR, delete
     `master-next`, and move development back to `release`, [issuing
     more RCs as necessary](#release-candidates-after-the-first).
8. Once everything is ready to go, push to `release` and `master`.

   ```
   git fetch upstreams

@@ -821,15 +850,20 @@ git log -1 --oneline
# Other branches, including some from upstream-push, may also be
# present.
```

9. Tag the release, too.

   ```
   git tag <version number>
   git push upstream-push <version number>
   ```

10. Delete the `master-next` branch on the repo. Use the Github UI or:

    ```
    git push --delete upstream-push master-next
    ```

11. [Create a new release on
    Github](https://github.com/XRPLF/rippled/releases). Be sure that
    "Set as the latest release" is checked.

@@ -856,11 +890,13 @@ any branch. When it's ready to merge, jump to step 3 using your branch
instead of `master-next`.

1. Create a `master-next` branch from `master`.

   ```
   git checkout --no-track -b master-next upstream/master
   git push upstream-push
   git fetch upstreams
   ```

2. Open any PRs for the pending hotfix using `master-next` as the base,
   so they can be merged directly into it. Unlike `develop`, though,
   `master-next` can be thrown away and recreated if necessary.
@@ -868,19 +904,22 @@
   steps as above, or use the
   [update-version] script.
4. Create a Pull Request for `master-next` with **`master`** as
   the base branch. Instead of the default template, reuse and update
   the message from the previous final release. Include the following verbiage
   somewhere in the description:

   ```
   The base branch is `master`. This PR branch will be pushed directly to
   `master` (not squashed or rebased, and not using the GitHub UI).
   ```

7. Sign-offs for the three platforms (Linux, Mac, Windows) usually occur
   offline, but at least one approval will be needed on the PR.
   - If issues are discovered during testing, update `master-next` as
     needed, but ensure that the changes are properly squashed, and the
     version setting commit remains last.
8. Once everything is ready to go, push to `master` **only**.

   ```
   git fetch upstreams

@@ -901,15 +940,20 @@ git log -1 --oneline
# Other branches, including some from upstream-push, may also be
# present.
```

9. Tag the release, too.

   ```
   git tag <version number>
   git push upstream-push <version number>
   ```

10. Delete the `master-next` branch on the repo.

    ```
    git push --delete upstream-push master-next
    ```

11. [Create a new release on
    Github](https://github.com/XRPLF/rippled/releases). Be sure that
    "Set as the latest release" is checked.

@@ -921,17 +965,21 @@ Once the hotfix is released, it needs to be reverse merged into

1. Create a branch in your own repo, based on `upstream/develop`.
   The branch name is not important, but could include "mergeNNN".
   E.g. For release 2.2.3, use `merge223`.

   ```
   git fetch upstreams

   git checkout --no-track -b merge223 upstream/develop
   ```

2. Merge master into your branch.

   ```
   # I like the "--edit --log --verbose" parameters, but they are
   # not required.
   git merge upstream/master
   ```

3. `BuildInfo.cpp` will have a conflict with the version number.
   Resolve it with the version from `develop` - the higher version.

4. Push your branch to your repo, and open a normal PR against
@@ -939,22 +987,27 @@ git merge upstream/master
   is a merge of the hotfix version. The "Context" should summarize
   the changes from the hotfix. Include the following text
   prominently:

   ```
   This PR must be merged manually using a --ff-only merge. Do not use the Github UI.
   ```

5. Depending on the complexity of the hotfix, and/or merge conflicts,
   the PR may need a thorough review, or just a sign-off that the
   merge was done correctly.
6. If `develop` is updated before this PR is merged, do not merge
   `develop` back into your branch. Instead rebase preserving merges,
   or do the merge again. (See also the `rerere` git config setting.)

   ```
   git rebase --rebase-merges upstream/develop
   # OR
   git reset --hard upstream/develop
   git merge upstream/master
   ```

7. When the PR is ready, push it to `develop`.

   ```
   git fetch upstreams

@@ -963,6 +1016,7 @@ git log --show-signature "upstream/develop..HEAD"

git push upstream-push HEAD:develop
```

Development on `develop` can proceed as normal. It is recommended to
create a beta (or RC) immediately to ensure that everything worked as
expected.

@@ -977,12 +1031,13 @@ a significant fraction of users, which would necessitate a hotfix / point
release to that version as well as any later versions.

This scenario would follow the same basic procedure as above,
except that _none_ of `develop`, `release`, or `master`
would be touched during the release process.

In this example, consider if version 2.1.1 needed to be patched.

1. Create two branches in the main (`upstream`) repo.

   ```
   git fetch upstreams

@@ -996,6 +1051,7 @@ git push upstream-push

   git fetch upstreams
   ```

2. Work continues as above, except using `master-2.1.2` as
   the base branch for any merging, packaging, etc.
3. After the release is tagged and packages are built, you could

@@ -1,4 +1,4 @@
ISC License

Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
Copyright (c) 2012-2020, the XRP Ledger developers.

@@ -14,4 +14,3 @@ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

README.md

@@ -5,17 +5,19 @@
The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powered by a network of peer-to-peer nodes. The XRP Ledger uses a novel Byzantine Fault Tolerant consensus algorithm to settle and record transactions in a secure distributed database without a central operator.

## XRP

[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.

## rippled

The server software that powers the XRP Ledger is called `rippled` and is available in this repository under the permissive [ISC open-source license](LICENSE.md). The `rippled` server software is written primarily in C++ and runs on a variety of platforms. The `rippled` server software can run in several modes depending on its [configuration](https://xrpl.org/rippled-server-modes.html).

If you are interested in running an **API Server** (including a **Full History Server**), take a look at [Clio](https://github.com/XRPLF/clio). (rippled Reporting Mode has been replaced by Clio.)

### Build from Source

- [Read the build instructions in `BUILD.md`](BUILD.md)
- If you encounter any issues, please [open an issue](https://github.com/XRPLF/rippled/issues)
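
For orientation only, a build might look like the sketch below; `BUILD.md` is authoritative and its Conan/CMake steps may differ:

```
# Hedged sketch; follow BUILD.md for the supported steps and options.
mkdir .build && cd .build
conan install .. --build missing   # resolve C++ dependencies
cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel
```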

## Key Features of the XRP Ledger

@@ -35,19 +37,18 @@ If you are interested in running an **API Server** (including a **Full History S
[Modern Features for Smart Contracts]: https://xrpl.org/xrp-ledger-overview.html#modern-features-for-smart-contracts
[On-Ledger Decentralized Exchange]: https://xrpl.org/xrp-ledger-overview.html#on-ledger-decentralized-exchange

## Source Code

Here are some good places to start learning the source code:

- Read the markdown files in the source tree: `src/ripple/**/*.md`.
- Read [the levelization document](.github/scripts/levelization) to get an idea of the internal dependency graph.
- In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component.

### Repository Contents

| Folder      | Contents                                          |
| :---------- | :------------------------------------------------ |
| `./bin`     | Scripts and data files for Ripple integrators.    |
| `./Builds`  | Platform-specific guides for building `rippled`.  |
| `./docs`    | Source documentation files and doxygen config.    |
@@ -57,15 +58,14 @@ Here are some good places to start learning the source code:

Some of the directories under `src` are external repositories included using
git-subtree. See those directories' README files for more details.

## Additional Documentation

- [XRP Ledger Dev Portal](https://xrpl.org/)
- [Setup and Installation](https://xrpl.org/install-rippled.html)
- [Source Documentation (Doxygen)](https://xrplf.github.io/rippled/)

## See Also

- [Clio API Server for the XRP Ledger](https://github.com/XRPLF/clio)
- [Mailing List for Release Announcements](https://groups.google.com/g/ripple-server)
- [Learn more about the XRP Ledger (YouTube)](https://www.youtube.com/playlist?list=PLJQ55Tj1hIVZtJ_JdTvSum2qMTsedWkNi)

SECURITY.md

@@ -2,7 +2,6 @@

For more details on operating an XRP Ledger server securely, please visit https://xrpl.org/manage-the-rippled-server.html.

# Security Policy

## Supported Versions
@@ -77,13 +76,14 @@ The amount paid varies dramatically. Vulnerabilities that are harmless on their

To report a qualifying bug, please send a detailed report to:

| Email Address | bugs@ripple.com                                      |
| :-----------: | :--------------------------------------------------- |
| Short Key ID  | `0xC57929BE`                                         |
| Long Key ID   | `0xCD49A0AFC57929BE`                                 |
| Fingerprint   | `24E6 3B02 37E0 FA9C 5E96 8974 CD49 A0AF C579 29BE`  |
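
The key can be fetched and checked against the fingerprint above with standard GnuPG commands (an illustrative invocation, not part of the policy):

```
gpg --keyserver keyserver.ubuntu.com --recv-keys 0xCD49A0AFC57929BE
gpg --fingerprint bugs@ripple.com   # compare with the fingerprint above
```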

The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is:

```
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBFUwGHYBEAC0wpGpBPkd8W1UdQjg9+cEFzeIEJRaoZoeuJD8mofwI5Ejnjdt

bin/browser.js

@@ -1,470 +0,0 @@
#!/usr/bin/node
//
// ledger?l=L
// transaction?h=H
// ledger_entry?l=L&h=H
// account?l=L&a=A
// directory?l=L&dir_root=H&i=I
// directory?l=L&o=A&i=I // owner directory
// offer?l=L&offer=H
// offer?l=L&account=A&i=I
// ripple_state=l=L&a=A&b=A&c=C
// account_lines?l=L&a=A
//
// A=address
// C=currency 3 letter code
// H=hash
// I=index
// L=current | closed | validated | index | hash
//

var async = require("async");
var extend = require("extend");
var http = require("http");
var url = require("url");

var Remote = require("ripple-lib").Remote;

var program = process.argv[1];

var httpd_response = function (res, opts) {
  var self=this;

  res.statusCode = opts.statusCode;
  res.end(
    "<HTML>"
      + "<HEAD><TITLE>Title</TITLE></HEAD>"
      + "<BODY BACKGROUND=\"#FFFFFF\">"
      + "State:" + self.state
      + "<UL>"
      + "<LI><A HREF=\"/\">home</A>"
      + "<LI>" + html_link('r4EM4gBQfr1QgQLXSPF4r7h84qE9mb6iCC')
//    + "<LI><A HREF=\""+test+"\">rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh</A>"
      + "<LI><A HREF=\"/ledger\">ledger</A>"
      + "</UL>"
      + (opts.body || '')
      + '<HR><PRE>'
      + (opts.url || '')
      + '</PRE>'
      + "</BODY>"
      + "</HTML>"
    );
};

var html_link = function (generic) {
  return '<A HREF="' + build_uri({ type: 'account', account: generic}) + '">' + generic + '</A>';
};

// Build a link to a type.
var build_uri = function (params, opts) {
  var c;

  if (params.type === 'account') {
    c = {
      pathname: 'account',
      query: {
        l: params.ledger,
        a: params.account,
      },
    };

  } else if (params.type === 'ledger') {
    c = {
      pathname: 'ledger',
      query: {
        l: params.ledger,
      },
    };

  } else if (params.type === 'transaction') {
    c = {
      pathname: 'transaction',
      query: {
        h: params.hash,
      },
    };
  } else {
    c = {};
  }

  opts = opts || {};

  c.protocol = "http";
  c.hostname = opts.hostname || self.base.hostname;
  c.port = opts.port || self.base.port;

  return url.format(c);
};

var build_link = function (item, link) {
  console.log(link);
  return "<A HREF=" + link + ">" + item + "</A>";
};

var rewrite_field = function (type, obj, field, opts) {
  if (field in obj) {
    obj[field] = rewrite_type(type, obj[field], opts);
  }
};

var rewrite_type = function (type, obj, opts) {
  if ('amount' === type) {
    if ('string' === typeof obj) {
      // XRP.
      return '<B>' + obj + '</B>';

    } else {
      rewrite_field('address', obj, 'issuer', opts);

      return obj;
    }
    return build_link(
      obj,
      build_uri({
        type: 'account',
        account: obj
      }, opts)
    );
  }
  if ('address' === type) {
    return build_link(
      obj,
      build_uri({
        type: 'account',
        account: obj
      }, opts)
    );
  }
  else if ('ledger' === type) {
    return build_link(
      obj,
      build_uri({
        type: 'ledger',
        ledger: obj,
      }, opts)
    );
  }
  else if ('node' === type) {
    // A node
    if ('PreviousTxnID' in obj)
      obj.PreviousTxnID = rewrite_type('transaction', obj.PreviousTxnID, opts);

    if ('Offer' === obj.LedgerEntryType) {
      if ('NewFields' in obj) {
        if ('TakerGets' in obj.NewFields)
          obj.NewFields.TakerGets = rewrite_type('amount', obj.NewFields.TakerGets, opts);

        if ('TakerPays' in obj.NewFields)
          obj.NewFields.TakerPays = rewrite_type('amount', obj.NewFields.TakerPays, opts);
      }
    }

    obj.LedgerEntryType = '<B>' + obj.LedgerEntryType + '</B>';

    return obj;
  }
  else if ('transaction' === type) {
    // Reference to a transaction.
    return build_link(
      obj,
      build_uri({
        type: 'transaction',
        hash: obj
      }, opts)
    );
  }

  return 'ERROR: ' + type;
};

var rewrite_object = function (obj, opts) {
  var out = extend({}, obj);

  rewrite_field('address', out, 'Account', opts);

  rewrite_field('ledger', out, 'parent_hash', opts);
  rewrite_field('ledger', out, 'ledger_index', opts);
  rewrite_field('ledger', out, 'ledger_current_index', opts);
  rewrite_field('ledger', out, 'ledger_hash', opts);

  if ('ledger' in obj) {
    // It's a ledger header.
    out.ledger = rewrite_object(out.ledger, opts);

    if ('ledger_hash' in out.ledger)
      out.ledger.ledger_hash = '<B>' + out.ledger.ledger_hash + '</B>';

    delete out.ledger.hash;
    delete out.ledger.totalCoins;
  }

  if ('TransactionType' in obj) {
    // It's a transaction.
    out.TransactionType = '<B>' + obj.TransactionType + '</B>';

    rewrite_field('amount', out, 'TakerGets', opts);
    rewrite_field('amount', out, 'TakerPays', opts);
    rewrite_field('ledger', out, 'inLedger', opts);

    out.meta.AffectedNodes = out.meta.AffectedNodes.map(function (node) {
      var kind = 'CreatedNode' in node
        ? 'CreatedNode'
        : 'ModifiedNode' in node
          ? 'ModifiedNode'
          : 'DeletedNode' in node
            ? 'DeletedNode'
            : undefined;

      if (kind) {
        node[kind] = rewrite_type('node', node[kind], opts);
      }
      return node;
    });
  }
  else if ('node' in obj && 'LedgerEntryType' in obj.node) {
    // Its a ledger entry.

    if (obj.node.LedgerEntryType === 'AccountRoot') {
      rewrite_field('address', out.node, 'Account', opts);
      rewrite_field('transaction', out.node, 'PreviousTxnID', opts);
      rewrite_field('ledger', out.node, 'PreviousTxnLgrSeq', opts);
    }

    out.node.LedgerEntryType = '<B>' + out.node.LedgerEntryType + '</B>';
  }

  return out;
};

var augment_object = function (obj, opts, done) {
  if (obj.node.LedgerEntryType == 'AccountRoot') {
    var tx_hash = obj.node.PreviousTxnID;
    var tx_ledger = obj.node.PreviousTxnLgrSeq;

    obj.history = [];

    async.whilst(
      function () { return tx_hash; },
      function (callback) {
        // console.log("augment_object: request: %s %s", tx_hash, tx_ledger);
        opts.remote.request_tx(tx_hash)
          .on('success', function (m) {
            tx_hash = undefined;
            tx_ledger = undefined;

            //console.log("augment_object: ", JSON.stringify(m));
            m.meta.AffectedNodes.filter(function(n) {
              // console.log("augment_object: ", JSON.stringify(n));
              // if (n.ModifiedNode)
              //   console.log("augment_object: %s %s %s %s %s %s/%s", 'ModifiedNode' in n, n.ModifiedNode && (n.ModifiedNode.LedgerEntryType === 'AccountRoot'), n.ModifiedNode && n.ModifiedNode.FinalFields && (n.ModifiedNode.FinalFields.Account === obj.node.Account), Object.keys(n)[0], n.ModifiedNode && (n.ModifiedNode.LedgerEntryType), obj.node.Account, n.ModifiedNode && n.ModifiedNode.FinalFields && n.ModifiedNode.FinalFields.Account);
              // if ('ModifiedNode' in n && n.ModifiedNode.LedgerEntryType === 'AccountRoot')
              // {
              //   console.log("***: ", JSON.stringify(m));
              //   console.log("***: ", JSON.stringify(n));
              // }
              return 'ModifiedNode' in n
                && n.ModifiedNode.LedgerEntryType === 'AccountRoot'
                && n.ModifiedNode.FinalFields
                && n.ModifiedNode.FinalFields.Account === obj.node.Account;
            })
            .forEach(function (n) {
              tx_hash = n.ModifiedNode.PreviousTxnID;
              tx_ledger = n.ModifiedNode.PreviousTxnLgrSeq;

              obj.history.push({
                tx_hash: tx_hash,
                tx_ledger: tx_ledger
              });
              console.log("augment_object: next: %s %s", tx_hash, tx_ledger);
            });

            callback();
          })
          .on('error', function (m) {
            callback(m);
          })
          .request();
      },
      function (err) {
        if (err) {
          done();
        }
        else {
          async.forEach(obj.history, function (o, callback) {
            opts.remote.request_account_info(obj.node.Account)
              .ledger_index(o.tx_ledger)
              .on('success', function (m) {
                //console.log("augment_object: ", JSON.stringify(m));
                o.Balance = m.account_data.Balance;
                // o.account_data = m.account_data;
                callback();
              })
              .on('error', function (m) {
                o.error = m;
                callback();
              })
              .request();
          },
          function (err) {
            done(err);
          });
        }
      });
  }
  else {
    done();
  }
};

if (process.argv.length < 4 || process.argv.length > 7) {
  console.log("Usage: %s ws_ip ws_port [<ip> [<port> [<start>]]]", program);
}
else {
  var ws_ip = process.argv[2];
  var ws_port = process.argv[3];
  var ip = process.argv.length > 4 ? process.argv[4] : "127.0.0.1";
  var port = process.argv.length > 5 ? process.argv[5] : "8080";

  // console.log("START");
  var self = this;

  var remote = (new Remote({
      websocket_ip: ws_ip,
      websocket_port: ws_port,
      trace: false
    }))
    .on('state', function (m) {
      console.log("STATE: %s", m);

      self.state = m;
    })
    // .once('ledger_closed', callback)
    .connect()
    ;

  self.base = {
    hostname: ip,
    port: port,
    remote: remote,
  };

  // console.log("SERVE");
  var server = http.createServer(function (req, res) {
    var input = "";

    req.setEncoding();

    req.on('data', function (buffer) {
      // console.log("DATA: %s", buffer);
      input = input + buffer;
    });

    req.on('end', function () {
      // console.log("URL: %s", req.url);
      // console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));

      var _parsed = url.parse(req.url, true);
      var _url = JSON.stringify(_parsed, undefined, 2);

      // console.log("HEADERS: %s", JSON.stringify(_parsed, undefined, 2));
      if (_parsed.pathname === "/account") {
        var request = remote
          .request_ledger_entry('account_root')
          .ledger_index(-1)
          .account_root(_parsed.query.a)
          .on('success', function (m) {
            // console.log("account_root: %s", JSON.stringify(m, undefined, 2));

            augment_object(m, self.base, function() {
              httpd_response(res,
                {
                  statusCode: 200,
                  url: _url,
                  body: "<PRE>"
                    + JSON.stringify(rewrite_object(m, self.base), undefined, 2)
                    + "</PRE>"
                });
            });
          })
          .request();

      } else if (_parsed.pathname === "/ledger") {
        var request = remote
          .request_ledger(undefined, { expand: true, transactions: true })
          .on('success', function (m) {
            // console.log("Ledger: %s", JSON.stringify(m, undefined, 2));

            httpd_response(res,
              {
                statusCode: 200,
                url: _url,
                body: "<PRE>"
                  + JSON.stringify(rewrite_object(m, self.base), undefined, 2)
                  +"</PRE>"
              });
          })

        if (_parsed.query.l && _parsed.query.l.length === 64) {
          request.ledger_hash(_parsed.query.l);
        }
        else if (_parsed.query.l) {
          request.ledger_index(Number(_parsed.query.l));
        }
        else {
          request.ledger_index(-1);
        }

        request.request();

      } else if (_parsed.pathname === "/transaction") {
        var request = remote
          .request_tx(_parsed.query.h)
          // .request_transaction_entry(_parsed.query.h)
          // .ledger_select(_parsed.query.l)
          .on('success', function (m) {
            // console.log("transaction: %s", JSON.stringify(m, undefined, 2));

            httpd_response(res,
              {
                statusCode: 200,
                url: _url,
                body: "<PRE>"
                  + JSON.stringify(rewrite_object(m, self.base), undefined, 2)
                  +"</PRE>"
              });
          })
          .on('error', function (m) {
            httpd_response(res,
              {
                statusCode: 200,
                url: _url,
                body: "<PRE>"
                  + 'ERROR: ' + JSON.stringify(m, undefined, 2)
                  +"</PRE>"
              });
          })
          .request();

      } else {
        var test = build_uri({
            type: 'account',
            ledger: 'closed',
            account: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
          }, self.base);

        httpd_response(res,
          {
            statusCode: req.url === "/" ? 200 : 404,
            url: _url,
          });
      }
    });
  });

  server.listen(port, ip, undefined,
    function () {
      console.log("Listening at: http://%s:%s", ip, port);
    });
}

// vim:sw=2:sts=2:ts=8:et
@@ -1,64 +0,0 @@
var ripple = require('ripple-lib');

var v = {
  seed: "snoPBrXtMeMyMHUVTgbuqAfg1SUTb",
  addr: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh"
};

var remote = ripple.Remote.from_config({
  "trusted" : true,
  "websocket_ip" : "127.0.0.1",
  "websocket_port" : 5006,
  "websocket_ssl" : false,
  "local_signing" : true
});

var tx_json = {
  "Account" : v.addr,
  "Amount" : "10000000",
  "Destination" : "rEu2ULPiEQm1BAL8pYzmXnNX1aFX9sCks",
  "Fee" : "10",
  "Flags" : 0,
  "Sequence" : 3,
  "TransactionType" : "Payment"

  //"SigningPubKey": '0396941B22791A448E5877A44CE98434DB217D6FB97D63F0DAD23BE49ED45173C9'
};

remote.on('connected', function () {
  var req = remote.request_sign(v.seed, tx_json);
  req.message.debug_signing = true;
  req.on('success', function (result) {
    console.log("SERVER RESULT");
    console.log(result);

    var sim = {};
    var tx = remote.transaction();
    tx.tx_json = tx_json;
    tx._secret = v.seed;
    tx.complete();
    var unsigned = tx.serialize().to_hex();
    tx.sign();

    sim.tx_blob = tx.serialize().to_hex();
    sim.tx_json = tx.tx_json;
    sim.tx_signing_hash = tx.signing_hash().to_hex();
    sim.tx_unsigned = unsigned;

    console.log("\nLOCAL RESULT");
    console.log(sim);

    remote.connect(false);
  });
  req.on('error', function (err) {
    if (err.error === "remoteError" && err.remote.error === "srcActNotFound") {
      console.log("Please fund account "+v.addr+" to run this test.");
    } else {
      console.log('error', err);
    }
    remote.connect(false);
  });
  req.request();

});
remote.connect();
@@ -1,18 +0,0 @@
#!/usr/bin/node
//
// Returns a Gravatar style hash as per: http://en.gravatar.com/site/implement/hash/
//

if (3 != process.argv.length) {
  process.stderr.write("Usage: " + process.argv[1] + " email_address\n\nReturns gravatar style hash.\n");
  process.exit(1);

} else {
  var md5 = require('crypto').createHash('md5');

  md5.update(process.argv[2].trim().toLowerCase());

  process.stdout.write(md5.digest('hex') + "\n");
}

// vim:sw=2:sts=2:ts=8:et
@@ -1,31 +0,0 @@
#!/usr/bin/node
//
// This program allows IE 9 ripple-clients to make websocket connections to
// rippled using flash. As IE 9 does not have websocket support, this required
// if you wish to support IE 9 ripple-clients.
//
// http://www.lightsphere.com/dev/articles/flash_socket_policy.html
//
// For better security, be sure to set the Port below to the port of your
// [websocket_public_port].
//

var net = require("net"),
    port = "*",
    domains = ["*:"+port]; // Domain:Port

net.createServer(
  function(socket) {
    socket.write("<?xml version='1.0' ?>\n");
    socket.write("<!DOCTYPE cross-domain-policy SYSTEM 'http://www.macromedia.com/xml/dtds/cross-domain-policy.dtd'>\n");
    socket.write("<cross-domain-policy>\n");
    domains.forEach(
      function(domain) {
        var parts = domain.split(':');
        socket.write("\t<allow-access-from domain='" + parts[0] + "' to-ports='" + parts[1] + "' />\n");
      }
    );
    socket.write("</cross-domain-policy>\n");
    socket.end();
  }
).listen(843);
@@ -1,150 +0,0 @@
#!/usr/bin/env bash

# This script generates information about your rippled installation
# and system. It can be used to help debug issues that you may face
# in your installation. While this script endeavors to not display any
# sensitive information, it is recommended that you read the output
# before sharing with any third parties.


rippled_exe=/opt/ripple/bin/rippled
conf_file=/etc/opt/ripple/rippled.cfg

while getopts ":e:c:" opt; do
  case $opt in
    e)
      rippled_exe=${OPTARG}
      ;;
    c)
      conf_file=${OPTARG}
      ;;
    \?)
      echo "Invalid option: -$OPTARG"
      exit -1
  esac
done

tmp_loc=$(mktemp -d --tmpdir ripple_info.XXXXX)
chmod 751 ${tmp_loc}
awk_prog=${tmp_loc}/cfg.awk
summary_out=${tmp_loc}/rippled_info.md
printf "# rippled report info\n\n> generated at %s\n" "$(date -R)" > ${summary_out}

function log_section {
  printf "\n## %s\n" "$*" >> ${summary_out}

  while read -r l; do
    echo " $l" >> ${summary_out}
  done </dev/stdin
}

function join_by {
  local IFS="$1"; shift; echo "$*";
}

if [[ -f ${conf_file} ]] ; then
  exclude=( ips ips_fixed node_seed validation_seed validator_token )
  cleaned_conf=${tmp_loc}/cleaned_rippled_cfg.txt
  cat << 'EOP' >> ${awk_prog}
BEGIN {FS="[[:space:]]*=[[:space:]]*"; skip=0; db_path=""; print > OUT_FILE; split(exl,exa,"|")}
/^#/ {next}
save==2 && /^[[:space:]]*$/ {next}
/^\[.+\]$/ {
  section=tolower(gensub(/^\[[[:space:]]*([a-zA-Z_]+)[[:space:]]*\]$/, "\\1", "g"))
  skip = 0
  for (i in exa) {
    if (section == exa[i])
      skip = 1
  }
  if (section == "database_path")
    save = 1
}
skip==1 {next}
save==2 {save=0; db_path=$0}
save==1 {save=2}
$1 ~ /password/ {$0=$1"=<redacted>"}
{print >> OUT_FILE}
END {print db_path}
EOP

  db=$(\
    sed -r -e 's/\<s[[:alnum:]]{28}\>/<redactedsecret>/g;s/^[[:space:]]*//;s/[[:space:]]*$//' ${conf_file} |\
    awk -v OUT_FILE=${cleaned_conf} -v exl="$(join_by '|' "${exclude[@]}")" -f ${awk_prog})
  rm ${awk_prog}
  cat ${cleaned_conf} | log_section "cleaned config file"
  rm ${cleaned_conf}
  echo "${db}" | log_section "database path"
  df ${db} | log_section "df: database"
fi

# Send output from this script to a log file
## this captures any messages
## or errors from the script itself

log_file=${tmp_loc}/get_info.log
exec 3>&1 1>>${log_file} 2>&1

## Send all stdout files to /tmp

if [[ -x ${rippled_exe} ]] ; then
  pgrep rippled && \
  ${rippled_exe} --conf ${conf_file} \
    -- server_info | log_section "server info"
fi

cat /proc/meminfo | log_section "meminfo"
cat /proc/swaps | log_section "swap space"
ulimit -a | log_section "ulimit"

if command -v lshw >/dev/null 2>&1 ; then
  lshw 2>/dev/null | log_section "hardware info"
else
  lscpu > ${tmp_loc}/hw_info.txt
  hwinfo >> ${tmp_loc}/hw_info.txt
  lspci >> ${tmp_loc}/hw_info.txt
  lsblk >> ${tmp_loc}/hw_info.txt
  cat ${tmp_loc}/hw_info.txt | log_section "hardware info"
  rm ${tmp_loc}/hw_info.txt
fi

if command -v iostat >/dev/null 2>&1 ; then
  iostat -t -d -x 2 6 | log_section "iostat"
fi

df -h | log_section "free disk space"
drives=($(df | awk '$1 ~ /^\/dev\// {print $1}' | xargs -n 1 basename))
block_devs=($(ls /sys/block/))
for d in "${drives[@]}"; do
  for dev in "${block_devs[@]}"; do
    #echo "D: [$d], DEV: [$dev]"
    if [[ $d =~ $dev ]]; then
      # this file (if exists) has 0 for SSD and 1 for HDD
      if [[ "$(cat /sys/block/${dev}/queue/rotational 2>/dev/null)" == 0 ]] ; then
        echo "${d} : SSD" >> ${tmp_loc}/is_ssd.txt
      else
        echo "${d} : NO SSD" >> ${tmp_loc}/is_ssd.txt
      fi
    fi
  done
done

if [[ -f ${tmp_loc}/is_ssd.txt ]] ; then
  cat ${tmp_loc}/is_ssd.txt | log_section "SSD"
  rm ${tmp_loc}/is_ssd.txt
fi

cat ${log_file} | log_section "script log"

cat << MSG | tee /dev/fd/3
####################################################
  rippled info has been gathered. Please copy the
  contents of ${summary_out}
  to a github gist at https://gist.github.com/

  PLEASE REVIEW THIS FILE FOR ANY SENSITIVE DATA
  BEFORE POSTING! We have tried our best to omit
  any sensitive information from this file, but you
  should verify before posting.
####################################################
MSG

@@ -5,7 +5,7 @@ then
name=$( basename $0 )
cat <<- USAGE
Usage: $name <username>

Where <username> is the Github username of the upstream repo. e.g. XRPLF
USAGE
exit 0
@@ -83,4 +83,3 @@ fi
_run git fetch --jobs=$(nproc) upstreams

exit 0

@@ -5,7 +5,7 @@ then
name=$( basename $0 )
cat <<- USAGE
Usage: $name workbranch base/branch user/branch [user/branch [...]]

* workbranch will be created locally from base/branch
* base/branch and user/branch may be specified as user:branch to allow
  easy copying from Github PRs
@@ -66,4 +66,3 @@ git push $push HEAD:$b
git fetch $repo
-------------------------------------------------------------------
PUSH

@@ -1,23 +0,0 @@
#!/usr/bin/node
//
// Returns hex of lowercasing a string.
//

var stringToHex = function (s) {
  return Array.prototype.map.call(s, function (c) {
    var b = c.charCodeAt(0);

    return b < 16 ? "0" + b.toString(16) : b.toString(16);
  }).join("");
};

if (3 != process.argv.length) {
  process.stderr.write("Usage: " + process.argv[1] + " string\n\nReturns hex of lowercasing string.\n");
  process.exit(1);

} else {

  process.stdout.write(stringToHex(process.argv[2].toLowerCase()) + "\n");
}

// vim:sw=2:sts=2:ts=8:et
@@ -1,42 +0,0 @@
#!/usr/bin/node
//
// This is a tool to issue JSON-RPC requests from the command line.
//
// This can be used to test a JSON-RPC server.
//
// Requires: npm simple-jsonrpc
//

var jsonrpc = require('simple-jsonrpc');

var program = process.argv[1];

if (5 !== process.argv.length) {
  console.log("Usage: %s <URL> <method> <json>", program);
}
else {
  var url = process.argv[2];
  var method = process.argv[3];
  var json_raw = process.argv[4];
  var json;

  try {
    json = JSON.parse(json_raw);
  }
  catch (e) {
    console.log("JSON parse error: %s", e.message);
    throw e;
  }

  var client = jsonrpc.client(url);

  client.call(method, json,
    function (result) {
      console.log(JSON.stringify(result, undefined, 2));
    },
    function (error) {
      console.log(JSON.stringify(error, undefined, 2));
    });
}

// vim:sw=2:sts=2:ts=8:et
@@ -1,68 +0,0 @@
#!/usr/bin/node
//
// This is a tool to listen for JSON-RPC requests at an IP and port.
//
// This will report the request to console and echo back the request as the response.
//

var http = require("http");

var program = process.argv[1];

if (4 !== process.argv.length) {
  console.log("Usage: %s <ip> <port>", program);
}
else {
  var ip = process.argv[2];
  var port = process.argv[3];

  var server = http.createServer(function (req, res) {
    console.log("CONNECT");
    var input = "";

    req.setEncoding();

    req.on('data', function (buffer) {
      // console.log("DATA: %s", buffer);
      input = input + buffer;
    });

    req.on('end', function () {
      // console.log("END");

      var json_req;

      console.log("URL: %s", req.url);
      console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));

      try {
        json_req = JSON.parse(input);

        console.log("REQ: %s", JSON.stringify(json_req, undefined, 2));
      }
      catch (e) {
        console.log("BAD JSON: %s", e.message);

        json_req = { error : e.message }
      }

      res.statusCode = 200;
      res.end(JSON.stringify({
        jsonrpc: "2.0",
        result: { request : json_req },
        id: req.id
      }));
    });

    req.on('close', function () {
      console.log("CLOSE");
    });
  });

  server.listen(port, ip, undefined,
    function () {
      console.log("Listening at: %s:%s", ip, port);
    });
}

// vim:sw=2:sts=2:ts=8:et
bin/physical.sh

@@ -1,218 +0,0 @@
#!/bin/bash

set -o errexit

marker_base=985c80fbc6131f3a8cedd0da7e8af98dfceb13c7
marker_commit=${1:-${marker_base}}

if [ $(git merge-base ${marker_commit} ${marker_base}) != ${marker_base} ]; then
  echo "first marker commit not an ancestor: ${marker_commit}"
  exit 1
fi

if [ $(git merge-base ${marker_commit} HEAD) != $(git rev-parse --verify ${marker_commit}) ]; then
  echo "given marker commit not an ancestor: ${marker_commit}"
  exit 1
fi

if [ -e Builds/CMake ]; then
  echo move CMake
  git mv Builds/CMake cmake
  git add --update .
  git commit -m 'Move CMake directory' --author 'Pretty Printer <cpp@ripple.com>'
fi

if [ -e src/ripple ]; then

  echo move protocol buffers
  mkdir -p include/xrpl
  if [ -e src/ripple/proto ]; then
    git mv src/ripple/proto include/xrpl
  fi

  extract_list() {
    git show ${marker_commit}:Builds/CMake/RippledCore.cmake | \
      awk "/END ${1}/ { p = 0 } p && /src\/ripple/; /BEGIN ${1}/ { p = 1 }" | \
      sed -e 's#src/ripple/##' -e 's#[^a-z]\+$##'
  }

  move_files() {
    oldroot="$1"; shift
    newroot="$1"; shift
    detail="$1"; shift
    files=("$@")
    for file in ${files[@]}; do
      if [ ! -e ${oldroot}/${file} ]; then
        continue
      fi
      dir=$(dirname ${file})
      if [ $(basename ${dir}) == 'details' ]; then
        dir=$(dirname ${dir})
      fi
      if [ $(basename ${dir}) == 'impl' ]; then
        dir="$(dirname ${dir})/${detail}"
      fi
      mkdir -p ${newroot}/${dir}
      git mv ${oldroot}/${file} ${newroot}/${dir}
    done
  }

  echo move libxrpl headers
  files=$(extract_list 'LIBXRPL HEADERS')
  files+=(
    basics/SlabAllocator.h

    beast/asio/io_latency_probe.h
    beast/container/aged_container.h
    beast/container/aged_container_utility.h
    beast/container/aged_map.h
    beast/container/aged_multimap.h
    beast/container/aged_multiset.h
    beast/container/aged_set.h
    beast/container/aged_unordered_map.h
    beast/container/aged_unordered_multimap.h
    beast/container/aged_unordered_multiset.h
    beast/container/aged_unordered_set.h
    beast/container/detail/aged_associative_container.h
    beast/container/detail/aged_container_iterator.h
    beast/container/detail/aged_ordered_container.h
    beast/container/detail/aged_unordered_container.h
    beast/container/detail/empty_base_optimization.h
    beast/core/LockFreeStack.h
    beast/insight/Collector.h
    beast/insight/Counter.h
    beast/insight/CounterImpl.h
    beast/insight/Event.h
    beast/insight/EventImpl.h
    beast/insight/Gauge.h
    beast/insight/GaugeImpl.h
    beast/insight/Group.h
    beast/insight/Groups.h
    beast/insight/Hook.h
    beast/insight/HookImpl.h
    beast/insight/Insight.h
    beast/insight/Meter.h
    beast/insight/MeterImpl.h
    beast/insight/NullCollector.h
    beast/insight/StatsDCollector.h
    beast/test/fail_counter.h
    beast/test/fail_stream.h
    beast/test/pipe_stream.h
    beast/test/sig_wait.h
    beast/test/string_iostream.h
    beast/test/string_istream.h
    beast/test/string_ostream.h
    beast/test/test_allocator.h
    beast/test/yield_to.h
    beast/utility/hash_pair.h
    beast/utility/maybe_const.h
    beast/utility/temp_dir.h

    # included by only json/impl/json_assert.h
    json/json_errors.h

    protocol/PayChan.h
    protocol/RippleLedgerHash.h
    protocol/messages.h
    protocol/st.h
  )
  files+=(
    basics/README.md
    crypto/README.md
    json/README.md
    protocol/README.md
    resource/README.md
  )
  move_files src/ripple include/xrpl detail ${files[@]}

  echo move libxrpl sources
  files=$(extract_list 'LIBXRPL SOURCES')
  move_files src/ripple src/libxrpl "" ${files[@]}

  echo check leftovers
  dirs=$(cd include/xrpl; ls -d */)
  dirs=$(cd src/ripple; ls -d ${dirs} 2>/dev/null || true)
  files="$(cd src/ripple; find ${dirs} -type f)"
  if [ -n "${files}" ]; then
    echo "leftover files:"
    echo ${files}
    exit
  fi

  echo remove empty directories
  empty_dirs="$(cd src/ripple; find ${dirs} -depth -type d)"
  for dir in ${empty_dirs[@]}; do
    if [ -e ${dir} ]; then
      rmdir ${dir}
    fi
  done

  echo move xrpld sources
  files=$(
    extract_list 'XRPLD SOURCES'
    cd src/ripple
    find * -regex '.*\.\(h\|ipp\|md\|pu\|uml\|png\)'
  )
  move_files src/ripple src/xrpld detail ${files[@]}

  files="$(cd src/ripple; find . -type f)"
  if [ -n "${files}" ]; then
    echo "leftover files:"
    echo ${files}
    exit
  fi

fi

rm -rf src/ripple

echo rename .hpp to .h
find include src -name '*.hpp' -exec bash -c 'f="{}"; git mv "${f}" "${f%hpp}h"' \;

echo move PerfLog.h
if [ -e include/xrpl/basics/PerfLog.h ]; then
  git mv include/xrpl/basics/PerfLog.h src/xrpld/perflog
fi

# Make sure all protobuf includes have the correct prefix.
protobuf_replace='s:^#include\s*["<].*org/xrpl\([^">]\+\)[">]:#include <xrpl/proto/org/xrpl\1>:'
# Make sure first-party includes use angle brackets and .h extension.
ripple_replace='s:include\s*["<]ripple/\(.*\)\.h\(pp\)\?[">]:include <ripple/\1.h>:'
beast_replace='s:include\s*<beast/:include <xrpl/beast/:'
# Rename impl directories to detail.
impl_rename='s:\(<xrpl.*\)/impl\(/details\)\?/:\1/detail/:'

echo rewrite includes in libxrpl
find include/xrpl src/libxrpl -type f -exec sed -i \
  -e "${protobuf_replace}" \
  -e "${ripple_replace}" \
  -e "${beast_replace}" \
  -e 's:^#include <ripple/:#include <xrpl/:' \
  -e "${impl_rename}" \
  {} +

echo rewrite includes in xrpld
# # https://www.baeldung.com/linux/join-multiple-lines
libxrpl_dirs="$(cd include/xrpl; ls -d1 */ | sed 's:/$::')"
# libxrpl_dirs='a\nb\nc\n'
readarray -t libxrpl_dirs <<< "${libxrpl_dirs}"
# libxrpl_dirs=(a b c)
libxrpl_dirs=$(printf -v txt '%s\\|' "${libxrpl_dirs[@]}"; echo "${txt%\\|}")
# libxrpl_dirs='a\|b\|c'
find src/xrpld src/test -type f -exec sed -i \
  -e "${protobuf_replace}" \
  -e "${ripple_replace}" \
  -e "${beast_replace}" \
  -e "s:^#include <ripple/basics/PerfLog.h>:#include <xrpld/perflog/PerfLog.h>:" \
  -e "s:^#include <ripple/\(${libxrpl_dirs}\)/:#include <xrpl/\1/:" \
  -e 's:^#include <ripple/:#include <xrpld/:' \
  -e "${impl_rename}" \
  {} +

git commit -m 'Rearrange sources' --author 'Pretty Printer <cpp@ripple.com>'
find include src -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format-10 -i {} +
git add --update .
git commit -m 'Rewrite includes' --author 'Pretty Printer <cpp@ripple.com>'
./Builds/levelization/levelization.sh
git add --update .
git commit -m 'Recompute loops' --author 'Pretty Printer <cpp@ripple.com>'
bin/rlint.js

@@ -1,252 +0,0 @@
#!/usr/bin/node

var async = require('async');
var Remote = require('ripple-lib').Remote;
var Transaction = require('ripple-lib').Transaction;
var UInt160 = require('ripple-lib').UInt160;
var Amount = require('ripple-lib').Amount;

var book_key = function (book) {
  return book.taker_pays.currency
    + ":" + book.taker_pays.issuer
    + ":" + book.taker_gets.currency
    + ":" + book.taker_gets.issuer;
};

var book_key_cross = function (book) {
  return book.taker_gets.currency
    + ":" + book.taker_gets.issuer
    + ":" + book.taker_pays.currency
    + ":" + book.taker_pays.issuer;
};

var ledger_verify = function (ledger) {
  var dir_nodes = ledger.accountState.filter(function (entry) {
    return entry.LedgerEntryType === 'DirectoryNode' // Only directories
      && entry.index === entry.RootIndex // Only root nodes
      && 'TakerGetsCurrency' in entry; // Only offer directories
  });

  var books = {};

  dir_nodes.forEach(function (node) {
    var book = {
      taker_gets: {
        currency: UInt160.from_generic(node.TakerGetsCurrency).to_json(),
        issuer: UInt160.from_generic(node.TakerGetsIssuer).to_json()
      },
      taker_pays: {
        currency: UInt160.from_generic(node.TakerPaysCurrency).to_json(),
        issuer: UInt160.from_generic(node.TakerPaysIssuer).to_json()
      },
      quality: Amount.from_quality(node.RootIndex),
      index: node.RootIndex
    };

    books[book_key(book)] = book;

    // console.log(JSON.stringify(node, undefined, 2));
  });

  // console.log(JSON.stringify(dir_entry, undefined, 2));
  console.log("#%s books: %s", ledger.ledger_index, Object.keys(books).length);

  Object.keys(books).forEach(function (key) {
    var book = books[key];
    var key_cross = book_key_cross(book);
    var book_cross = books[key_cross];

    if (book && book_cross && !book_cross.done)
    {
      var book_cross_quality_inverted = Amount.from_json("1.0/1/1").divide(book_cross.quality);

      if (book_cross_quality_inverted.compareTo(book.quality) >= 0)
      {
        // Crossing books
        console.log("crossing: #%s :: %s :: %s :: %s :: %s :: %s :: %s", ledger.ledger_index, key, book.quality.to_text(), book_cross.quality.to_text(), book_cross_quality_inverted.to_text(),
          book.index, book_cross.index);
      }

      book_cross.done = true;
    }
  });

  var ripple_selfs = {};

  var accounts = {};
  var counts = {};

  ledger.accountState.forEach(function (entry) {
    if (entry.LedgerEntryType === 'Offer')
    {
      counts[entry.Account] = (counts[entry.Account] || 0) + 1;
    }
    else if (entry.LedgerEntryType === 'RippleState')
    {
      if (entry.Flags & (0x10000 | 0x40000))
      {
        counts[entry.LowLimit.issuer] = (counts[entry.LowLimit.issuer] || 0) + 1;
      }

      if (entry.Flags & (0x20000 | 0x80000))
      {
        counts[entry.HighLimit.issuer] = (counts[entry.HighLimit.issuer] || 0) + 1;
      }

      if (entry.HighLimit.issuer === entry.LowLimit.issuer)
        ripple_selfs[entry.Account] = entry;
    }
    else if (entry.LedgerEntryType == 'AccountRoot')
    {
      accounts[entry.Account] = entry;
    }
  });

  var low = 0; // Accounts with too low a count.
  var high = 0;
  var missing_accounts = 0; // Objects with no referencing account.
  var missing_objects = 0; // Accounts specifying an object but having none.

  Object.keys(counts).forEach(function (account) {
    if (account in accounts)
    {
      if (counts[account] !== accounts[account].OwnerCount)
      {
        if (counts[account] < accounts[account].OwnerCount)
        {
          high += 1;
          console.log("%s: high count %s/%s", account, counts[account], accounts[account].OwnerCount);
        }
        else
        {
          low += 1;
          console.log("%s: low count %s/%s", account, counts[account], accounts[account].OwnerCount);
        }
      }
    }
    else
    {
      missing_accounts += 1;

      console.log("%s: missing : count %s", account, counts[account]);
    }
  });

  Object.keys(accounts).forEach(function (account) {
    if (!('OwnerCount' in accounts[account]))
    {
      console.log("%s: bad entry : %s", account, JSON.stringify(accounts[account], undefined, 2));
    }
    else if (!(account in counts) && accounts[account].OwnerCount)
    {
      missing_objects += 1;

      console.log("%s: no objects : %s/%s", account, 0, accounts[account].OwnerCount);
    }
  });

  if (low)
    console.log("counts too low = %s", low);

  if (high)
    console.log("counts too high = %s", high);

  if (missing_objects)
    console.log("missing_objects = %s", missing_objects);

  if (missing_accounts)
    console.log("missing_accounts = %s", missing_accounts);

  if (Object.keys(ripple_selfs).length)
    console.log("RippleState selfs = %s", Object.keys(ripple_selfs).length);

};

var ledger_request = function (remote, ledger_index, done) {
  remote.request_ledger(undefined, {
      accounts: true,
      expand: true,
    })
    .ledger_index(ledger_index)
    .on('success', function (m) {
      // console.log("ledger: ", ledger_index);
      // console.log("ledger: ", JSON.stringify(m, undefined, 2));
      done(m.ledger);
    })
    .on('error', function (m) {
      console.log("error");
      done();
    })
    .request();
};

var usage = function () {
  console.log("rlint.js _websocket_ip_ _websocket_port_ ");
};

var finish = function (remote) {
  remote.disconnect();

  // XXX Because remote.disconnect() doesn't work:
  process.exit();
};

console.log("args: ", process.argv.length);
console.log("args: ", process.argv);

if (process.argv.length < 4) {
  usage();
}
else {
  var remote = Remote.from_config({
      websocket_ip: process.argv[2],
      websocket_port: process.argv[3],
    })
    .once('ledger_closed', function (m) {
      console.log("ledger_closed: ", JSON.stringify(m, undefined, 2));

      if (process.argv.length === 5) {
        var ledger_index = process.argv[4];

        ledger_request(remote, ledger_index, function (l) {
          if (l) {
            ledger_verify(l);
          }
||||
|
||||
finish(remote);
|
||||
});
|
||||
|
||||
} else if (process.argv.length === 6) {
|
||||
var ledger_start = Number(process.argv[4]);
|
||||
var ledger_end = Number(process.argv[5]);
|
||||
var ledger_cursor = ledger_end;
|
||||
|
||||
async.whilst(
|
||||
function () {
|
||||
return ledger_start <= ledger_cursor && ledger_cursor <=ledger_end;
|
||||
},
|
||||
function (callback) {
|
||||
// console.log(ledger_cursor);
|
||||
|
||||
ledger_request(remote, ledger_cursor, function (l) {
|
||||
if (l) {
|
||||
ledger_verify(l);
|
||||
}
|
||||
|
||||
--ledger_cursor;
|
||||
|
||||
callback();
|
||||
});
|
||||
},
|
||||
function (error) {
|
||||
finish(remote);
|
||||
});
|
||||
|
||||
} else {
|
||||
finish(remote);
|
||||
}
|
||||
})
|
||||
.connect();
|
||||
}
|
||||
|
||||
// vim:sw=2:sts=2:ts=8:et
|
||||
@@ -1,51 +0,0 @@
#!/usr/bin/env bash
set -exu

: ${TRAVIS_BUILD_DIR:=""}
: ${VCPKG_DIR:=".vcpkg"}
export VCPKG_ROOT=${VCPKG_DIR}
: ${VCPKG_DEFAULT_TRIPLET:="x64-windows-static"}

export VCPKG_DEFAULT_TRIPLET

EXE="vcpkg"
if [[ -z ${COMSPEC:-} ]]; then
  EXE="${EXE}.exe"
fi

if [[ -d "${VCPKG_DIR}" && -x "${VCPKG_DIR}/${EXE}" && -d "${VCPKG_DIR}/installed" ]] ; then
  echo "Using cached vcpkg at ${VCPKG_DIR}"
  ${VCPKG_DIR}/${EXE} list
else
  if [[ -d "${VCPKG_DIR}" ]] ; then
    rm -rf "${VCPKG_DIR}"
  fi
  git clone --branch 2021.04.30 https://github.com/Microsoft/vcpkg.git ${VCPKG_DIR}
  pushd ${VCPKG_DIR}
  BSARGS=()
  if [[ "$(uname)" == "Darwin" ]] ; then
    BSARGS+=(--allowAppleClang)
  fi
  if [[ -z ${COMSPEC:-} ]]; then
    chmod +x ./bootstrap-vcpkg.sh
    time ./bootstrap-vcpkg.sh "${BSARGS[@]}"
  else
    time ./bootstrap-vcpkg.bat
  fi
  popd
fi

# TODO: bring boost in this way as well ?
# NOTE: can pin specific ports to a commit/version like this:
#   git checkout <SOME COMMIT HASH> ports/boost
if [ $# -eq 0 ]; then
  echo "No extra packages specified..."
  PKGS=()
else
  PKGS=( "$@" )
fi
for LIB in "${PKGS[@]}"; do
  time ${VCPKG_DIR}/${EXE} --clean-after-build install ${LIB}
done
@@ -1,40 +0,0 @@

# NOTE: must be sourced from a shell so it can export vars

cat << BATCH > ./getenv.bat
CALL %*
ENV
BATCH

while read line ; do
  IFS='"' read x path arg <<<"${line}"
  if [ -f "${path}" ] ; then
    echo "FOUND: $path"
    export VCINSTALLDIR=$(./getenv.bat "${path}" ${arg} | grep "^VCINSTALLDIR=" | sed -E "s/^VCINSTALLDIR=//g")
    if [ "${VCINSTALLDIR}" != "" ] ; then
      echo "USING ${VCINSTALLDIR}"
      export LIB=$(./getenv.bat "${path}" ${arg} | grep "^LIB=" | sed -E "s/^LIB=//g")
      export LIBPATH=$(./getenv.bat "${path}" ${arg} | grep "^LIBPATH=" | sed -E "s/^LIBPATH=//g")
      export INCLUDE=$(./getenv.bat "${path}" ${arg} | grep "^INCLUDE=" | sed -E "s/^INCLUDE=//g")
      ADDPATH=$(./getenv.bat "${path}" ${arg} | grep "^PATH=" | sed -E "s/^PATH=//g")
      export PATH="${ADDPATH}:${PATH}"
      break
    fi
  fi
done <<EOL
"C:/Program Files (x86)/Microsoft Visual Studio/2019/BuildTools/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio/2019/Community/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio/2017/BuildTools/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio 15.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 13.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 12.0/VC/vcvarsall.bat" amd64
EOL
# TODO: update the list above as needed to support newer versions of msvc tools

rm -f getenv.bat

if [ "${VCINSTALLDIR}" = "" ] ; then
  echo "No compatible visual studio found!"
fi
@@ -1,246 +0,0 @@
#!/usr/bin/env python
"""A script to test rippled in an infinite loop of start-sync-stop.

- Requires Python 3.7+.
- Can be stopped with SIGINT.
- Has no dependencies outside the standard library.
"""

import sys

assert sys.version_info.major == 3 and sys.version_info.minor >= 7

import argparse
import asyncio
import configparser
import contextlib
import json
import logging
import os
from pathlib import Path
import platform
import subprocess
import time
import urllib.error
import urllib.request

# Enable asynchronous subprocesses on Windows. The default changed in 3.8.
# https://docs.python.org/3.7/library/asyncio-platforms.html#subprocess-support-on-windows
if (platform.system() == 'Windows' and sys.version_info.major == 3
        and sys.version_info.minor < 8):
    asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())

DEFAULT_EXE = 'rippled'
DEFAULT_CONFIGURATION_FILE = 'rippled.cfg'
# Number of seconds to wait before forcefully terminating.
PATIENCE = 120
# Number of contiguous seconds in a sync state to be considered synced.
DEFAULT_SYNC_DURATION = 60
# Number of seconds between polls of state.
DEFAULT_POLL_INTERVAL = 5
SYNC_STATES = ('full', 'validating', 'proposing')


def read_config(config_file):
    # strict = False: Allow duplicate keys, e.g. [rpc_startup].
    # allow_no_value = True: Allow keys with no values. Generally, these
    # instances use the "key" as the value, and the section name is the key,
    # e.g. [debug_logfile].
    # delimiters = ('='): Allow ':' as a character in Windows paths. Some of
    # our "keys" are actually values, and we don't want to split them on ':'.
    config = configparser.ConfigParser(
        strict=False,
        allow_no_value=True,
        delimiters=('='),
    )
    config.read(config_file)
    return config


def to_list(value, separator=','):
    """Parse a list from a delimited string value."""
    return [s.strip() for s in value.split(separator) if s]


def find_log_file(config_file):
    """Try to figure out what log file the user has chosen. Raises all kinds
    of exceptions if there is any possibility of ambiguity."""
    config = read_config(config_file)
    values = list(config['debug_logfile'].keys())
    if len(values) < 1:
        raise ValueError(
            f'no [debug_logfile] in configuration file: {config_file}')
    if len(values) > 1:
        raise ValueError(
            f'too many [debug_logfile] in configuration file: {config_file}')
    return values[0]


def find_http_port(config_file):
    config = read_config(config_file)
    names = list(config['server'].keys())
    for name in names:
        server = config[name]
        if 'http' in to_list(server.get('protocol', '')):
            return int(server['port'])
    raise ValueError(f'no server in [server] for "http" protocol')


@contextlib.asynccontextmanager
async def rippled(exe=DEFAULT_EXE, config_file=DEFAULT_CONFIGURATION_FILE):
    """A context manager for a rippled process."""
    # Start the server.
    process = await asyncio.create_subprocess_exec(
        str(exe),
        '--conf',
        str(config_file),
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    logging.info(f'rippled started with pid {process.pid}')
    try:
        yield process
    finally:
        # Ask it to stop.
        logging.info(f'asking rippled (pid: {process.pid}) to stop')
        start = time.time()
        process.terminate()

        # Wait nicely.
        try:
            await asyncio.wait_for(process.wait(), PATIENCE)
        except asyncio.TimeoutError:
            # Ask the operating system to kill it.
            logging.warning(f'killing rippled ({process.pid})')
            try:
                process.kill()
            except ProcessLookupError:
                pass

        code = await process.wait()
        end = time.time()
        logging.info(
            f'rippled stopped after {end - start:.1f} seconds with code {code}'
        )


async def sync(
    port,
    *,
    duration=DEFAULT_SYNC_DURATION,
    interval=DEFAULT_POLL_INTERVAL,
):
    """Poll rippled on an interval until it has been synced for a duration."""
    start = time.perf_counter()
    while (time.perf_counter() - start) < duration:
        await asyncio.sleep(interval)

        request = urllib.request.Request(
            f'http://127.0.0.1:{port}',
            data=json.dumps({
                'method': 'server_state'
            }).encode(),
            headers={'Content-Type': 'application/json'},
        )
        with urllib.request.urlopen(request) as response:
            try:
                body = json.loads(response.read())
            except urllib.error.HTTPError as cause:
                logging.warning(f'server_state returned not JSON: {cause}')
                start = time.perf_counter()
                continue

        try:
            state = body['result']['state']['server_state']
        except KeyError as cause:
            logging.warning(f'server_state response missing key: {cause.key}')
            start = time.perf_counter()
            continue
        logging.info(f'server_state: {state}')
        if state not in SYNC_STATES:
            # Require a contiguous sync state.
            start = time.perf_counter()


async def loop(test,
               *,
               exe=DEFAULT_EXE,
               config_file=DEFAULT_CONFIGURATION_FILE):
    """
    Start-test-stop rippled in an infinite loop.

    Moves log to a different file after each iteration.
    """
    log_file = find_log_file(config_file)
    id = 0
    while True:
        logging.info(f'iteration: {id}')
        async with rippled(exe, config_file) as process:
            start = time.perf_counter()
            exited = asyncio.create_task(process.wait())
            tested = asyncio.create_task(test())
            # Try to sync as long as the process is running.
            done, pending = await asyncio.wait(
                {exited, tested},
                return_when=asyncio.FIRST_COMPLETED,
            )
            if done == {exited}:
                code = exited.result()
                logging.warning(
                    f'server halted for unknown reason with code {code}')
            else:
                assert done == {tested}
                assert tested.exception() is None
                end = time.perf_counter()
                logging.info(f'synced after {end - start:.0f} seconds')
        os.replace(log_file, f'debug.{id}.log')
        id += 1


logging.basicConfig(
    format='%(asctime)s %(levelname)-8s %(message)s',
    level=logging.INFO,
    datefmt='%Y-%m-%d %H:%M:%S',
)

parser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
    'rippled',
    type=Path,
    nargs='?',
    default=DEFAULT_EXE,
    help='Path to rippled.',
)
parser.add_argument(
    '--conf',
    type=Path,
    default=DEFAULT_CONFIGURATION_FILE,
    help='Path to configuration file.',
)
parser.add_argument(
    '--duration',
    type=int,
    default=DEFAULT_SYNC_DURATION,
    help='Number of contiguous seconds required in a synchronized state.',
)
parser.add_argument(
    '--interval',
    type=int,
    default=DEFAULT_POLL_INTERVAL,
    help='Number of seconds to wait between polls of state.',
)
args = parser.parse_args()

port = find_http_port(args.conf)


def test():
    return sync(port, duration=args.duration, interval=args.interval)


try:
    asyncio.run(loop(test, exe=args.rippled, config_file=args.conf))
except KeyboardInterrupt:
    # Squelch the message. This is a normal mode of exit.
    pass
bin/stop-test.js
@@ -1,133 +0,0 @@
/* -------------------------------- REQUIRES -------------------------------- */

var child = require("child_process");
var assert = require("assert");

/* --------------------------------- CONFIG --------------------------------- */

if (process.argv[2] == null) {
  [
   'Usage: ',
   '',
   '  `node bin/stop-test.js i,j [rippled_path] [rippled_conf]`',
   '',
   '  Launch rippled and stop it after n seconds for all n in [i, j}',
   '  For all even values of n launch rippled with `--fg`',
   '  For values of n where n % 3 == 0 launch rippled with `--fg`\n',
   'Examples: ',
   '',
   '  $ node bin/stop-test.js 5,10',
   ('  $ node bin/stop-test.js 1,4 ' +
      'build/clang.debug/rippled $HOME/.confs/rippled.cfg')
  ]
  .forEach(function(l){console.log(l)});

  process.exit();
} else {
  var testRange = process.argv[2].split(',').map(Number);
  var rippledPath = process.argv[3] || 'build/rippled'
  var rippledConf = process.argv[4] || 'rippled.cfg'
}

var options = {
  env: process.env,
  stdio: 'ignore' // we could dump the child io when it fails abnormally
};

// default args
var conf_args = ['--conf='+rippledConf];
var start_args = conf_args.concat([/*'--net'*/])
var stop_args = conf_args.concat(['stop']);

/* --------------------------------- HELPERS -------------------------------- */

function start(args) {
  return child.spawn(rippledPath, args, options);
}
function stop(rippled) { child.execFile(rippledPath, stop_args, options)}
function secs_l8r(ms, f) {setTimeout(f, ms * 1000); }

function show_results_and_exit(results) {
  console.log(JSON.stringify(results, undefined, 2));
  process.exit();
}

var timeTakes = function (range) {
  function sumRange(n) {return (n+1) * n /2}
  var ret = sumRange(range[1]);
  if (range[0] > 1) {
    ret = ret - sumRange(range[0] - 1)
  }
  var stopping = (range[1] - range[0]) * 0.5;
  return ret + stopping;
}

/* ---------------------------------- TEST ---------------------------------- */

console.log("Test will take ~%s seconds", timeTakes(testRange));

(function oneTest(n /* seconds */, results) {
  if (n >= testRange[1]) {
    // show_results_and_exit(results);
    console.log(JSON.stringify(results, undefined, 2));
    oneTest(testRange[0], []);
    return;
  }

  var args = start_args;
  if (n % 2 == 0) {args = args.concat(['--fg'])}
  if (n % 3 == 0) {args = args.concat(['--net'])}

  var result = {args: args, alive_for: n};
  results.push(result);

  console.log("\nLaunching `%s` with `%s` for %d seconds",
              rippledPath, JSON.stringify(args), n);

  rippled = start(args);
  console.log("Rippled pid: %d", rippled.pid);

  // defaults
  var b4StopSent = false;
  var stopSent = false;
  var stop_took = null;

  rippled.once('exit', function(){
    if (!stopSent && !b4StopSent) {
      console.warn('\nRippled exited itself b4 stop issued');
      process.exit();
    };

    // The io handles close AFTER exit, may have implications for
    // `stdio:'inherit'` option to `child.spawn`.
    rippled.once('close', function() {
      result.stop_took = (+new Date() - stop_took) / 1000; // seconds
      console.log("Stopping after %d seconds took %s seconds",
                  n, result.stop_took);
      oneTest(n+1, results);
    });
  });

  secs_l8r(n, function(){
    console.log("Stopping rippled after %d seconds", n);

    // possible race here ?
    // seems highly unlikely, but I was having issues at one point
    b4StopSent=true;
    stop_took = (+new Date());
    // when does `exit` actually get sent?
    stop();
    stopSent=true;

    // Sometimes we want to attach with a debugger.
    if (process.env.ABORT_TESTS_ON_STALL != null) {
      // We wait 30 seconds, and if it hasn't stopped, we abort the process
      secs_l8r(30, function() {
        if (result.stop_took == null) {
          console.log("rippled has stalled");
          process.exit();
        };
      });
    }
  })
}(testRange[0], []));
@@ -1,119 +0,0 @@
/**
 * bin/update_bintypes.js
 *
 * This unholy abomination of a script generates the JavaScript file
 * src/js/bintypes.js from various parts of the C++ source code.
 *
 * This should *NOT* be part of any automatic build process unless the C++
 * source data are brought into a more easily parseable format. Until then,
 * simply run this script manually and fix as needed.
 */

// XXX: Process LedgerFormats.(h|cpp) as well.

var filenameProto = __dirname + '/../src/cpp/ripple/SerializeProto.h',
    filenameTxFormatsH = __dirname + '/../src/cpp/ripple/TransactionFormats.h',
    filenameTxFormats = __dirname + '/../src/cpp/ripple/TransactionFormats.cpp';

var fs = require('fs');

var output = [];

// Stage 1: Get the field types and codes from SerializeProto.h
var types = {},
    fields = {};
String(fs.readFileSync(filenameProto)).split('\n').forEach(function (line) {
  line = line.replace(/^\s+|\s+$/g, '').replace(/\s+/g, '');
  if (!line.length || line.slice(0, 2) === '//' || line.slice(-1) !== ')') return;

  var tmp = line.slice(0, -1).split('('),
      type = tmp[0],
      opts = tmp[1].split(',');

  if (type === 'TYPE') types[opts[1]] = [opts[0], +opts[2]];
  else if (type === 'FIELD') fields[opts[0]] = [types[opts[1]][0], +opts[2]];
});

output.push('var ST = require("./serializedtypes");');
output.push('');
output.push('var REQUIRED = exports.REQUIRED = 0,');
output.push(' OPTIONAL = exports.OPTIONAL = 1,');
output.push(' DEFAULT = exports.DEFAULT = 2;');
output.push('');

function pad(s, n) { while (s.length < n) s += ' '; return s; }
function padl(s, n) { while (s.length < n) s = ' '+s; return s; }

Object.keys(types).forEach(function (type) {
  output.push(pad('ST.'+types[type][0]+'.id', 25) + ' = '+types[type][1]+';');
});
output.push('');

// Stage 2: Get the transaction type IDs from TransactionFormats.h
var ttConsts = {};
String(fs.readFileSync(filenameTxFormatsH)).split('\n').forEach(function (line) {
  var regex = /tt([A-Z_]+)\s+=\s+([0-9-]+)/;
  var match = line.match(regex);
  if (match) ttConsts[match[1]] = +match[2];
});

// Stage 3: Get the transaction formats from TransactionFormats.cpp
var base = [],
    sections = [],
    current = base;
String(fs.readFileSync(filenameTxFormats)).split('\n').forEach(function (line) {
  line = line.replace(/^\s+|\s+$/g, '').replace(/\s+/g, '');

  var d_regex = /DECLARE_TF\(([A-Za-z]+),tt([A-Z_]+)/;
  var d_match = line.match(d_regex);

  var s_regex = /SOElement\(sf([a-z]+),SOE_(REQUIRED|OPTIONAL|DEFAULT)/i;
  var s_match = line.match(s_regex);

  if (d_match) sections.push(current = [d_match[1], ttConsts[d_match[2]]]);
  else if (s_match) current.push([s_match[1], s_match[2]]);
});

function removeFinalComma(arr) {
  arr[arr.length-1] = arr[arr.length-1].slice(0, -1);
}

output.push('var base = [');
base.forEach(function (field) {
  var spec = fields[field[0]];
  output.push(' [ '+
              pad("'"+field[0]+"'", 21)+', '+
              pad(field[1], 8)+', '+
              padl(""+spec[1], 2)+', '+
              'ST.'+pad(spec[0], 3)+
              ' ],');
});
removeFinalComma(output);
output.push('];');
output.push('');


output.push('exports.tx = {');
sections.forEach(function (section) {
  var name = section.shift(),
      ttid = section.shift();

  output.push(' '+name+': ['+ttid+'].concat(base, [');
  section.forEach(function (field) {
    var spec = fields[field[0]];
    output.push(' [ '+
                pad("'"+field[0]+"'", 21)+', '+
                pad(field[1], 8)+', '+
                padl(""+spec[1], 2)+', '+
                'ST.'+pad(spec[0], 3)+
                ' ],');
  });
  removeFinalComma(output);
  output.push(' ]),');
});
removeFinalComma(output);
output.push('};');
output.push('');

console.log(output.join('\n'));
@@ -396,8 +396,8 @@
# true - enables compression
# false - disables compression [default].
#
# The rippled server can save bandwidth by compressing its peer-to-peer communications,
# at a cost of greater CPU usage. If you enable link compression,
# the server automatically compresses communications with peer servers
# that also have link compression enabled.
# https://xrpl.org/enable-link-compression.html
@@ -1011,7 +1011,7 @@
# that rippled is still in sync with the network,
# and that the validated ledger is less than
# 'age_threshold_seconds' old. If not, then continue
# sleeping for this number of seconds and
# checking until healthy.
# Default is 5.
#
@@ -1113,7 +1113,7 @@
# page_size Valid values: integer (MUST be power of 2 between 512 and 65536)
#           The default is 4096 bytes. This setting determines
#           the size of a page in the transaction.db file.
#           See https://www.sqlite.org/pragma.html#pragma_page_size
#           for more details about the available options.
#
# journal_size_limit Valid values: integer

@@ -16,13 +16,16 @@ set(CMAKE_CXX_EXTENSIONS OFF)
target_compile_definitions (common
  INTERFACE
    $<$<CONFIG:Debug>:DEBUG _DEBUG>
    $<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>)
    # ^^^^ NOTE: CMAKE release builds already have NDEBUG
    # defined, so no need to add it explicitly except for
    # this special case of (profile ON) and (assert OFF)
    # -- presumably this is because we don't want profile
    # builds asserting unless asserts were specifically
    # requested
    #[===[
    NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it
    explicitly except for the special case of (profile ON) and (assert OFF).
    Presumably this is because we don't want profile builds asserting unless
    asserts were specifically requested.
    ]===]
    $<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
    # TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
    OPENSSL_SUPPRESS_DEPRECATED
)

if (MSVC)
  # remove existing exception flag since we set it to -EHa
@@ -90,28 +93,16 @@ if (MSVC)
    -errorreport:none
    -machine:X64)
else ()
  # HACK : because these need to come first, before any warning demotion
  string (APPEND CMAKE_CXX_FLAGS " -Wall -Wdeprecated")
  if (wextra)
    string (APPEND CMAKE_CXX_FLAGS " -Wextra -Wno-unused-parameter")
  endif ()
  # not MSVC
  target_compile_options (common
    INTERFACE
      -Wall
      -Wdeprecated
      $<$<BOOL:${is_clang}>:-Wno-deprecated-declarations>
      $<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
      $<$<BOOL:${werr}>:-Werror>
      $<$<COMPILE_LANGUAGE:CXX>:
        -frtti
        -Wnon-virtual-dtor
      >
      -Wno-sign-compare
      -Wno-char-subscripts
      -Wno-format
      -Wno-unused-local-typedefs
      -fstack-protector
      $<$<BOOL:${is_gcc}>:
        -Wno-unused-but-set-variable
        -Wno-deprecated
      >
      -Wno-sign-compare
      -Wno-unused-but-set-variable
      $<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
      # tweak gcc optimization for debug
      $<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>

@@ -99,6 +99,15 @@ target_link_libraries(xrpl.libxrpl.protocol PUBLIC
add_module(xrpl resource)
target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)

# Level 06
add_module(xrpl net)
target_link_libraries(xrpl.libxrpl.net PUBLIC
  xrpl.libxrpl.basics
  xrpl.libxrpl.json
  xrpl.libxrpl.protocol
  xrpl.libxrpl.resource
)

add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)

@@ -121,6 +130,7 @@ target_link_modules(xrpl PUBLIC
  protocol
  resource
  server
  net
)

# All headers in libxrpl are in modules.

@@ -19,6 +19,7 @@ install (
  xrpl.libxrpl.protocol
  xrpl.libxrpl.resource
  xrpl.libxrpl.server
  xrpl.libxrpl.net
  xrpl.libxrpl
  antithesis-sdk-cpp
  EXPORT RippleExports

conan/global.conf
@@ -0,0 +1,9 @@
# Global configuration for Conan. This is used to set the number of parallel
# downloads, uploads, and build jobs. The verbosity is set to verbose to
# provide more information during the build process.
core:non_interactive=True
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
conan/profiles/default
@@ -0,0 +1,34 @@
{% set os = detect_api.detect_os() %}
{% set arch = detect_api.detect_arch() %}
{% set compiler, version, compiler_exe = detect_api.detect_default_compiler() %}
{% set compiler_version = version %}
{% if os == "Linux" %}
{% set compiler_version = detect_api.default_compiler_version(compiler, version) %}
{% endif %}

[settings]
os={{ os }}
arch={{ arch }}
build_type=Debug
compiler={{compiler}}
compiler.version={{ compiler_version }}
compiler.cppstd=20
{% if os == "Windows" %}
compiler.runtime=static
{% else %}
compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}
{% endif %}

[conf]
{% if compiler == "clang" and compiler_version >= 19 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "apple-clang" and compiler_version >= 17 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags=['-Wno-restrict']
{% endif %}

[tool_requires]
!cmake/*: cmake/[>=3 <4]
conanfile.py
@@ -24,17 +24,20 @@ class Xrpl(ConanFile):
    }

    requires = [
        'doctest/2.4.11',
        'grpc/1.50.1',
        'libarchive/3.7.6',
        'nudb/2.0.8',
        'openssl/1.1.1v',
        'libarchive/3.8.1',
        'nudb/2.0.9',
        'openssl/3.5.2',
        'soci/4.0.3',
        'zlib/1.3.1',
    ]

    test_requires = [
        'doctest/2.4.11',
    ]

    tool_requires = [
        'protobuf/3.21.9',
        'protobuf/3.21.12',
    ]

    default_options = {
@@ -86,12 +89,13 @@ class Xrpl(ConanFile):
    }

    def set_version(self):
        path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
        regex = r'versionString\s?=\s?\"(.*)\"'
        with open(path, 'r') as file:
            matches = (re.search(regex, line) for line in file)
            match = next(m for m in matches if m)
            self.version = match.group(1)
        if self.version is None:
            path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
            regex = r'versionString\s?=\s?\"(.*)\"'
            with open(path, encoding='utf-8') as file:
                matches = (re.search(regex, line) for line in file)
                match = next(m for m in matches if m)
                self.version = match.group(1)

    def configure(self):
        if self.settings.compiler == 'apple-clang':
@@ -100,20 +104,19 @@ class Xrpl(ConanFile):
    def requirements(self):
        # Conan 2 requires transitive headers to be specified
        transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
        self.requires('boost/1.83.0', force=True, **transitive_headers_opt)
        self.requires('date/3.0.3', **transitive_headers_opt)
        self.requires('boost/1.86.0', force=True, **transitive_headers_opt)
        self.requires('date/3.0.4', **transitive_headers_opt)
        self.requires('lz4/1.10.0', force=True)
        self.requires('protobuf/3.21.9', force=True)
        self.requires('sqlite3/3.47.0', force=True)
        self.requires('protobuf/3.21.12', force=True)
        self.requires('sqlite3/3.49.1', force=True)
        if self.options.jemalloc:
            self.requires('jemalloc/5.3.0')
        if self.options.rocksdb:
            self.requires('rocksdb/9.7.3')
        self.requires('xxhash/0.8.2', **transitive_headers_opt)
            self.requires('rocksdb/10.0.1')
        self.requires('xxhash/0.8.3', **transitive_headers_opt)

    exports_sources = (
        'CMakeLists.txt',
        'bin/getRippledInfo',
        'cfg/*',
        'cmake/*',
        'external/*',

@@ -30,7 +30,7 @@ the ledger (so the entire network has the same view). This will help the network
see which validators are **currently** unreliable, and adjust their quorum
calculation accordingly.

*Improving the liveness of the network is the main motivation for the negative UNL.*
_Improving the liveness of the network is the main motivation for the negative UNL._

### Targeted Faults

@@ -53,16 +53,17 @@ even if the number of remaining validators gets to 60%. Say we have a network
with 10 validators on the UNL and everything is operating correctly. The quorum
required for this network would be 8 (80% of 10). When validators fail, the
quorum required would be as low as 6 (60% of 10), which is the absolute
***minimum quorum***. We need the absolute minimum quorum to be strictly greater
**_minimum quorum_**. We need the absolute minimum quorum to be strictly greater
than 50% of the original UNL so that there cannot be two partitions of
well-behaved nodes headed in different directions. We arbitrarily choose 60% as
the minimum quorum to give a margin of safety.

Consider these events in the absence of negative UNL:

1. 1:00pm - validator1 fails, votes vs. quorum: 9 >= 8, we have quorum
1. 3:00pm - validator2 fails, votes vs. quorum: 8 >= 8, we have quorum
1. 5:00pm - validator3 fails, votes vs. quorum: 7 < 8, we don’t have quorum
   * **network cannot validate new ledgers with 3 failed validators**
   - **network cannot validate new ledgers with 3 failed validators**

We're below 80% agreement, so new ledgers cannot be validated. This is how the
XRP Ledger operates today, but if the negative UNL was enabled, the events would
@@ -70,18 +71,20 @@ happen as follows. (Please note that the events below are from a simplified
version of our protocol.)

1. 1:00pm - validator1 fails, votes vs. quorum: 9 >= 8, we have quorum
1. 1:40pm - network adds validator1 to negative UNL, quorum changes to ceil(9 * 0.8), or 8
1. 1:40pm - network adds validator1 to negative UNL, quorum changes to ceil(9 \* 0.8), or 8
1. 3:00pm - validator2 fails, votes vs. quorum: 8 >= 8, we have quorum
1. 3:40pm - network adds validator2 to negative UNL, quorum changes to ceil(8 * 0.8), or 7
1. 3:40pm - network adds validator2 to negative UNL, quorum changes to ceil(8 \* 0.8), or 7
1. 5:00pm - validator3 fails, votes vs. quorum: 7 >= 7, we have quorum
1. 5:40pm - network adds validator3 to negative UNL, quorum changes to ceil(7 * 0.8), or 6
1. 5:40pm - network adds validator3 to negative UNL, quorum changes to ceil(7 \* 0.8), or 6
1. 7:00pm - validator4 fails, votes vs. quorum: 6 >= 6, we have quorum
   * **network can still validate new ledgers with 4 failed validators**
   - **network can still validate new ledgers with 4 failed validators**

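The quorum arithmetic in the timeline above can be checked with a few lines of
Python. This is an illustrative sketch of the simplified description only, not
code from the proposal or from rippled:

```python
import math

# Sketch: each validator that the network adds to the negative UNL shrinks
# the effective UNL by one, and the quorum is recomputed as 80% of what
# remains (rounded up).
original_unl = 10

for listed in range(4):  # size of the negative UNL after each 40-minute step
    effective_unl = original_unl - listed
    print(f'negative UNL size {listed}: quorum = {math.ceil(0.8 * effective_unl)}')

# Prints quorums 8, 8, 7, 6, matching the 1:40pm, 3:40pm, and 5:40pm steps.
```
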
## External Interactions

### Message Format Changes

This proposal will:

1. add a new pseudo-transaction type
1. add the negative UNL to the ledger data structure.

@@ -89,19 +92,20 @@ Any tools or systems that rely on the format of this data will have to be
updated.

### Amendment

This feature **will** need an amendment to activate.

## Design

This section discusses the following topics about the Negative UNL design:

* [Negative UNL protocol overview](#Negative-UNL-Protocol-Overview)
* [Validator reliability measurement](#Validator-Reliability-Measurement)
* [Format Changes](#Format-Changes)
* [Negative UNL maintenance](#Negative-UNL-Maintenance)
* [Quorum size calculation](#Quorum-Size-Calculation)
* [Filter validation messages](#Filter-Validation-Messages)
* [High level sequence diagram of code
- [Negative UNL protocol overview](#Negative-UNL-Protocol-Overview)
- [Validator reliability measurement](#Validator-Reliability-Measurement)
- [Format Changes](#Format-Changes)
- [Negative UNL maintenance](#Negative-UNL-Maintenance)
- [Quorum size calculation](#Quorum-Size-Calculation)
- [Filter validation messages](#Filter-Validation-Messages)
- [High level sequence diagram of code
  changes](#High-Level-Sequence-Diagram-of-Code-Changes)

### Negative UNL Protocol Overview

@@ -114,9 +118,9 @@ with V in their UNL adjust the quorum and V’s validation message is not counte
when verifying if a ledger is fully validated. V’s flow of messages and network
interactions, however, will remain the same.

We define the ***effective UNL** = original UNL - negative UNL*, and the
***effective quorum*** as the quorum of the *effective UNL*. And we set
*effective quorum = Ceiling(80% * effective UNL)*.
We define the **_effective UNL_** = original UNL - negative UNL, and the
**_effective quorum_** as the quorum of the _effective UNL_. And we set
_effective quorum = Ceiling(80% \* effective UNL)_.

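As a sketch in Python (hypothetical helper name; the 60% floor comes from the
absolute minimum quorum discussed earlier):

```python
import math

def effective_quorum(original_unl: set, negative_unl: set) -> int:
    # effective UNL = original UNL - negative UNL
    effective_unl = original_unl - negative_unl
    # 80% of the effective UNL, never below the absolute minimum quorum
    # of 60% of the original UNL.
    return max(math.ceil(0.8 * len(effective_unl)),
               math.ceil(0.6 * len(original_unl)))
```
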
### Validator Reliability Measurement

@@ -126,16 +130,16 @@ measure about its validators, but we have chosen ledger validation messages.
This is because every validator shall send one and only one signed validation
message per ledger. This keeps the measurement simple and removes
timing/clock-sync issues. A node will measure the percentage of agreeing
validation messages (*PAV*) received from each validator on the node's UNL. Note
validation messages (_PAV_) received from each validator on the node's UNL. Note
that the node will only count the validation messages that agree with its own
validations.

We define the **PAV** as the **P**ercentage of **A**greed **V**alidation
messages received for the last N ledgers, where N = 256 by default.

When the PAV drops below the ***low-water mark***, the validator is considered
When the PAV drops below the **_low-water mark_**, the validator is considered
unreliable, and is a candidate to be disabled by being added to the negative
UNL. A validator must have a PAV higher than the ***high-water mark*** to be
UNL. A validator must have a PAV higher than the **_high-water mark_** to be
re-enabled. The validator is re-enabled by removing it from the negative UNL. In
the implementation, we plan to set the low-water mark as 50% and the high-water
mark as 80%.
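A minimal sketch of this measurement, assuming one history of agree/disagree
flags per validator on the local UNL (class and method names are illustrative,
not from the rippled sources):

```python
from collections import deque

class ReliabilityMeter:
    """Sketch of the PAV measurement described above; one instance per
    validator on the local node's UNL."""

    def __init__(self, window=256, low_water=0.5, high_water=0.8):
        self.low_water = low_water
        self.high_water = high_water
        # One entry per ledger: True if an agreeing validation was received.
        self.history = deque(maxlen=window)

    def record(self, agreed: bool):
        self.history.append(agreed)

    def pav(self) -> float:
        # Percentage of Agreed Validation messages over the last N ledgers.
        return sum(self.history) / max(len(self.history), 1)

    def is_disable_candidate(self) -> bool:
        return self.pav() < self.low_water

    def can_be_reenabled(self) -> bool:
        return self.pav() > self.high_water
```
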
@@ -143,22 +147,24 @@ mark as 80%.
### Format Changes

The negative UNL component in a ledger contains three fields.
* ***NegativeUNL***: The current negative UNL, a list of unreliable validators.
* ***ToDisable***: The validator to be added to the negative UNL on the next

- **_NegativeUNL_**: The current negative UNL, a list of unreliable validators.
- **_ToDisable_**: The validator to be added to the negative UNL on the next
  flag ledger.
* ***ToReEnable***: The validator to be removed from the negative UNL on the
- **_ToReEnable_**: The validator to be removed from the negative UNL on the
  next flag ledger.

All three fields are optional. When the *ToReEnable* field exists, the
*NegativeUNL* field cannot be empty.
All three fields are optional. When the _ToReEnable_ field exists, the
_NegativeUNL_ field cannot be empty.

A new pseudo-transaction, ***UNLModify***, is added. It has three fields
* ***Disabling***: A flag indicating whether the modification is to disable or
A new pseudo-transaction, **_UNLModify_**, is added. It has three fields

- **_Disabling_**: A flag indicating whether the modification is to disable or
  to re-enable a validator.
* ***Seq***: The ledger sequence number.
* ***Validator***: The validator to be disabled or re-enabled.
- **_Seq_**: The ledger sequence number.
- **_Validator_**: The validator to be disabled or re-enabled.

There would be at most one *disable* `UNLModify` and one *re-enable* `UNLModify`
There would be at most one _disable_ `UNLModify` and one _re-enable_ `UNLModify`
transaction per flag ledger. The full machinery is described further on.

### Negative UNL Maintenance

@@ -167,19 +173,19 @@ The negative UNL can only be modified on the flag ledgers. If a validator's
reliability status changes, it takes two flag ledgers to modify the negative
UNL. Let's see an example of the algorithm:

* Ledger seq = 100: A validator V goes offline.
* Ledger seq = 256: This is a flag ledger, and V's reliability measurement *PAV*
- Ledger seq = 100: A validator V goes offline.
- Ledger seq = 256: This is a flag ledger, and V's reliability measurement _PAV_
  is lower than the low-water mark. Other validators add `UNLModify`
  pseudo-transactions `{true, 256, V}` to the transaction set which goes through
  the consensus. Then the pseudo-transaction is applied to the negative UNL
  ledger component by setting `ToDisable = V`.
* Ledger seq = 257 ~ 511: The negative UNL ledger component is copied from the
- Ledger seq = 257 ~ 511: The negative UNL ledger component is copied from the
  parent ledger.
* Ledger seq=512: This is a flag ledger, and the negative UNL is updated
- Ledger seq=512: This is a flag ledger, and the negative UNL is updated
  `NegativeUNL = NegativeUNL + ToDisable`.

The negative UNL may have up to `MaxNegativeListed = floor(original UNL * 25%)`
validators. The 25% is because of 75% * 80% = 60%, where 75% = 100% - 25%, 80%
validators. The 25% is because of 75% \* 80% = 60%, where 75% = 100% - 25%, 80%
is the quorum of the effective UNL, and 60% is the absolute minimum quorum of
the original UNL. Adding more than 25% validators to the negative UNL does not
improve the liveness of the network, because adding more validators to the
@@ -187,52 +193,43 @@ negative UNL cannot lower the effective quorum.

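The cap and its effect on the quorum can be checked numerically (a sketch;
`n = 40` is an arbitrary example size):

```python
import math

n = 40                                      # size of the original UNL
max_negative_listed = math.floor(n * 0.25)  # 10
effective = n - max_negative_listed         # 30
quorum = math.ceil(0.8 * effective)         # 24
print(quorum / n)                           # 0.6: exactly 60% of the original UNL
```
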
The following is the detailed algorithm:

* **If** the ledger seq = x is a flag ledger
- **If** the ledger seq = x is a flag ledger
  1. Compute `NegativeUNL = NegativeUNL + ToDisable - ToReEnable` if they
     exist in the parent ledger

  1. Compute `NegativeUNL = NegativeUNL + ToDisable - ToReEnable` if they
     exist in the parent ledger
  1. Try to find a candidate to disable if `sizeof NegativeUNL < MaxNegativeListed`

  1. Try to find a candidate to disable if `sizeof NegativeUNL < MaxNegativeListed`
     1. Find a validator V that has a _PAV_ lower than the low-water
        mark, but is not in `NegativeUNL`.

     1. Find a validator V that has a *PAV* lower than the low-water
        mark, but is not in `NegativeUNL`.
     1. If two or more are found, their public keys are XORed with the hash
        of the parent ledger and the one with the lowest XOR result is chosen
        (see the sketch after this list).
     1. If V is found, create a `UNLModify` pseudo-transaction
        `TxDisableValidator = {true, x, V}`
  1. Try to find a candidate to re-enable if `sizeof NegativeUNL > 0`:
     1. Find a validator U that is in `NegativeUNL` and has a _PAV_ higher
        than the high-water mark.
     1. If U is not found, try to find one in `NegativeUNL` but not in the
        local _UNL_.
     1. If two or more are found, their public keys are XORed with the hash
        of the parent ledger and the one with the lowest XOR result is chosen.
     1. If U is found, create a `UNLModify` pseudo-transaction
        `TxReEnableValidator = {false, x, U}`

     1. If two or more are found, their public keys are XORed with the hash
        of the parent ledger and the one with the lowest XOR result is chosen.

     1. If V is found, create a `UNLModify` pseudo-transaction
        `TxDisableValidator = {true, x, V}`

  1. Try to find a candidate to re-enable if `sizeof NegativeUNL > 0`:

     1. Find a validator U that is in `NegativeUNL` and has a *PAV* higher
        than the high-water mark.

     1. If U is not found, try to find one in `NegativeUNL` but not in the
        local *UNL*.

     1. If two or more are found, their public keys are XORed with the hash
        of the parent ledger and the one with the lowest XOR result is chosen.

     1. If U is found, create a `UNLModify` pseudo-transaction
        `TxReEnableValidator = {false, x, U}`

  1. If any `UNLModify` pseudo-transactions are created, add them to the
     transaction set. The transaction set goes through the consensus algorithm.

  1. If have enough support, the `UNLModify` pseudo-transactions remain in the
     transaction set agreed by the validators. Then the pseudo-transactions are
     applied to the ledger:

     1. If have `TxDisableValidator`, set `ToDisable=TxDisableValidator.V`.
        Else clear `ToDisable`.

     1. If have `TxReEnableValidator`, set
        `ToReEnable=TxReEnableValidator.U`. Else clear `ToReEnable`.

* **Else** (not a flag ledger)
  1. If any `UNLModify` pseudo-transactions are created, add them to the
     transaction set. The transaction set goes through the consensus algorithm.
  1. If have enough support, the `UNLModify` pseudo-transactions remain in the
     transaction set agreed by the validators. Then the pseudo-transactions are
     applied to the ledger:

  1. Copy the negative UNL ledger component from the parent ledger
  1. If have `TxDisableValidator`, set `ToDisable=TxDisableValidator.V`.
     Else clear `ToDisable`.

  1. If have `TxReEnableValidator`, set
     `ToReEnable=TxReEnableValidator.U`. Else clear `ToReEnable`.

- **Else** (not a flag ledger)
  1. Copy the negative UNL ledger component from the parent ledger

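The XOR tie-break used in both candidate searches can be sketched as follows;
modeling keys and the ledger hash as plain integers is a simplification
(rippled works with 256-bit values), and the helper name is hypothetical:

```python
def choose_candidate(candidate_keys, parent_ledger_hash):
    # Deterministic tie-break: XOR each candidate's public key with the
    # parent ledger hash and pick the smallest result. Every node that sees
    # the same candidates and the same parent ledger chooses the same one.
    return min(candidate_keys, key=lambda key: key ^ parent_ledger_hash)
```

Because the parent ledger hash changes at every flag ledger, the selection is
hard to predict in advance but identical across honest nodes.
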
The negative UNL is stored on each ledger because we don't know when a validator
may reconnect to the network. If the negative UNL was stored only on every flag
@@ -273,31 +270,26 @@ not counted when checking if the ledger is fully validated.
The diagram below is the sequence of one round of consensus. Classes and
components with non-trivial changes are colored green.

* The `ValidatorList` class is modified to compute the quorum of the effective
- The `ValidatorList` class is modified to compute the quorum of the effective
  UNL.

* The `Validations` class provides an interface for querying the validation
- The `Validations` class provides an interface for querying the validation
  messages from trusted validators.

* The `ConsensusAdaptor` component:

  * The `RCLConsensus::Adaptor` class is modified for creating `UNLModify`
    Pseudo-Transactions.

  * The `Change` class is modified for applying `UNLModify`
    Pseudo-Transactions.

  * The `Ledger` class is modified for creating and adjusting the negative UNL
    ledger component.

  * The `LedgerMaster` class is modified for filtering out validation messages
    from negative UNL validators when verifying if a ledger is fully
    validated.
- The `ConsensusAdaptor` component:
  - The `RCLConsensus::Adaptor` class is modified for creating `UNLModify`
    Pseudo-Transactions.
  - The `Change` class is modified for applying `UNLModify`
    Pseudo-Transactions.
  - The `Ledger` class is modified for creating and adjusting the negative UNL
    ledger component.
  - The `LedgerMaster` class is modified for filtering out validation messages
    from negative UNL validators when verifying if a ledger is fully
    validated.



## Roads Not Taken

### Use a Mechanism Like Fee Voting to Process UNLModify Pseudo-Transactions
@@ -311,7 +303,7 @@ and different quorums for the same ledger. As a result, the network's safety is
impacted.

This updated version does not impact safety though operates a bit more slowly.
The negative UNL modifications in the *UNLModify* pseudo-transaction approved by
The negative UNL modifications in the _UNLModify_ pseudo-transaction approved by
the consensus will take effect at the next flag ledger. The extra time of the
256 ledgers should be enough for nodes to be in sync of the negative UNL
modifications.
@@ -334,29 +326,28 @@ expiration approach cannot be simply applied.
### Validator Reliability Measurement and Flag Ledger Frequency

If the ledger time is about 4.5 seconds and the low-water mark is 50%, then in
the worst case, it takes 48 minutes *((0.5 * 256 + 256 + 256) * 4.5 / 60 = 48)*
the worst case, it takes 48 minutes _((0.5 \* 256 + 256 + 256) \* 4.5 / 60 = 48)_
to put an offline validator on the negative UNL. We considered lowering the flag
ledger frequency so that the negative UNL can be more responsive. We also
considered decoupling the reliability measurement and flag ledger frequency to
be more flexible. In practice, however, their benefits are not clear.

## New Attack Vectors

A group of malicious validators may try to frame a reliable validator and put it
on the negative UNL. But they cannot succeed. Because:

1. A reliable validator sends a signed validation message every ledger. A
   sufficient peer-to-peer network will propagate the validation messages to other
   validators. The validators will decide if another validator is reliable or not
   only by its local observation of the validation messages received. So an honest
   validator’s vote on another validator’s reliability is accurate.

1. Given the votes are accurate, and one vote per validator, an honest validator
   will not create a UNLModify transaction of a reliable validator.

1. A validator can be added to a negative UNL only through a UNLModify
   transaction.

Assuming the group of malicious validators is less than the quorum, they cannot
frame a reliable validator.

@@ -365,32 +356,32 @@ frame a reliable validator.

The bullet points below briefly summarize the current proposal:

* The motivation of the negative UNL is to improve the liveness of the network.
- The motivation of the negative UNL is to improve the liveness of the network.

* The targeted faults are the ones frequently observed in the production
- The targeted faults are the ones frequently observed in the production
  network.

* Validators propose negative UNL candidates based on their local measurements.
- Validators propose negative UNL candidates based on their local measurements.

* The absolute minimum quorum is 60% of the original UNL.
- The absolute minimum quorum is 60% of the original UNL.

* The format of the ledger is changed, and a new *UNLModify* pseudo-transaction
- The format of the ledger is changed, and a new _UNLModify_ pseudo-transaction
  is added. Any tools or systems that rely on the format of these data will have
  to be updated.

* The negative UNL can only be modified on the flag ledgers.
- The negative UNL can only be modified on the flag ledgers.

* At most one validator can be added to the negative UNL at a flag ledger.
- At most one validator can be added to the negative UNL at a flag ledger.

* At most one validator can be removed from the negative UNL at a flag ledger.
- At most one validator can be removed from the negative UNL at a flag ledger.

* If a validator's reliability status changes, it takes two flag ledgers to
- If a validator's reliability status changes, it takes two flag ledgers to
  modify the negative UNL.

* The quorum is the larger of 80% of the effective UNL and 60% of the original
- The quorum is the larger of 80% of the effective UNL and 60% of the original
  UNL.

* If a validator is on the negative UNL, its validation messages are ignored
- If a validator is on the negative UNL, its validation messages are ignored
  when the local node verifies if a ledger is fully validated.

## FAQ

@@ -415,7 +406,7 @@ lower quorum size while keeping the network safe.
validator removed from the negative UNL? </h3>

A validator’s reliability is measured by other validators. If a validator
becomes unreliable, at a flag ledger, other validators propose *UNLModify*
becomes unreliable, at a flag ledger, other validators propose _UNLModify_
pseudo-transactions which vote the validator to add to the negative UNL during
the consensus session. If agreed, the validator is added to the negative UNL at
the next flag ledger. The mechanism of removing a validator from the negative
@@ -423,32 +414,32 @@ UNL is the same.

### Question: Given a negative UNL, what happens if the UNL changes?

Answer: Let’s consider the cases:

1. A validator is added to the UNL, and it is already in the negative UNL. This
   case could happen when not all the nodes have the same UNL. Note that the
   negative UNL on the ledger lists unreliable nodes that are not necessarily the
   validators for everyone.

   In this case, the liveness is affected negatively. Because the minimum
   quorum could be larger but the usable validators are not increased.

1. A validator is removed from the UNL, and it is in the negative UNL.

   In this case, the liveness is affected positively. Because the quorum could
   be smaller but the usable validators are not reduced.

1. A validator is added to the UNL, and it is not in the negative UNL.
1. A validator is removed from the UNL, and it is not in the negative UNL.

Case 3 and 4 are not affected by the negative UNL protocol.

### Question: Can we simply lower the quorum to 60% without the negative UNL?

Answer: No, because the negative UNL approach is safer.

First let’s compare the two approaches intuitively, (1) the *negative UNL*
approach, and (2) *lower quorum*: simply lowering the quorum from 80% to 60%
First let’s compare the two approaches intuitively, (1) the _negative UNL_
approach, and (2) _lower quorum_: simply lowering the quorum from 80% to 60%
without the negative UNL. The negative UNL approach uses consensus to come up
with a list of unreliable validators, which are then removed from the effective
UNL temporarily. With this approach, the list of unreliable validators is agreed
@@ -462,75 +453,75 @@ Next we compare the two approaches quantitatively with examples, and apply
Theorem 8 of [Analysis of the XRP Ledger Consensus
Protocol](https://arxiv.org/abs/1802.07242) paper:

*XRP LCP guarantees fork safety if **O<sub>i,j</sub> > n<sub>j</sub> / 2 +
n<sub>i</sub> − q<sub>i</sub> + t<sub>i,j</sub>** for every pair of nodes
P<sub>i</sub>, P<sub>j</sub>,*
_XRP LCP guarantees fork safety if **O<sub>i,j</sub> > n<sub>j</sub> / 2 +
n<sub>i</sub> − q<sub>i</sub> + t<sub>i,j</sub>** for every pair of nodes
P<sub>i</sub>, P<sub>j</sub>,_

where *O<sub>i,j</sub>* is the overlapping requirement, n<sub>j</sub> and
where _O<sub>i,j</sub>_ is the overlapping requirement, n<sub>j</sub> and
n<sub>i</sub> are UNL sizes, q<sub>i</sub> is the quorum size of P<sub>i</sub>,
*t<sub>i,j</sub> = min(t<sub>i</sub>, t<sub>j</sub>, O<sub>i,j</sub>)*, and
_t<sub>i,j</sub> = min(t<sub>i</sub>, t<sub>j</sub>, O<sub>i,j</sub>)_, and
t<sub>i</sub> and t<sub>j</sub> are the number of faults can be tolerated by
P<sub>i</sub> and P<sub>j</sub>.

We denote *UNL<sub>i</sub>* as *P<sub>i</sub>'s UNL*, and *|UNL<sub>i</sub>|* as
the size of *P<sub>i</sub>'s UNL*.
We denote _UNL<sub>i</sub>_ as _P<sub>i</sub>'s UNL_, and _|UNL<sub>i</sub>|_ as
the size of _P<sub>i</sub>'s UNL_.

Assuming *|UNL<sub>i</sub>| = |UNL<sub>j</sub>|*, let's consider the following
|
||||
Assuming _|UNL<sub>i</sub>| = |UNL<sub>j</sub>|_, let's consider the following
|
||||
three cases:
|
||||
|
||||
1. With 80% quorum and 20% faults, *O<sub>i,j</sub> > 100% / 2 + 100% - 80% +
|
||||
20% = 90%*. I.e. fork safety requires > 90% UNL overlaps. This is one of the
|
||||
results in the analysis paper.
|
||||
1. With 80% quorum and 20% faults, _O<sub>i,j</sub> > 100% / 2 + 100% - 80% +
|
||||
20% = 90%_. I.e. fork safety requires > 90% UNL overlaps. This is one of the
|
||||
results in the analysis paper.
|
||||
|
||||
1. If the quorum is 60%, the relationship between the overlapping requirement
|
||||
and the faults that can be tolerated is *O<sub>i,j</sub> > 90% +
|
||||
t<sub>i,j</sub>*. Under the same overlapping condition (i.e. 90%), to guarantee
|
||||
the fork safety, the network cannot tolerate any faults. So under the same
|
||||
overlapping condition, if the quorum is simply lowered, the network can tolerate
|
||||
fewer faults.
|
||||
1. If the quorum is 60%, the relationship between the overlapping requirement
|
||||
and the faults that can be tolerated is _O<sub>i,j</sub> > 90% +
|
||||
t<sub>i,j</sub>_. Under the same overlapping condition (i.e. 90%), to guarantee
|
||||
the fork safety, the network cannot tolerate any faults. So under the same
|
||||
overlapping condition, if the quorum is simply lowered, the network can tolerate
|
||||
fewer faults.
|
||||
|
||||
1. With the negative UNL approach, we want to argue that the inequation
|
||||
*O<sub>i,j</sub> > n<sub>j</sub> / 2 + n<sub>i</sub> − q<sub>i</sub> +
|
||||
t<sub>i,j</sub>* is always true to guarantee fork safety, while the negative UNL
|
||||
protocol runs, i.e. the effective quorum is lowered without weakening the
|
||||
network's fault tolerance. To make the discussion easier, we rewrite the
|
||||
inequation as *O<sub>i,j</sub> > n<sub>j</sub> / 2 + (n<sub>i</sub> −
|
||||
q<sub>i</sub>) + min(t<sub>i</sub>, t<sub>j</sub>)*, where O<sub>i,j</sub> is
|
||||
dropped from the definition of t<sub>i,j</sub> because *O<sub>i,j</sub> >
|
||||
min(t<sub>i</sub>, t<sub>j</sub>)* always holds under the parameters we will
|
||||
use. Assuming a validator V is added to the negative UNL, now let's consider the
|
||||
4 cases:
|
||||
1. With the negative UNL approach, we want to argue that the inequation
|
||||
_O<sub>i,j</sub> > n<sub>j</sub> / 2 + n<sub>i</sub> − q<sub>i</sub> +
|
||||
t<sub>i,j</sub>_ is always true to guarantee fork safety, while the negative UNL
|
||||
protocol runs, i.e. the effective quorum is lowered without weakening the
|
||||
network's fault tolerance. To make the discussion easier, we rewrite the
|
||||
inequation as _O<sub>i,j</sub> > n<sub>j</sub> / 2 + (n<sub>i</sub> −
|
||||
q<sub>i</sub>) + min(t<sub>i</sub>, t<sub>j</sub>)_, where O<sub>i,j</sub> is
|
||||
dropped from the definition of t<sub>i,j</sub> because _O<sub>i,j</sub> >
|
||||
min(t<sub>i</sub>, t<sub>j</sub>)_ always holds under the parameters we will
|
||||
use. Assuming a validator V is added to the negative UNL, now let's consider the
|
||||
4 cases:
|
||||
|
||||
1. V is not on UNL<sub>i</sub> nor UNL<sub>j</sub>
|
||||
1. V is not on UNL<sub>i</sub> nor UNL<sub>j</sub>
|
||||
|
||||
The inequation holds because none of the variables change.
|
||||
The inequation holds because none of the variables change.
|
||||
|
||||
1. V is on UNL<sub>i</sub> but not on UNL<sub>j</sub>
|
||||
1. V is on UNL<sub>i</sub> but not on UNL<sub>j</sub>
|
||||
|
||||
The value of *(n<sub>i</sub> − q<sub>i</sub>)* is smaller. The value of
|
||||
*min(t<sub>i</sub>, t<sub>j</sub>)* could be smaller too. Other
|
||||
variables do not change. Overall, the left side of the inequation does
|
||||
not change, but the right side is smaller. So the inequation holds.
|
||||
|
||||
1. V is not on UNL<sub>i</sub> but on UNL<sub>j</sub>
|
||||
The value of *(n<sub>i</sub> − q<sub>i</sub>)* is smaller. The value of
|
||||
*min(t<sub>i</sub>, t<sub>j</sub>)* could be smaller too. Other
|
||||
variables do not change. Overall, the left side of the inequation does
|
||||
not change, but the right side is smaller. So the inequation holds.
|
||||
|
||||
The value of *n<sub>j</sub> / 2* is smaller. The value of
|
||||
*min(t<sub>i</sub>, t<sub>j</sub>)* could be smaller too. Other
|
||||
variables do not change. Overall, the left side of the inequation does
|
||||
not change, but the right side is smaller. So the inequation holds.
|
||||
|
||||
1. V is on both UNL<sub>i</sub> and UNL<sub>j</sub>
|
||||
1. V is not on UNL<sub>i</sub> but on UNL<sub>j</sub>
|
||||
|
||||
The value of *O<sub>i,j</sub>* is reduced by 1. The values of
|
||||
*n<sub>j</sub> / 2*, *(n<sub>i</sub> − q<sub>i</sub>)*, and
|
||||
*min(t<sub>i</sub>, t<sub>j</sub>)* are reduced by 0.5, 0.2, and 1
|
||||
respectively. The right side is reduced by 1.7. Overall, the left side
|
||||
of the inequation is reduced by 1, and the right side is reduced by 1.7.
|
||||
So the inequation holds.
|
||||
The value of *n<sub>j</sub> / 2* is smaller. The value of
|
||||
*min(t<sub>i</sub>, t<sub>j</sub>)* could be smaller too. Other
|
||||
variables do not change. Overall, the left side of the inequation does
|
||||
not change, but the right side is smaller. So the inequation holds.
|
||||
|
||||
The inequation holds for all the cases. So with the negative UNL approach,
|
||||
the network's fork safety is preserved, while the quorum is lowered that
|
||||
increases the network's liveness.
|
||||
1. V is on both UNL<sub>i</sub> and UNL<sub>j</sub>
|
||||
|
||||
The value of *O<sub>i,j</sub>* is reduced by 1. The values of
|
||||
*n<sub>j</sub> / 2*, *(n<sub>i</sub> − q<sub>i</sub>)*, and
|
||||
*min(t<sub>i</sub>, t<sub>j</sub>)* are reduced by 0.5, 0.2, and 1
|
||||
respectively. The right side is reduced by 1.7. Overall, the left side
|
||||
of the inequation is reduced by 1, and the right side is reduced by 1.7.
|
||||
So the inequation holds.
|
||||
|
||||
The inequation holds for all the cases. So with the negative UNL approach,
|
||||
the network's fork safety is preserved, while the quorum is lowered that
|
||||
increases the network's liveness.
|
||||
|
||||
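The case analysis above can be spot-checked numerically. The sketch below evaluates the rewritten condition for the example parameters (80% quorum, 20% fault tolerance); the function name is hypothetical and the values are illustrative:

```cpp
#include <algorithm>
#include <iostream>

// Sketch: check the rewritten fork-safety condition
//     O(i,j) > n(j) / 2 + (n(i) - q(i)) + min(t(i), t(j))
// for concrete values.
bool
forkSafe(double Oij, double ni, double nj, double qi, double ti, double tj)
{
    return Oij > nj / 2 + (ni - qi) + std::min(ti, tj);
}

int
main()
{
    // Case 1: 100-node UNLs, 80% quorum, 20% faults. Fork safety needs
    // overlap greater than 90 of the 100 nodes.
    std::cout << forkSafe(91, 100, 100, 80, 20, 20) << '\n';  // 1
    std::cout << forkSafe(90, 100, 100, 80, 20, 20) << '\n';  // 0

    // Sub-case 4 of the negative UNL argument: V is on both UNLs. The
    // left side shrinks by 1 while the right side shrinks by
    // 0.5 + 0.2 + 1 = 1.7, so the inequality keeps holding.
    std::cout << forkSafe(90, 99, 99, 0.8 * 99, 0.2 * 99, 0.2 * 99)
              << '\n';  // 1
}
```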
<h3> Question: We have observed that occasionally a validator wanders off on its
own chain. How is this case handled by the negative UNL algorithm? </h3>
@@ -565,11 +556,11 @@ will be used after that. We want to see the test cases still pass with real
network delay. A test case specifies:

1. a UNL with a different number of validators for different test cases,
1. a network with zero or more non-validator nodes,
1. a sequence of validator reliability change events (by killing/restarting
   nodes, or by running modified rippled that does not send all validation
   messages),
1. the correct outcomes.

For all the test cases, the correct outcomes are verified by examining logs. We
will grep the log to see if the correct negative UNLs are generated, and whether
@@ -579,6 +570,7 @@ timing parameters of rippled will be changed to have faster ledger time. Most if
not all test cases do not need client transactions.

For example, the test cases for the prototype:

1. A 10-validator UNL.
1. The network does not have other nodes.
1. The validators will be started from the genesis. Once they start to produce
@@ -587,11 +579,11 @@ For example, the test cases for the prototype:
1. A sequence of events (or the lack of events) such as a killed validator is
   added to the negative UNL.

#### Roads Not Taken: Test with Extended CSF

We considered testing with the current unit test framework, specifically the
[Consensus Simulation
Framework](https://github.com/ripple/rippled/blob/develop/src/test/csf/README.md)
(CSF). However, the CSF currently can only test the generic consensus algorithm
as in the paper: [Analysis of the XRP Ledger Consensus
Protocol](https://arxiv.org/abs/1802.07242).

@@ -5,8 +5,8 @@ skinparam roundcorner 20
skinparam maxmessagesize 160

actor "Rippled Start" as RS
participant "Timer" as T
participant "NetworkOPs" as NOP
participant "ValidatorList" as VL #lightgreen
participant "Consensus" as GC
participant "ConsensusAdaptor" as CA #lightgreen
@@ -20,7 +20,7 @@ VL -> NOP
NOP -> VL: update trusted validators
activate VL
VL -> VL: re-calculate quorum
hnote over VL#lightgreen: ignore negative listed validators\nwhen calculating the quorum
VL -> NOP
deactivate VL
NOP -> GC: start round
@@ -36,14 +36,14 @@ activate GC
end

alt phase == OPEN
alt should close ledger
GC -> GC: phase = ESTABLISH
GC -> CA: onClose
activate CA
alt sqn%256==0
CA -[#green]> RM: <font color=green>getValidations
CA -[#green]> CA: <font color=green>create UNLModify Tx
hnote over CA#lightgreen: use validations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
end
CA -> GC
GC -> CA: propose
@@ -61,14 +61,14 @@ else phase == ESTABLISH
CA -> CA : build LCL
hnote over CA #lightgreen: copy negative UNL from parent ledger
alt sqn%256==0
CA -[#green]> CA: <font color=green>Adjust negative UNL
CA -[#green]> CA: <font color=green>apply UNLModify Tx
end
CA -> CA : validate and send validation message
activate NOP
CA -> NOP : end consensus and\n<b>begin next consensus round
deactivate NOP
deactivate CA
hnote over RM: receive validations
end
else phase == ACCEPTED
@@ -76,4 +76,4 @@ else phase == ACCEPTED
end
deactivate GC

@enduml
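In the diagram, the `sqn%256==0` guards mark flag ledgers, where UNLModify pseudo-transactions can be created and applied. A minimal sketch of that check (the constant and function names here are illustrative, not rippled's exact API):

```cpp
#include <cstdint>

// Flag ledgers occur every 256 ledgers; the negative UNL can only be
// adjusted there. Sketch only; names are hypothetical.
constexpr std::uint32_t flagLedgerInterval = 256;

constexpr bool
isFlagLedger(std::uint32_t seq)
{
    return seq % flagLedgerInterval == 0;
}

static_assert(isFlagLedger(256) && !isFlagLedger(257), "sanity check");
```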
@@ -82,7 +82,9 @@ pattern and the way coroutines are implemented, where every yield saves the spot
in the code where it left off and every resume jumps back to that spot.

### Sequence Diagram

![]()

### Class Diagram

![]()

@@ -4,7 +4,7 @@ class TimeoutCounter {
#app_ : Application&
}

TimeoutCounter o-- "1" Application
': app_

Stoppable <.. Application
@@ -14,13 +14,13 @@ class Application {
-m_inboundLedgers : uptr<InboundLedgers>
}

Application *-- "1" LedgerReplayer
': m_ledgerReplayer
Application *-- "1" InboundLedgers
': m_inboundLedgers

Stoppable <.. InboundLedgers
Application "1" --o InboundLedgers
': app_

class InboundLedgers {
@@ -28,9 +28,9 @@ class InboundLedgers {
}

Stoppable <.. LedgerReplayer
InboundLedgers "1" --o LedgerReplayer
': inboundLedgers_
Application "1" --o LedgerReplayer
': app_

class LedgerReplayer {
@@ -42,17 +42,17 @@ class LedgerReplayer {
-skipLists_ : hash_map<u256, wptr<SkipListAcquire>>
}

LedgerReplayer *-- LedgerReplayTask
': tasks_
LedgerReplayer o-- LedgerDeltaAcquire
': deltas_
LedgerReplayer o-- SkipListAcquire
': skipLists_

TimeoutCounter <.. LedgerReplayTask
InboundLedgers "1" --o LedgerReplayTask
': inboundLedgers_
LedgerReplayer "1" --o LedgerReplayTask
': replayer_

class LedgerReplayTask {
@@ -63,15 +63,15 @@ class LedgerReplayTask {
+addDelta(sptr<LedgerDeltaAcquire>)
}

LedgerReplayTask *-- "1" SkipListAcquire
': skipListAcquirer_
LedgerReplayTask *-- LedgerDeltaAcquire
': deltas_

TimeoutCounter <.. SkipListAcquire
InboundLedgers "1" --o SkipListAcquire
': inboundLedgers_
LedgerReplayer "1" --o SkipListAcquire
': replayer_
LedgerReplayTask --o SkipListAcquire : implicit via callback

@@ -83,9 +83,9 @@ class SkipListAcquire {
}

TimeoutCounter <.. LedgerDeltaAcquire
InboundLedgers "1" --o LedgerDeltaAcquire
': inboundLedgers_
LedgerReplayer "1" --o LedgerDeltaAcquire
': replayer_
LedgerReplayTask --o LedgerDeltaAcquire : implicit via callback

@@ -95,4 +95,4 @@ class LedgerDeltaAcquire {
-replayer_ : LedgerReplayer&
-dataReadyCallbacks_ : vector<callback>
}
@enduml

@@ -38,7 +38,7 @@ deactivate lr
loop
lr -> lda : make_shared(ledgerId, ledgerSeq)
return delta
lr -> lrt : addDelta(delta)
lrt -> lda : addDataCallback(callback)
return
@@ -62,7 +62,7 @@ deactivate peer
lr -> lda : processData(ledgerHeader, txns)
lda -> lda : notify()
note over lda: call the callbacks added by\naddDataCallback(callback).
lda -> lrt : callback(ledgerId)
lrt -> lrt : deltaReady(ledgerId)
lrt -> lrt : tryAdvance()
loop as long as child can be built
@@ -82,4 +82,4 @@ deactivate peer
deactivate peer

@enduml
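For orientation, the callback handshake in the sequence diagram (`addDataCallback` followed later by `notify`) boils down to the pattern sketched below. This is a simplification with hypothetical names, not the actual `LedgerDeltaAcquire` implementation, which also deals with timeouts, peers, and shared ownership:

```cpp
#include <functional>
#include <mutex>
#include <vector>

// Sketch: a task registers interest via addDataCallback(); once the data
// arrives, notify() invokes each registered callback exactly once.
class DataReadyNotifier
{
    std::mutex mtx_;
    std::vector<std::function<void()>> callbacks_;

public:
    void
    addDataCallback(std::function<void()> cb)
    {
        std::lock_guard<std::mutex> lock(mtx_);
        callbacks_.push_back(std::move(cb));
    }

    void
    notify()
    {
        std::vector<std::function<void()>> pending;
        {
            std::lock_guard<std::mutex> lock(mtx_);
            pending.swap(callbacks_);
        }
        for (auto const& cb : pending)
            cb();  // e.g. ends up in LedgerReplayTask::deltaReady(ledgerId)
    }
};
```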
@@ -16,5 +16,5 @@
## Function

- Minimize external dependencies
  - Pass options in the ctor instead of using theConfig
  - Use as few other classes as possible

@@ -1,18 +1,18 @@
# Coding Standards

Coding standards used here gradually evolve and propagate through
code reviews. Some aspects are enforced more strictly than others.

## Rules

These rules only apply to our own code. We can't enforce any sort of
style on the external repositories and libraries we include. The best
guideline is to maintain the standards that are used in those libraries.

- Tab inserts 4 spaces. No tab characters.
- Braces are indented in the [Allman style][1].
- Modern C++ principles. No naked `new` or `delete`.
- Line lengths limited to 80 characters. Exceptions limited to data and tables.

## Guidelines

@@ -21,17 +21,17 @@ why you're doing it. Think, use common sense, and consider that this
your changes will probably need to be maintained long after you've
moved on to other projects.

- Use white space and blank lines to guide the eye and keep your intent clear.
- Put private data members at the top of a class, and the 6 public special
  members immediately after, in the following order:
  - Destructor
  - Default constructor
  - Copy constructor
  - Copy assignment
  - Move constructor
  - Move assignment
- Don't over-inline by defining large functions within the class
  declaration, not even for template classes.
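For example, a class following that ordering might be laid out like this (illustrative skeleton only):

```cpp
// Private data first, then the 6 public special members in the order
// listed above.
class Example
{
private:
    int value_ = 0;

public:
    ~Example() = default;
    Example() = default;
    Example(Example const&) = default;
    Example& operator=(Example const&) = default;
    Example(Example&&) = default;
    Example& operator=(Example&&) = default;
};
```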
## Formatting

@@ -39,44 +39,44 @@ The goal of source code formatting should always be to make things as easy to
read as possible. White space is used to guide the eye so that details are not
overlooked. Blank lines are used to separate code into "paragraphs."

- Always place a space before and after all binary operators,
  especially assignments (`operator=`).
- The `!` operator should be preceded by a space, but not followed by one.
- The `~` operator should be preceded by a space, but not followed by one.
- The `++` and `--` operators should have no spaces between the operator and
  the operand.
- A space never appears before a comma, and always appears after a comma.
- Don't put spaces after a parenthesis. A typical member function call might
  look like this: `foobar (1, 2, 3);`
- In general, leave a blank line before an `if` statement.
- In general, leave a blank line after a closing brace `}`.
- Do not place code on the same line as any opening or
  closing brace.
- Do not write `if` statements all-on-one-line. The exception to this is when
  you've got a sequence of similar `if` statements, and are aligning them all
  vertically to highlight their similarities.
- In an `if-else` statement, if you surround one half of the statement with
  braces, you also need to put braces around the other half, to match.
- When writing a pointer type, use this spacing: `SomeObject* myObject`.
  Technically, a more correct spacing would be `SomeObject *myObject`, but
  it makes more sense for the asterisk to be grouped with the type name,
  since being a pointer is part of the type, not the variable name. The only
  time that this can lead to any problems is when you're declaring multiple
  pointers of the same type in the same statement - which leads on to the next
  rule:
- When declaring multiple pointers, never do so in a single statement, e.g.
  `SomeObject* p1, *p2;` - instead, always split them out onto separate lines
  and write the type name again, to make it quite clear what's going on, and
  avoid the danger of missing out any vital asterisks.
- The previous point also applies to references, so always put the `&` next to
  the type rather than the variable, e.g. `void foo (Thing const& thing)`. And
  don't put a space on both sides of the `*` or `&` - always put a space after
  it, but never before it.
- The word `const` should be placed to the right of the thing that it modifies,
  for consistency. For example `int const` refers to an int which is const.
  `int const*` is a pointer to an int which is const. `int *const` is a const
  pointer to an int.
- Always place a space in between the template angle brackets and the type
  name. Template code is already hard enough to read!
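Several of the rules above, gathered into one illustrative fragment (not taken from the codebase):

```cpp
// Illustrative only: pointer/reference spacing, one pointer declaration
// per statement, const to the right of what it modifies, and braces on
// both halves of an if-else.
struct Thing
{
};

struct SomeObject
{
};

void
bar (SomeObject* target, int count)
{
    if (target != nullptr && count > 0)
    {
        // do work
    }
}

void
foo (Thing const& thing)
{
    SomeObject* first = nullptr;
    SomeObject* second = nullptr;

    int const count = 3;
    int const* view = &count;

    if (*view > 0)
    {
        bar (first, count);
    }
    else
    {
        bar (second, count);
    }
}
```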
[1]: http://en.wikipedia.org/wiki/Indent_style#Allman_style

@@ -31,7 +31,7 @@ and header under /opt/local/include:

    $ scons clang profile-jemalloc=/opt/local

---

## Using the jemalloc library from within the code

@@ -60,4 +60,3 @@ Linking against the jemalloc library will override
the system's default `malloc()` and related functions with jemalloc's
implementation. This is the case even if the code is not instrumented
to use jemalloc's specific API.

@@ -7,7 +7,6 @@ Install these dependencies:
- [Doxygen](http://www.doxygen.nl): All major platforms have [official binary
  distributions](http://www.doxygen.nl/download.html#srcbin), or you can
  build from [source](http://www.doxygen.nl/download.html#srcbin).

  - MacOS: We recommend installing via Homebrew: `brew install doxygen`.
    The executable will be installed in `/usr/local/bin` which is already
    in the default `PATH`.
@@ -21,18 +20,15 @@ Install these dependencies:
    $ ln -s /Applications/Doxygen.app/Contents/Resources/doxygen /usr/local/bin/doxygen
    ```

- [PlantUML](http://plantuml.com):

  1. Install a functioning Java runtime, if you don't already have one.
  2. Download [`plantuml.jar`](http://sourceforge.net/projects/plantuml/files/plantuml.jar/download).

- [Graphviz](https://www.graphviz.org):

  - Linux: Install from your package manager.
  - Windows: Use an [official installer](https://graphviz.gitlab.io/_pages/Download/Download_windows.html).
  - MacOS: Install via Homebrew: `brew install graphviz`.

## Docker

Instead of installing the above dependencies locally, you can use the official
@@ -40,14 +36,16 @@ build environment Docker image, which has all of them installed already.

1. Install [Docker](https://docs.docker.com/engine/installation/)
2. Pull the image:

   ```
   sudo docker pull rippleci/rippled-ci-builder:2944b78d22db
   ```

3. Run the image from the project folder:

   ```
   sudo docker run -v $PWD:/opt/rippled --rm rippleci/rippled-ci-builder:2944b78d22db
   ```

## Build
docs/build/conan.md
@@ -5,7 +5,6 @@ we should first understand _why_ we use Conan,
and to understand that,
we need to understand how we use CMake.

### CMake

Technically, you don't need CMake to build this project.
@@ -33,9 +32,9 @@ Parameters include:
- where to find the compiler and linker
- where to find dependencies, e.g. libraries and headers
- how to link dependencies, e.g. any special compiler or linker flags that
  need to be used with them, including preprocessor definitions
- how to compile translation units, e.g. with optimizations, debug symbols,
  position-independent code, etc.
- on Windows, which runtime library to link with

For some of these parameters, like the build system and compiler,
@@ -54,7 +53,6 @@ Most humans prefer to put them into a configuration file, once, that
CMake can read every time it is configured.
For CMake, that file is a [toolchain file][toolchain].

### Conan

These next few paragraphs on Conan are going to read much like the ones above
@@ -79,10 +77,10 @@ Those files include:

- A single toolchain file.
- For every dependency, a CMake [package configuration file][pcf],
  [package version file][pvf], and for every build type, a package
  targets file.
  Together, these files implement version checking and define `IMPORTED`
  targets for the dependencies.

The toolchain file itself amends the search path
([`CMAKE_PREFIX_PATH`][prefix_path]) so that [`find_package()`][find_package]
docs/build/depend.md
@@ -2,8 +2,7 @@ We recommend two different methods to depend on libxrpl in your own [CMake][]
project.
Both methods add a CMake library target named `xrpl::libxrpl`.

## Conan requirement

The first method adds libxrpl as a [Conan][] requirement.
With this method, there is no need for a Git [submodule][].
@@ -48,7 +47,6 @@ cmake \
cmake --build . --parallel
```

## CMake subdirectory

The second method adds the [rippled][] project as a CMake
@@ -90,7 +88,6 @@ cmake \
cmake --build . --parallel
```

[add_subdirectory]: https://cmake.org/cmake/help/latest/command/add_subdirectory.html
[submodule]: https://git-scm.com/book/en/v2/Git-Tools-Submodules
[rippled]: https://github.com/ripple/rippled
docs/build/environment.md
@@ -5,42 +5,39 @@ platforms: Linux, macOS, or Windows.

[BUILD.md]: ../../BUILD.md

## Linux

Package ecosystems vary across Linux distributions,
so there is no one set of instructions that will work for every Linux user.
The instructions below are written for Debian 12 (Bookworm).

```
export GCC_RELEASE=12
sudo apt update
sudo apt install --yes gcc-${GCC_RELEASE} g++-${GCC_RELEASE} python3-pip \
    python-is-python3 python3-venv python3-dev curl wget ca-certificates \
    git build-essential cmake ninja-build libc6-dev
sudo pip install --break-system-packages conan

sudo update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-${GCC_RELEASE} 999
sudo update-alternatives --install \
    /usr/bin/gcc gcc /usr/bin/gcc-${GCC_RELEASE} 100 \
    --slave /usr/bin/g++ g++ /usr/bin/g++-${GCC_RELEASE} \
    --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-${GCC_RELEASE} \
    --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-${GCC_RELEASE} \
    --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-${GCC_RELEASE} \
    --slave /usr/bin/gcov gcov /usr/bin/gcov-${GCC_RELEASE} \
    --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-${GCC_RELEASE} \
    --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-${GCC_RELEASE} \
    --slave /usr/bin/lto-dump lto-dump /usr/bin/lto-dump-${GCC_RELEASE}
sudo update-alternatives --auto cc
sudo update-alternatives --auto gcc
```

If you use a different Linux distribution, we hope the instructions above can
guide you in the right direction. We try to maintain compatibility with all
recent compiler releases, so if you use a rolling distribution like e.g. Arch
or CentOS then there is a chance that everything will "just work".

## macOS

@@ -53,6 +50,33 @@ minimum required (see [BUILD.md][]).
clang --version
```

### Install Xcode Specific Version (Optional)

If you develop other applications using Xcode you might be consistently
updating to the newest version of Apple Clang. This will likely cause issues
building rippled. You may want to install a specific version of Xcode:

1. **Download Xcode**

   - Visit [Apple Developer Downloads](https://developer.apple.com/download/more/)
   - Sign in with your Apple Developer account
   - Search for an Xcode version that includes **Apple Clang (Expected Version)**
   - Download the `.xip` file

2. **Install and Configure Xcode**

   ```bash
   # Extract the .xip file and rename for version management
   # Example: Xcode_16.2.app

   # Move to Applications directory
   sudo mv Xcode_16.2.app /Applications/

   # Set as default toolchain (persistent)
   sudo xcode-select -s /Applications/Xcode_16.2.app/Contents/Developer

   # Set as environment variable (temporary)
   export DEVELOPER_DIR=/Applications/Xcode_16.2.app/Contents/Developer
   ```

The command line developer tools should include Git too:

```
@@ -72,10 +96,10 @@ and use it to install Conan:
brew update
brew install xz
brew install pyenv
pyenv install 3.11
pyenv global 3.11
eval "$(pyenv init -)"
pip install 'conan'
```

Install CMake with Homebrew too:
docs/build/install.md
@@ -6,7 +6,6 @@ like CentOS.
Installing from source is an option for all platforms,
and the only supported option for installing custom builds.

## From source

From a source build, you can install rippled and libxrpl using CMake's
@@ -21,25 +20,23 @@ The default [prefix][1] is typically `/usr/local` on Linux and macOS and

[1]: https://cmake.org/cmake/help/latest/variable/CMAKE_INSTALL_PREFIX.html

## With the APT package manager

1. Update repositories:

       sudo apt update -y

2. Install utilities:

       sudo apt install -y apt-transport-https ca-certificates wget gnupg

3. Add Ripple's package-signing GPG key to your list of trusted keys:

       sudo mkdir /usr/local/share/keyrings/
       wget -q -O - "https://repos.ripple.com/repos/api/gpg/key/public" | gpg --dearmor > ripple-key.gpg
       sudo mv ripple-key.gpg /usr/local/share/keyrings

4. Check the fingerprint of the newly-added key:

       gpg /usr/local/share/keyrings/ripple-key.gpg

@@ -51,37 +48,34 @@ The default [prefix][1] is typically `/usr/local` on Linux and macOS and
       uid TechOps Team at Ripple <techops+rippled@ripple.com>
       sub rsa3072 2019-02-14 [E] [expires: 2026-02-17]

   In particular, make sure that the fingerprint matches. (In the above example, the fingerprint is on the third line, starting with `C001`.)

5. Add the appropriate Ripple repository for your operating system version:

       echo "deb [signed-by=/usr/local/share/keyrings/ripple-key.gpg] https://repos.ripple.com/repos/rippled-deb focal stable" | \
           sudo tee -a /etc/apt/sources.list.d/ripple.list

   The above example is appropriate for **Ubuntu 20.04 Focal Fossa**. For other operating systems, replace the word `focal` with one of the following:

   - `jammy` for **Ubuntu 22.04 Jammy Jellyfish**
   - `bionic` for **Ubuntu 18.04 Bionic Beaver**
   - `bullseye` for **Debian 11 Bullseye**
   - `buster` for **Debian 10 Buster**

   If you want access to development or pre-release versions of `rippled`, use one of the following instead of `stable`:

   - `unstable` - Pre-release builds ([`release` branch](https://github.com/ripple/rippled/tree/release))
   - `nightly` - Experimental/development builds ([`develop` branch](https://github.com/ripple/rippled/tree/develop))

   **Warning:** Unstable and nightly builds may be broken at any time. Do not use these builds for production servers.

6. Fetch the Ripple repository.

       sudo apt -y update

7. Install the `rippled` software package:

       sudo apt -y install rippled

8. Check the status of the `rippled` service:

       systemctl status rippled.service

@@ -89,24 +83,22 @@ The default [prefix][1] is typically `/usr/local` on Linux and macOS and

       sudo systemctl start rippled.service

9. Optional: allow `rippled` to bind to privileged ports.

   This allows you to serve incoming API requests on port 80 or 443. (If you want to do so, you must also update the config file's port settings.)

       sudo setcap 'cap_net_bind_service=+ep' /opt/ripple/bin/rippled

## With the YUM package manager

1. Install the Ripple RPM repository:

   Choose the appropriate RPM repository for the stability of releases you want:

   - `stable` for the latest production release (`master` branch)
   - `unstable` for pre-release builds (`release` branch)
   - `nightly` for experimental/development builds (`develop` branch)

   _Stable_

       cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
       [ripple-stable]
@@ -118,7 +110,7 @@ The default [prefix][1] is typically `/usr/local` on Linux and macOS and
       gpgkey=https://repos.ripple.com/repos/rippled-rpm/stable/repodata/repomd.xml.key
       REPOFILE

   _Unstable_

       cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
       [ripple-unstable]
@@ -130,7 +122,7 @@ The default [prefix][1] is typically `/usr/local` on Linux and macOS and
       gpgkey=https://repos.ripple.com/repos/rippled-rpm/unstable/repodata/repomd.xml.key
       REPOFILE

   _Nightly_

       cat << REPOFILE | sudo tee /etc/yum.repos.d/ripple.repo
       [ripple-nightly]
@@ -142,18 +134,18 @@ The default [prefix][1] is typically `/usr/local` on Linux and macOS and
       gpgkey=https://repos.ripple.com/repos/rippled-rpm/nightly/repodata/repomd.xml.key
       REPOFILE

2. Fetch the latest repo updates:

       sudo yum -y update

3. Install the new `rippled` package:

       sudo yum install -y rippled

4. Configure the `rippled` service to start on boot:

       sudo systemctl enable rippled.service

5. Start the `rippled` service:

       sudo systemctl start rippled.service
@@ -3,7 +3,7 @@
**This section is a work in progress!!**

Consensus is the task of reaching agreement within a distributed system in the
presence of faulty or even malicious participants. This document outlines the
[XRP Ledger Consensus Algorithm](https://arxiv.org/abs/1802.07242)
as implemented in [rippled](https://github.com/ripple/rippled), but
focuses on its utility as a generic consensus algorithm independent of the
@@ -15,38 +15,38 @@ collectively trusted subnetworks.
## Distributed Agreement

A challenge for distributed systems is reaching agreement on changes in shared
state. For the Ripple network, the shared state is the current ledger--account
information, account balances, order books and other financial data. We will
refer to shared distributed state as a _ledger_ throughout the remainder of this
document.

![]()

As shown above, new ledgers are made by applying a set of transactions to the
prior ledger. For the Ripple network, transactions include payments,
modification of account settings, updates to offers and more.

In a centralized system, generating the next ledger is trivial since there is a
single unique arbiter of which transactions to include and how to apply them to
a ledger. For decentralized systems, participants must resolve disagreements on
the set of transactions to include, the order to apply those transactions, and
even the resulting ledger after applying the transactions. This is even more
difficult when some participants are faulty or malicious.

The Ripple network is a decentralized and **trust-full** network. Anyone is free
to join and participants are free to choose a subset of peers that are
collectively trusted to not collude in an attempt to defraud the participant.
Leveraging this network of trust, the Ripple algorithm has two main components.

- _Consensus_ in which network participants agree on the transactions to apply
  to a prior ledger, based on the positions of their chosen peers.
- _Validation_ in which network participants agree on what ledger was
  generated, based on the ledgers generated by chosen peers.

These phases are continually repeated to process transactions submitted to the
network, generating successive ledgers and giving rise to the blockchain ledger
history depicted below. In this diagram, time is flowing to the right, but
links between ledgers point backward to the parent. Also note the alternate
Ledger 2 that was generated by some participants, but which failed validation
and was abandoned.

@@ -54,7 +54,7 @@ and was abandoned.

The remainder of this section describes the Consensus and Validation algorithms
in more detail and is meant as a companion guide to understanding the generic
implementation in `rippled`. The document **does not** discuss correctness,
fault-tolerance or liveness properties of the algorithms or the full details of
how they integrate within `rippled` to support the Ripple Consensus Ledger.

@@ -62,76 +62,76 @@ how they integrate within `rippled` to support the Ripple Consensus Ledger.

### Definitions

- The _ledger_ is the shared distributed state. Each ledger has a unique ID to
  distinguish it from all other ledgers. During consensus, the _previous_,
  _prior_ or _last-closed_ ledger is the most recent ledger seen by consensus
  and is the basis upon which it will build the next ledger.
- A _transaction_ is an instruction for an atomic change in the ledger state. A
  unique ID distinguishes a transaction from other transactions.
- A _transaction set_ is a set of transactions under consideration by consensus.
  The goal of consensus is to reach agreement on this set. The generic
  consensus algorithm does not rely on an ordering of transactions within the
  set, nor does it specify how to apply a transaction set to a ledger to
  generate a new ledger. A unique ID distinguishes a set of transactions from
  all other sets of transactions.
- A _node_ is one of the distributed actors running the consensus algorithm. It
  has a unique ID to distinguish it from all other nodes.
- A _peer_ of a node is another node that it has chosen to follow and which it
  believes will not collude with other chosen peers. The choice of peers is not
  symmetric, since participants can decide on their chosen sets independently.
- A _position_ is the current belief of the next ledger's transaction set and
  close time. Position can refer to the node's own position or the position of a
  peer.
- A _proposal_ is one of a sequence of positions a node shares during consensus.
  An initial proposal contains the starting position taken by a node before it
  considers any peer positions. If a node subsequently updates its position in
  response to its peers, it will issue an updated proposal. A proposal is
  uniquely identified by the ID of the proposing node, the ID of the position
  taken, the ID of the prior ledger the proposal is for, and the sequence number
  of the proposal.
- A _dispute_ is a transaction that is either not part of a node's position or
  not in a peer's position. During consensus, the node will add or remove
  disputed transactions from its position based on that transaction's support
  amongst its peers.

Note that most types have an ID as a lightweight identifier of instances of that
type. Consensus often operates on the IDs directly since the underlying type is
potentially expensive to share over the network. For example, proposals only
contain the ID of the position of a peer. Since many peers likely have the same
position, this reduces the need to send the full transaction set multiple times.
Instead, a node can request the transaction set from the network if necessary.
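The ID-centric design can be pictured with a small sketch; the type and field names below are hypothetical and simplified relative to the generic consensus templates:

```cpp
#include <array>
#include <cstdint>

// Hypothetical 256-bit hash-based identifier.
using ID = std::array<std::uint8_t, 32>;

// A proposal carries only IDs: peers that already hold the position's
// transaction set never need the full set re-sent, and anyone missing it
// can request it from the network by ID.
struct Proposal
{
    ID nodeId;          // ID of the proposing node
    ID positionId;      // ID of the proposed transaction set
    ID prevLedgerId;    // ID of the prior ledger the proposal is for
    std::uint32_t seq;  // sequence number of the proposal
};
```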
### Overview

![]()

The diagram above is an overview of the consensus process from the perspective
of a single participant. Recall that during a single consensus round, a node is
trying to agree with its peers on which transactions to apply to its prior
ledger when generating the next ledger. It also attempts to agree on the
[network time when the ledger closed](#effective_close_time). There are
3 main phases to a consensus round:

- A call to `startRound` places the node in the `Open` phase. In this phase,
  the node is waiting for transactions to include in its open ledger.
- At some point, the node will `Close` the open ledger and transition to the
  `Establish` phase. In this phase, the node shares/receives peer proposals on
  which transactions should be accepted in the closed ledger.
- At some point, the node determines it has reached consensus with its peers on
  which transactions to include. It transitions to the `Accept` phase. In this
  phase, the node works on applying the transactions to the prior ledger to
  generate a new closed ledger. Once the new ledger is completed, the node shares
  the validated ledger hash with the network and makes a call to `startRound` to
  start the cycle again for the next ledger.
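The three phases can be summarized as a simple state progression. The sketch below is only a schematic of the transitions just described; the actual generic implementation tracks considerably more state:

```cpp
// Transitions out of Open and Establish happen only inside timerEntry().
enum class ConsensusPhase { Open, Establish, Accept };

ConsensusPhase
advance(ConsensusPhase phase, bool shouldClose, bool haveConsensus)
{
    if (phase == ConsensusPhase::Open && shouldClose)
        return ConsensusPhase::Establish;  // close the open ledger
    if (phase == ConsensusPhase::Establish && haveConsensus)
        return ConsensusPhase::Accept;     // build and share the new ledger
    return phase;  // otherwise keep waiting; Accept ends via startRound()
}
```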
Throughout, a heartbeat timer calls `timerEntry` at a regular frequency to drive
the process forward. Although the `startRound` call occurs at arbitrary times
based on when the initial round began and the time it takes to apply
transactions, the transitions from `Open` to `Establish` and `Establish` to
`Accept` only occur during calls to `timerEntry`. Similarly, transactions can
arrive at arbitrary times, independent of the heartbeat timer. Transactions
received after the `Open` to `Close` transition and not part of peer proposals
won't be considered until the next consensus round. They are represented above
by the light green triangles.

Peer proposals are issued by a node during a `timerEntry` call, but since peers
@@ -139,16 +139,16 @@ do not synchronize `timerEntry` calls, they are received by other peers at
arbitrary times. Peer proposals are only considered if received prior to the
`Establish` to `Accept` transition, and only if the peer is working on the same
prior ledger. Peer proposals received after consensus is reached will not be
meaningful and are represented above by the circle with the X in it. Only
proposals from chosen peers are considered.

### Effective Close Time ### {#effective_close_time}

In addition to agreeing on a transaction set, each consensus round tries to
agree on the time the ledger closed. Each node calculates its own close time
when it closes the open ledger. This exact close time is rounded to the nearest
multiple of the current _effective close time resolution_. It is this
_effective close time_ that nodes seek to agree on. This allows servers to
derive a common time for a ledger without the need for perfectly synchronized
clocks. As depicted below, the 3 pink arrows represent exact close times from 3
consensus nodes that round to the same effective close time given the current
@@ -158,9 +158,9 @@ different effective close time given the current resolution.
![]()

The effective close time is part of the node's position and is shared with peers
in its proposals. Just like the position on the consensus transaction set, a
node will update its close time position in response to its peers' effective
close time positions. Peers can agree to disagree on the close time, in which
case the effective close time is taken as 1 second past the prior close.
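The rounding step can be sketched as follows (a simplification with close times as plain integer seconds; rippled's actual helper may differ):

```cpp
#include <cstdint>

// Round an exact close time to the nearest multiple of the current
// effective close time resolution (both in seconds). Sketch only.
std::uint32_t
effectiveCloseTime(std::uint32_t exactCloseTime, std::uint32_t resolution)
{
    return ((exactCloseTime + resolution / 2) / resolution) * resolution;
}
// With a 10-second resolution, exact close times 58, 60 and 64 all round
// to 60, so nodes with slightly different clocks can still agree.
```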
The close time resolution is itself dynamic, decreasing (coarser) resolution in
|
||||
@@ -173,12 +173,12 @@ reach close time consensus.

### Modes

Internally, a node operates under one of the following consensus modes. Either of the first two modes may be chosen when a consensus round starts.

- _Proposing_ indicates the node is a full-fledged consensus participant. It takes on positions and sends proposals to its peers.
- _Observing_ indicates the node is a passive consensus participant. It maintains a position internally, but does not propose that position to its peers. Instead, it receives peer proposals and updates its position to track the majority of its peers. This may be preferred if the node is only being used to track the state of the network or during a start-up phase while it is still synchronizing with the network.

The other two modes are set internally during the consensus round when the node believes it is no longer working on the dominant ledger chain based on peer validations. It checks this on every call to `timerEntry`.

- _Wrong Ledger_ indicates the node is not working on the correct prior ledger and does not have it available. It requests that ledger from the network, but continues to work towards consensus this round while waiting. If it had been _proposing_, it will send a special "bowout" proposal to its peers to indicate its change in mode for the rest of this round. For the duration of the round, it defers to peer positions for determining the consensus outcome as if it were just _observing_.
- _Switch Ledger_ indicates that the node has acquired the correct prior ledger from the network. Although it now has the correct prior ledger, the fact that it had the wrong one at some point during this round means it is likely behind and should defer to peer positions for determining the consensus outcome.

![Consensus Modes](images/consensus_modes.png "Consensus Modes")

Once either the wrong ledger or switch ledger mode is reached, the node cannot return to proposing or observing until the next consensus round. However, the node could change its view of the correct prior ledger, so going from switch ledger to wrong ledger and back again is possible.
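
Taken together, the four modes form a small state space; a sketch of how they might be represented (the enumerator names here are illustrative):

```c++
// The four modes a node can operate under during a consensus round.
enum class ConsensusMode {
    proposing,       // full participant: takes positions and sends proposals
    observing,       // passive: tracks peers but does not propose
    wrongLedger,     // on an incorrect prior ledger it does not yet have
    switchedLedger,  // acquired the correct prior ledger mid-round
};
```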
The implementing code can use the mode to decide how best to generate the next ledger once it declares consensus.

### Phases

As depicted in the overview diagram, consensus is best viewed as a progression through 3 phases. There are 4 public methods of the generic consensus algorithm that determine this progression, as sketched below:

- `startRound` begins a consensus round.
- `timerEntry` is called at a regular frequency (`LEDGER_MIN_CLOSE`) and is the only call to consensus that can change the phase from `Open` to `Establish` or `Accept`.
- `peerProposal` is called whenever a peer proposal is received and is what allows a node to update its position in a subsequent `timerEntry` call.
- `gotTxSet` is called when a transaction set is received from the network. This is typically in response to a prior request from the node to acquire the transaction set corresponding to a disagreeing peer's position.
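
Sketched as a simplified interface, the four entry points look roughly like this (the real class takes additional arguments, and the nested `NetTime_t`, `Ledger_t`, `TxSet_t` and `PeerPosition_t` type names are placeholders for whatever the adaptor supplies):

```c++
// Simplified sketch of the generic consensus entry points.
template <class Adaptor>
class Consensus
{
public:
    // Kick off a new round on top of the given prior ledger.
    void startRound(
        typename Adaptor::NetTime_t const& now,
        typename Adaptor::Ledger_t::ID const& prevLedgerID,
        typename Adaptor::Ledger_t prevLedger,
        bool proposing);

    // Heartbeat: the only call that can move Open -> Establish -> Accept.
    void timerEntry(typename Adaptor::NetTime_t const& now);

    // A proposal arrived from a peer.
    void peerProposal(
        typename Adaptor::NetTime_t const& now,
        typename Adaptor::PeerPosition_t const& newProposal);

    // A transaction set arrived, typically one we previously asked for.
    void gotTxSet(
        typename Adaptor::NetTime_t const& now,
        typename Adaptor::TxSet_t const& txSet);
};
```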
The following sections describe each phase in more detail and what actions are taken in response to these calls.

#### Open

The `Open` phase is a quiescent period to allow transactions to build up in the node's open ledger. The duration is a trade-off between latency and throughput. A shorter window reduces the latency to generating the next ledger, but also reduces transaction throughput due to fewer transactions accepted into the ledger.

A call to `startRound` would forcibly begin the next consensus round, skipping completion of the current round. This is not expected during normal operation. Calls to `peerProposal` or `gotTxSet` simply store the proposal or transaction set for use in the coming `Establish` phase.
A call to `timerEntry` checks whether it is time to close the ledger.

Under normal circumstances, the open ledger period ends when one of the following is true:

- if there are transactions in the open ledger and more than `LEDGER_MIN_CLOSE` has elapsed. This is the typical behavior.
- if there are no open transactions and a suitably longer idle interval has elapsed. This increases the opportunity to get some transaction into the next ledger and avoids doing useless work closing an empty ledger.
- if more than half the number of prior round peers have already closed or finished this round. This indicates the node is falling behind and needs to catch up.
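
A sketch of that close decision under the three conditions above (a simplification of rippled's actual `shouldCloseLedger`, which considers a few more inputs; the 2 second stand-in for `LEDGER_MIN_CLOSE` is illustrative):

```c++
#include <chrono>
#include <cstddef>

using namespace std::chrono_literals;

// Illustrative only: should the open ledger close now?
bool
shouldCloseLedger(
    bool anyTransactions,                    // any tx in the open ledger?
    std::size_t prevProposers,               // proposers in the prior round
    std::size_t proposersClosed,             // peers already closed this round
    std::chrono::milliseconds openTime,      // how long the ledger has been open
    std::chrono::milliseconds idleInterval)  // longer window when idle
{
    // Over half the prior round's peers have moved on: we are falling behind.
    if (proposersClosed > prevProposers / 2)
        return true;

    // Typical case: transactions are waiting and the minimum window elapsed.
    if (anyTransactions)
        return openTime >= 2s;  // 2s is a stand-in for LEDGER_MIN_CLOSE

    // Empty open ledger: wait for the suitably longer idle interval.
    return openTime >= idleInterval;
}
```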

When closing the ledger, the node takes its initial position based on the transactions in the open ledger and uses the current time as its initial close time estimate. If in the proposing mode, the node shares its initial position with peers. Now that the node has taken a position, it will consider any peer positions for this round that arrived earlier. The node generates disputed transactions for each transaction not in common with a peer's position. The node also records the vote of each peer for each disputed transaction.

In the example below, we suppose our node has closed with transactions 1, 2 and 3. It creates disputes for transactions 2, 3 and 4, since at least one peer position differs on each.

##### disputes ##### {#disputes_image}



#### Establish

In the `Establish` phase, the node exchanges proposals with peers in an attempt to reach agreement on the consensus transactions and effective close time.

A call to `startRound` would forcibly begin the next consensus round, skipping completion of the current round. This is not expected during normal operation. Calls to `peerProposal` or `gotTxSet` that reflect new positions will generate disputed transactions for any new disagreements and will update the peer's vote for all disputed transactions.

A call to `timerEntry` first checks that the node is working from the correct prior ledger. If not, the node will update the mode and request the correct ledger. Otherwise, the node updates its position and considers whether to switch to the `Accepted` phase and declare consensus reached. However, at least `LEDGER_MIN_CONSENSUS` time must have elapsed before doing either. This allows peers an opportunity to take an initial position and share it.

##### Update Position

In order to achieve consensus, the node is looking for a transaction set that is supported by a super-majority of peers. The node works towards this set by adding or removing disputed transactions from its position based on an increasing threshold for inclusion.

By starting with a lower threshold, a node initially allows a wide set of transactions into its position. If the establish round continues and the node is "stuck", a higher threshold can focus on accepting transactions with the most support. The constants that define the thresholds and durations at which the thresholds change are given by `AV_XXX_CONSENSUS_PCT` and `AV_XXX_CONSENSUS_TIME` respectively, where `XXX` is `INIT`, `MID`, `LATE` and `STUCK`. The effective close time position is updated using the same thresholds.
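
A sketch of the escalating threshold schedule (the structure mirrors the avalanche constants named above, but treat the specific percentages and cut-over points here as illustrative rather than the definitive values):

```c++
#include <algorithm>
#include <chrono>

// Illustrative avalanche schedule: the further the establish phase has run,
// measured against the previous round's duration, the higher the bar for
// keeping a disputed transaction in our position.
int
avalancheThreshold(
    std::chrono::milliseconds elapsed,
    std::chrono::milliseconds prevRoundTime)
{
    auto const pct =
        (100 * elapsed.count()) / std::max<long long>(1, prevRoundTime.count());

    if (pct < 50)
        return 50;  // initial: cast a wide net
    if (pct < 85)
        return 65;  // mid
    if (pct < 200)
        return 70;  // late
    return 95;      // stuck: converge on whatever has overwhelming support
}
```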

Given the [example disputes above](#disputes_image) and an initial threshold of 50%, our node would retain its position since transaction 1 was not in dispute and transactions 2 and 3 have 75% support. Since its position did not change, it would not need to send a new proposal to peers. Peer C would not change either. Peer A would add transaction 3 to its position and Peer B would remove transaction 4 from its position; both would then send an updated position.

Conversely, if the diagram reflected a later call to `timerEntry` that occurs in the stuck region with a threshold of, say, 95%, our node would remove transactions 2 and 3 from its candidate set and send an updated position. Likewise, all the other peers would end up with only transaction 1 in their position.

Lastly, if our node were not in the proposing mode, it would not include its own vote when tallying support; in the example above, our node would maintain its position of transactions 1, 2 and 3.

##### Checking Consensus

After updating its position, the node checks for supermajority agreement with its peers on its current position. This agreement is of the exact transaction set, not just the support of individual transactions. That is, if our position is a subset of a peer's position, that counts as a disagreement. Also recall that effective close time agreement allows a supermajority of participants agreeing to disagree.

Consensus is declared when the following 3 clauses are true:

- `LEDGER_MIN_CONSENSUS` time has elapsed in the establish phase
- At least 75% of the prior round proposers have proposed OR this establish phase is `LEDGER_MIN_CONSENSUS` longer than the last round's establish phase
- `minimumConsensusPercentage` of ourself and our peers share the same position

The middle condition ensures slower peers have a chance to share positions, but prevents waiting too long on peers that have disconnected. Additionally, a node that is not proposing does not count its own position when applying this logic.
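
A sketch combining the three clauses (simplified relative to rippled's `checkConsensus`, which also detects when the rest of the network has moved on without us):

```c++
#include <chrono>
#include <cstddef>

// Illustrative check of the three consensus clauses.
bool
haveConsensus(
    std::chrono::milliseconds establishTime,      // time in establish phase
    std::chrono::milliseconds prevEstablishTime,  // last round's establish time
    std::size_t prevProposers,     // proposers in the prior round
    std::size_t currentProposers,  // proposers seen this round
    std::size_t agreeing,          // us + peers sharing our exact position
    std::size_t total,             // us + all proposing peers
    std::chrono::milliseconds minConsensusTime,  // LEDGER_MIN_CONSENSUS
    std::size_t minConsensusPct)   // minimumConsensusPercentage
{
    // Clause 1: a minimum time must have elapsed in the establish phase.
    if (establishTime < minConsensusTime)
        return false;

    // Clause 2: enough prior-round proposers have shown up, or we have
    // waited distinctly longer than last round's establish phase.
    if (currentProposers < (prevProposers * 3) / 4 &&
        establishTime < prevEstablishTime + minConsensusTime)
        return false;

    // Clause 3: a supermajority shares our exact position.
    return (100 * agreeing) / total >= minConsensusPct;
}
```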

Once consensus is reached (or moved on), the node switches to the `Accept` phase and signals to the implementing code that the round is complete. That code is responsible for using the consensus transaction set to generate the next ledger and calling `startRound` to begin the next round. The implementation has total freedom on ordering transactions, deciding what to do if consensus moved on, determining whether to retry or abandon local transactions that did not make the consensus set and updating any internal state based on the consensus progress.
#### Accept

The `Accept` phase is the terminal phase of the consensus algorithm. Calls to `timerEntry`, `peerProposal` and `gotTxSet` will not change the internal consensus state while in the accept phase. The expectation is that the application-specific code is working to generate the new ledger based on the consensus outcome. Once complete, that code should make a call to `startRound` to kick off the next consensus round. The `startRound` call includes the new prior ledger, prior ledger ID and whether the round should begin in the proposing or observing mode. After setting some initial state, the phase transitions to `Open`. The node will also check if the provided prior ledger and ID are correct, updating the mode and requesting the proper ledger from the network if necessary.

### Ledger

The `Ledger` type represents the state shared amongst the distributed participants. Notice that the details of how the next ledger is generated from the prior ledger and the consensus-accepted transaction set are not part of the interface. Within the generic code, this type is primarily used to know that peers are working on the same tip of the ledger chain and to provide some basic timing data for consensus.
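
A sketch of the shape such a type takes (the placeholder `ID`/`Seq` types and the `NetClock` alias are stand-ins; the generic code only requires that accessors like these exist):

```c++
#include <chrono>
#include <cstdint>

using NetClock = std::chrono::system_clock;  // stand-in for rippled's NetClock

// Sketch of what the generic code needs from a Ledger type.
struct Ledger
{
    using ID = std::uint64_t;   // placeholder; really a hash-like identifier
    using Seq = std::uint32_t;  // ledger sequence number

    ID id() const;
    Seq seq() const;

    // Timing data consensus uses to pace and round the next close time.
    NetClock::time_point closeTime() const;
    NetClock::duration closeTimeResolution() const;
    bool closeAgree() const;

    // Link to the prior ledger; lets peers confirm they share the chain tip.
    ID parentID() const;
    NetClock::time_point parentCloseTime() const;
};
```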

The application-supplied `struct Adaptor` includes callbacks such as:

```c++
// Called when consensus operating mode changes
void onModeChange(ConsensusMode before, ConsensusMode after);

// Called when ledger closes. Implementation should generate an initial Result
// with position based on the current open ledger's transactions.
ConsensusResult onClose(Ledger const &, Ledger const & prev, ConsensusMode mode);
```

The implementing class hides many details of the peer communication model from the generic code.

- The `share` member functions are responsible for sharing the given type with a node's peers, but are agnostic to the mechanism. Ideally, messages are delivered faster than `LEDGER_GRANULARITY`.
- The generic code does not specify how transactions are submitted by clients, propagated through the network or stored in the open ledger. Indeed, the open ledger is only conceptual from the perspective of the generic code---the initial position and transaction set are opaquely generated in a `Consensus::Result` instance returned from the `onClose` callback.
- The calls to `acquireLedger` and `acquireTxSet` only have a non-trivial return if the ledger or transaction set of interest is available. The implementing class is free to block while acquiring, or return the empty option while servicing the request asynchronously. Due to legacy reasons, the two calls are not symmetric: `acquireTxSet` requires the host application to call `gotTxSet` when an asynchronous `acquire` completes, whereas `acquireLedger` will be called again later by the consensus code if it still desires the ledger, with the hope that the asynchronous acquisition is complete.
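
For concreteness, a sketch of the communication-facing slice of an adaptor (signatures simplified; `std::optional` stands in for the "empty option" return, and the `TxSet`/`Ledger`/`PeerPosition` types are placeholders):

```c++
#include <cstdint>
#include <optional>

// Illustrative stand-ins for the adaptor's associated types.
struct TxSet        { using ID = std::uint64_t; };
struct Ledger       { using ID = std::uint64_t; };
struct PeerPosition {};

// Sketch of the communication-facing slice of an Adaptor implementation.
struct Adaptor
{
    // Fire-and-forget: broadcast a position or transaction set to peers,
    // by whatever transport the application uses.
    void share(TxSet const& txSet);
    void share(PeerPosition const& proposal);

    // Return the object if it is already available; otherwise return an
    // empty optional and service the request asynchronously. For a tx set,
    // the application must later call gotTxSet; for a ledger, consensus
    // will simply call acquireLedger again once it may have arrived.
    std::optional<Ledger> acquireLedger(Ledger::ID const& ledgerID);
    std::optional<TxSet> acquireTxSet(TxSet::ID const& setID);
};
```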

## Validation

Coming Soon!
@@ -1,3 +1,3 @@
 ---
 DisableFormat: true
-SortIncludes: false
+SortIncludes: Never

external/README.md (vendored, 17 lines changed)
@@ -1,14 +1,9 @@
 # External Conan recipes
 
-The subdirectories in this directory contain either copies or Conan recipes
-of external libraries used by rippled.
-The Conan recipes include patches we have not yet pushed upstream.
+The subdirectories in this directory contain external libraries used by rippled.
 
-| Folder | Upstream | Description |
-|:----------------|:---------------------------------------------|:------------|
-| `antithesis-sdk`| [Project](https://github.com/antithesishq/antithesis-sdk-cpp/) | [Antithesis](https://antithesis.com/docs/using_antithesis/sdk/cpp/overview.html) SDK for C++ |
-| `ed25519-donna` | [Project](https://github.com/floodyberry/ed25519-donna) | [Ed25519](http://ed25519.cr.yp.to/) digital signatures |
-| `rocksdb` | [Recipe](https://github.com/conan-io/conan-center-index/tree/master/recipes/rocksdb) | Fast key/value database. (Supports rotational disks better than NuDB.) |
-| `secp256k1` | [Project](https://github.com/bitcoin-core/secp256k1) | ECDSA digital signatures using the **secp256k1** curve |
-| `snappy` | [Recipe](https://github.com/conan-io/conan-center-index/tree/master/recipes/snappy) | "Snappy" lossless compression algorithm. |
-| `soci` | [Recipe](https://github.com/conan-io/conan-center-index/tree/master/recipes/soci) | Abstraction layer for database access. |
+| Folder | Upstream | Description |
+| :--------------- | :------------------------------------------------------------- | :------------------------------------------------------------------------------------------- |
+| `antithesis-sdk` | [Project](https://github.com/antithesishq/antithesis-sdk-cpp/) | [Antithesis](https://antithesis.com/docs/using_antithesis/sdk/cpp/overview.html) SDK for C++ |
+| `ed25519-donna` | [Project](https://github.com/floodyberry/ed25519-donna) | [Ed25519](http://ed25519.cr.yp.to/) digital signatures |
+| `secp256k1` | [Project](https://github.com/bitcoin-core/secp256k1) | ECDSA digital signatures using the **secp256k1** curve |

external/antithesis-sdk/README.md (vendored, 7 lines changed)
@@ -1,8 +1,9 @@
 # Antithesis C++ SDK
 
 This library provides methods for C++ programs to configure the [Antithesis](https://antithesis.com) platform. It contains three kinds of functionality:
-* Assertion macros that allow you to define test properties about your software or workload.
-* Randomness functions for requesting both structured and unstructured randomness from the Antithesis platform.
-* Lifecycle functions that inform the Antithesis environment that particular test phases or milestones have been reached.
+
+- Assertion macros that allow you to define test properties about your software or workload.
+- Randomness functions for requesting both structured and unstructured randomness from the Antithesis platform.
+- Lifecycle functions that inform the Antithesis environment that particular test phases or milestones have been reached.
 
 For general usage guidance see the [Antithesis C++ SDK Documentation](https://antithesis.com/docs/using_antithesis/sdk/cpp/overview/)

external/ed25519-donna/CMakeLists.txt (vendored, 3 lines changed)
@@ -17,6 +17,9 @@ add_library(ed25519 STATIC
 )
 add_library(ed25519::ed25519 ALIAS ed25519)
 target_link_libraries(ed25519 PUBLIC OpenSSL::SSL)
+if(NOT MSVC)
+  target_compile_options(ed25519 PRIVATE -Wno-implicit-fallthrough)
+endif()
 
 include(GNUInstallDirs)

external/nudb/conandata.yml (vendored, file deleted)
@@ -1,10 +0,0 @@
sources:
  "2.0.8":
    url: "https://github.com/CPPAlliance/NuDB/archive/2.0.8.tar.gz"
    sha256: "9b71903d8ba111cd893ab064b9a8b6ac4124ed8bd6b4f67250205bc43c7f13a8"
patches:
  "2.0.8":
    - patch_file: "patches/2.0.8-0001-add-include-stdexcept-for-msvc.patch"
      patch_description: "Fix build for MSVC by including stdexcept"
      patch_type: "portability"
      patch_source: "https://github.com/cppalliance/NuDB/pull/100/files"

external/nudb/conanfile.py (vendored, file deleted)
@@ -1,72 +0,0 @@
|
||||
import os
|
||||
|
||||
from conan import ConanFile
|
||||
from conan.tools.build import check_min_cppstd
|
||||
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get
|
||||
from conan.tools.layout import basic_layout
|
||||
|
||||
required_conan_version = ">=1.52.0"
|
||||
|
||||
|
||||
class NudbConan(ConanFile):
|
||||
name = "nudb"
|
||||
description = "A fast key/value insert-only database for SSD drives in C++11"
|
||||
license = "BSL-1.0"
|
||||
url = "https://github.com/conan-io/conan-center-index"
|
||||
homepage = "https://github.com/CPPAlliance/NuDB"
|
||||
topics = ("header-only", "KVS", "insert-only")
|
||||
|
||||
package_type = "header-library"
|
||||
settings = "os", "arch", "compiler", "build_type"
|
||||
no_copy_source = True
|
||||
|
||||
@property
|
||||
def _min_cppstd(self):
|
||||
return 11
|
||||
|
||||
def export_sources(self):
|
||||
export_conandata_patches(self)
|
||||
|
||||
def layout(self):
|
||||
basic_layout(self, src_folder="src")
|
||||
|
||||
def requirements(self):
|
||||
self.requires("boost/1.83.0")
|
||||
|
||||
def package_id(self):
|
||||
self.info.clear()
|
||||
|
||||
def validate(self):
|
||||
if self.settings.compiler.cppstd:
|
||||
check_min_cppstd(self, self._min_cppstd)
|
||||
|
||||
def source(self):
|
||||
get(self, **self.conan_data["sources"][self.version], strip_root=True)
|
||||
|
||||
def build(self):
|
||||
apply_conandata_patches(self)
|
||||
|
||||
def package(self):
|
||||
copy(self, "LICENSE*",
|
||||
dst=os.path.join(self.package_folder, "licenses"),
|
||||
src=self.source_folder)
|
||||
copy(self, "*",
|
||||
dst=os.path.join(self.package_folder, "include"),
|
||||
src=os.path.join(self.source_folder, "include"))
|
||||
|
||||
def package_info(self):
|
||||
self.cpp_info.bindirs = []
|
||||
self.cpp_info.libdirs = []
|
||||
|
||||
self.cpp_info.set_property("cmake_target_name", "NuDB")
|
||||
self.cpp_info.set_property("cmake_target_aliases", ["NuDB::nudb"])
|
||||
self.cpp_info.set_property("cmake_find_mode", "both")
|
||||
|
||||
self.cpp_info.components["core"].set_property("cmake_target_name", "nudb")
|
||||
self.cpp_info.components["core"].names["cmake_find_package"] = "nudb"
|
||||
self.cpp_info.components["core"].names["cmake_find_package_multi"] = "nudb"
|
||||
self.cpp_info.components["core"].requires = ["boost::thread", "boost::system"]
|
||||
|
||||
# TODO: to remove in conan v2 once cmake_find_package_* generators removed
|
||||
self.cpp_info.names["cmake_find_package"] = "NuDB"
|
||||
self.cpp_info.names["cmake_find_package_multi"] = "NuDB"
|
||||
external/nudb/patches/2.0.8-0001-add-include-stdexcept-for-msvc.patch (vendored, file deleted)

@@ -1,24 +0,0 @@
diff --git a/include/nudb/detail/stream.hpp b/include/nudb/detail/stream.hpp
index 6c07bf1..e0ce8ed 100644
--- a/include/nudb/detail/stream.hpp
+++ b/include/nudb/detail/stream.hpp
@@ -14,6 +14,7 @@
 #include <cstdint>
 #include <cstring>
 #include <memory>
+#include <stdexcept>
 
 namespace nudb {
 namespace detail {
diff --git a/include/nudb/impl/context.ipp b/include/nudb/impl/context.ipp
index beb7058..ffde0b3 100644
--- a/include/nudb/impl/context.ipp
+++ b/include/nudb/impl/context.ipp
@@ -9,6 +9,7 @@
 #define NUDB_IMPL_CONTEXT_IPP
 
 #include <nudb/detail/store_base.hpp>
+#include <stdexcept>
 
 namespace nudb {

external/rocksdb/conandata.yml (vendored, file deleted)
@@ -1,12 +0,0 @@
sources:
  "9.7.3":
    url: "https://github.com/facebook/rocksdb/archive/refs/tags/v9.7.3.tar.gz"
    sha256: "acfabb989cbfb5b5c4d23214819b059638193ec33dad2d88373c46448d16d38b"
patches:
  "9.7.3":
    - patch_file: "patches/9.x.x-0001-exclude-thirdparty.patch"
      patch_description: "Do not include thirdparty.inc"
      patch_type: "portability"
    - patch_file: "patches/9.7.3-0001-memory-leak.patch"
      patch_description: "Fix a leak of obsolete blob files left open until DB::Close()"
      patch_type: "portability"

external/rocksdb/conanfile.py (vendored, file deleted)
@@ -1,235 +0,0 @@
import os
import glob
import shutil

from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, collect_libs, copy, export_conandata_patches, get, rm, rmdir
from conan.tools.microsoft import check_min_vs, is_msvc, is_msvc_static_runtime
from conan.tools.scm import Version

required_conan_version = ">=1.53.0"


class RocksDBConan(ConanFile):
    name = "rocksdb"
    description = "A library that provides an embeddable, persistent key-value store for fast storage"
    license = ("GPL-2.0-only", "Apache-2.0")
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://github.com/facebook/rocksdb"
    topics = ("database", "leveldb", "facebook", "key-value")
    package_type = "library"
    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        "lite": [True, False],
        "with_gflags": [True, False],
        "with_snappy": [True, False],
        "with_lz4": [True, False],
        "with_zlib": [True, False],
        "with_zstd": [True, False],
        "with_tbb": [True, False],
        "with_jemalloc": [True, False],
        "enable_sse": [False, "sse42", "avx2"],
        "use_rtti": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
        "lite": False,
        "with_snappy": False,
        "with_lz4": False,
        "with_zlib": False,
        "with_zstd": False,
        "with_gflags": False,
        "with_tbb": False,
        "with_jemalloc": False,
        "enable_sse": False,
        "use_rtti": False,
    }

    @property
    def _min_cppstd(self):
        return "11" if Version(self.version) < "8.8.1" else "17"

    @property
    def _compilers_minimum_version(self):
        return {} if self._min_cppstd == "11" else {
            "apple-clang": "10",
            "clang": "7",
            "gcc": "7",
            "msvc": "191",
            "Visual Studio": "15",
        }

    def export_sources(self):
        export_conandata_patches(self)

    def config_options(self):
        if self.settings.os == "Windows":
            del self.options.fPIC
        if self.settings.arch != "x86_64":
            del self.options.with_tbb
        if self.settings.build_type == "Debug":
            self.options.use_rtti = True  # Rtti are used in asserts for debug mode...

    def configure(self):
        if self.options.shared:
            self.options.rm_safe("fPIC")

    def layout(self):
        cmake_layout(self, src_folder="src")

    def requirements(self):
        if self.options.with_gflags:
            self.requires("gflags/2.2.2")
        if self.options.with_snappy:
            self.requires("snappy/1.1.10")
        if self.options.with_lz4:
            self.requires("lz4/1.10.0")
        if self.options.with_zlib:
            self.requires("zlib/[>=1.2.11 <2]")
        if self.options.with_zstd:
            self.requires("zstd/1.5.6")
        if self.options.get_safe("with_tbb"):
            self.requires("onetbb/2021.12.0")
        if self.options.with_jemalloc:
            self.requires("jemalloc/5.3.0")

    def validate(self):
        if self.settings.compiler.get_safe("cppstd"):
            check_min_cppstd(self, self._min_cppstd)

        minimum_version = self._compilers_minimum_version.get(str(self.settings.compiler), False)
        if minimum_version and Version(self.settings.compiler.version) < minimum_version:
            raise ConanInvalidConfiguration(
                f"{self.ref} requires C++{self._min_cppstd}, which your compiler does not support."
            )

        if self.settings.arch not in ["x86_64", "ppc64le", "ppc64", "mips64", "armv8"]:
            raise ConanInvalidConfiguration("Rocksdb requires 64 bits")

        check_min_vs(self, "191")

        if self.version == "6.20.3" and \
           self.settings.os == "Linux" and \
           self.settings.compiler == "gcc" and \
           Version(self.settings.compiler.version) < "5":
            raise ConanInvalidConfiguration("Rocksdb 6.20.3 is not compilable with gcc <5.")  # See https://github.com/facebook/rocksdb/issues/3522

    def source(self):
        get(self, **self.conan_data["sources"][self.version], strip_root=True)

    def generate(self):
        tc = CMakeToolchain(self)
        tc.variables["FAIL_ON_WARNINGS"] = False
        tc.variables["WITH_TESTS"] = False
        tc.variables["WITH_TOOLS"] = False
        tc.variables["WITH_CORE_TOOLS"] = False
        tc.variables["WITH_BENCHMARK_TOOLS"] = False
        tc.variables["WITH_FOLLY_DISTRIBUTED_MUTEX"] = False
        if is_msvc(self):
            tc.variables["WITH_MD_LIBRARY"] = not is_msvc_static_runtime(self)
        tc.variables["ROCKSDB_INSTALL_ON_WINDOWS"] = self.settings.os == "Windows"
        tc.variables["ROCKSDB_LITE"] = self.options.lite
        tc.variables["WITH_GFLAGS"] = self.options.with_gflags
        tc.variables["WITH_SNAPPY"] = self.options.with_snappy
        tc.variables["WITH_LZ4"] = self.options.with_lz4
        tc.variables["WITH_ZLIB"] = self.options.with_zlib
        tc.variables["WITH_ZSTD"] = self.options.with_zstd
        tc.variables["WITH_TBB"] = self.options.get_safe("with_tbb", False)
        tc.variables["WITH_JEMALLOC"] = self.options.with_jemalloc
        tc.variables["ROCKSDB_BUILD_SHARED"] = self.options.shared
        tc.variables["ROCKSDB_LIBRARY_EXPORTS"] = self.settings.os == "Windows" and self.options.shared
        tc.variables["ROCKSDB_DLL"] = self.settings.os == "Windows" and self.options.shared
        tc.variables["USE_RTTI"] = self.options.use_rtti
        if not bool(self.options.enable_sse):
            tc.variables["PORTABLE"] = True
            tc.variables["FORCE_SSE42"] = False
        elif self.options.enable_sse == "sse42":
            tc.variables["PORTABLE"] = True
            tc.variables["FORCE_SSE42"] = True
        elif self.options.enable_sse == "avx2":
            tc.variables["PORTABLE"] = False
            tc.variables["FORCE_SSE42"] = False
        # not available yet in CCI
        tc.variables["WITH_NUMA"] = False
        tc.generate()

        deps = CMakeDeps(self)
        if self.options.with_jemalloc:
            deps.set_property("jemalloc", "cmake_file_name", "JeMalloc")
            deps.set_property("jemalloc", "cmake_target_name", "JeMalloc::JeMalloc")
        if self.options.with_zstd:
            deps.set_property("zstd", "cmake_target_name", "zstd::zstd")
        deps.generate()

    def build(self):
        apply_conandata_patches(self)
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def _remove_static_libraries(self):
        rm(self, "rocksdb.lib", os.path.join(self.package_folder, "lib"))
        for lib in glob.glob(os.path.join(self.package_folder, "lib", "*.a")):
            if not lib.endswith(".dll.a"):
                os.remove(lib)

    def _remove_cpp_headers(self):
        for path in glob.glob(os.path.join(self.package_folder, "include", "rocksdb", "*")):
            if path != os.path.join(self.package_folder, "include", "rocksdb", "c.h"):
                if os.path.isfile(path):
                    os.remove(path)
                else:
                    shutil.rmtree(path)

    def package(self):
        copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
        copy(self, "LICENSE*", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
        cmake = CMake(self)
        cmake.install()
        if self.options.shared:
            self._remove_static_libraries()
            self._remove_cpp_headers()  # Force stable ABI for shared libraries
        rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
        rmdir(self, os.path.join(self.package_folder, "lib", "pkgconfig"))

    def package_info(self):
        cmake_target = "rocksdb-shared" if self.options.shared else "rocksdb"
        self.cpp_info.set_property("cmake_file_name", "RocksDB")
        self.cpp_info.set_property("cmake_target_name", f"RocksDB::{cmake_target}")
        # TODO: back to global scope in conan v2 once cmake_find_package* generators removed
        self.cpp_info.components["librocksdb"].libs = collect_libs(self)
        if self.settings.os == "Windows":
            self.cpp_info.components["librocksdb"].system_libs = ["shlwapi", "rpcrt4"]
            if self.options.shared:
                self.cpp_info.components["librocksdb"].defines = ["ROCKSDB_DLL"]
        elif self.settings.os in ["Linux", "FreeBSD"]:
            self.cpp_info.components["librocksdb"].system_libs = ["pthread", "m"]
        if self.options.lite:
            self.cpp_info.components["librocksdb"].defines.append("ROCKSDB_LITE")

        # TODO: to remove in conan v2 once cmake_find_package* generators removed
        self.cpp_info.names["cmake_find_package"] = "RocksDB"
        self.cpp_info.names["cmake_find_package_multi"] = "RocksDB"
        self.cpp_info.components["librocksdb"].names["cmake_find_package"] = cmake_target
        self.cpp_info.components["librocksdb"].names["cmake_find_package_multi"] = cmake_target
        self.cpp_info.components["librocksdb"].set_property("cmake_target_name", f"RocksDB::{cmake_target}")
        if self.options.with_gflags:
            self.cpp_info.components["librocksdb"].requires.append("gflags::gflags")
        if self.options.with_snappy:
            self.cpp_info.components["librocksdb"].requires.append("snappy::snappy")
        if self.options.with_lz4:
            self.cpp_info.components["librocksdb"].requires.append("lz4::lz4")
        if self.options.with_zlib:
            self.cpp_info.components["librocksdb"].requires.append("zlib::zlib")
        if self.options.with_zstd:
            self.cpp_info.components["librocksdb"].requires.append("zstd::zstd")
        if self.options.get_safe("with_tbb"):
            self.cpp_info.components["librocksdb"].requires.append("onetbb::onetbb")
        if self.options.with_jemalloc:
            self.cpp_info.components["librocksdb"].requires.append("jemalloc::jemalloc")
external/rocksdb/patches/9.7.3-0001-memory-leak.patch (vendored, file deleted)

@@ -1,319 +0,0 @@
diff --git a/HISTORY.md b/HISTORY.md
index 36d472229..05ad1a202 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,6 +1,10 @@
 # Rocksdb Change Log
 > NOTE: Entries for next release do not go here. Follow instructions in `unreleased_history/README.txt`
 
+## 9.7.4 (10/31/2024)
+### Bug Fixes
+* Fix a leak of obsolete blob files left open until DB::Close(). This bug was introduced in version 9.4.0.
+
 ## 9.7.3 (10/16/2024)
 ### Behavior Changes
 * OPTIONS file to be loaded by remote worker is now preserved so that it does not get purged by the primary host. A similar technique as how we are preserving new SST files from getting purged is used for this. min_options_file_numbers_ is tracked like pending_outputs_ is tracked.
diff --git a/db/blob/blob_file_cache.cc b/db/blob/blob_file_cache.cc
index 5f340aadf..1b9faa238 100644
--- a/db/blob/blob_file_cache.cc
+++ b/db/blob/blob_file_cache.cc
@@ -42,6 +42,7 @@ Status BlobFileCache::GetBlobFileReader(
   assert(blob_file_reader);
   assert(blob_file_reader->IsEmpty());
 
+  // NOTE: sharing same Cache with table_cache
   const Slice key = GetSliceForKey(&blob_file_number);
 
   assert(cache_);
@@ -98,4 +99,13 @@ Status BlobFileCache::GetBlobFileReader(
   return Status::OK();
 }
 
+void BlobFileCache::Evict(uint64_t blob_file_number) {
+  // NOTE: sharing same Cache with table_cache
+  const Slice key = GetSliceForKey(&blob_file_number);
+
+  assert(cache_);
+
+  cache_.get()->Erase(key);
+}
+
 }  // namespace ROCKSDB_NAMESPACE
diff --git a/db/blob/blob_file_cache.h b/db/blob/blob_file_cache.h
index 740e67ada..6858d012b 100644
--- a/db/blob/blob_file_cache.h
+++ b/db/blob/blob_file_cache.h
@@ -36,6 +36,15 @@ class BlobFileCache {
       uint64_t blob_file_number,
       CacheHandleGuard<BlobFileReader>* blob_file_reader);
 
+  // Called when a blob file is obsolete to ensure it is removed from the cache
+  // to avoid effectively leaking the open file and assicated memory
+  void Evict(uint64_t blob_file_number);
+
+  // Used to identify cache entries for blob files (not normally useful)
+  static const Cache::CacheItemHelper* GetHelper() {
+    return CacheInterface::GetBasicHelper();
+  }
+
  private:
   using CacheInterface =
       BasicTypedCacheInterface<BlobFileReader, CacheEntryRole::kMisc>;
diff --git a/db/column_family.h b/db/column_family.h
index e4b7adde8..86637736a 100644
--- a/db/column_family.h
+++ b/db/column_family.h
@@ -401,6 +401,7 @@ class ColumnFamilyData {
                              SequenceNumber earliest_seq);
 
   TableCache* table_cache() const { return table_cache_.get(); }
+  BlobFileCache* blob_file_cache() const { return blob_file_cache_.get(); }
   BlobSource* blob_source() const { return blob_source_.get(); }
 
   // See documentation in compaction_picker.h
diff --git a/db/db_impl/db_impl.cc b/db/db_impl/db_impl.cc
index 261593423..06573ac2e 100644
--- a/db/db_impl/db_impl.cc
+++ b/db/db_impl/db_impl.cc
@@ -659,8 +659,9 @@ Status DBImpl::CloseHelper() {
   // We need to release them before the block cache is destroyed. The block
   // cache may be destroyed inside versions_.reset(), when column family data
   // list is destroyed, so leaving handles in table cache after
-  // versions_.reset() may cause issues.
-  // Here we clean all unreferenced handles in table cache.
+  // versions_.reset() may cause issues. Here we clean all unreferenced handles
+  // in table cache, and (for certain builds/conditions) assert that no obsolete
+  // files are hanging around unreferenced (leak) in the table/blob file cache.
   // Now we assume all user queries have finished, so only version set itself
   // can possibly hold the blocks from block cache. After releasing unreferenced
   // handles here, only handles held by version set left and inside
@@ -668,6 +669,9 @@ Status DBImpl::CloseHelper() {
   // time a handle is released, we erase it from the cache too. By doing that,
   // we can guarantee that after versions_.reset(), table cache is empty
   // so the cache can be safely destroyed.
+#ifndef NDEBUG
+  TEST_VerifyNoObsoleteFilesCached(/*db_mutex_already_held=*/true);
+#endif  // !NDEBUG
   table_cache_->EraseUnRefEntries();
 
   for (auto& txn_entry : recovered_transactions_) {
@@ -3227,6 +3231,8 @@ Status DBImpl::MultiGetImpl(
       s = Status::Aborted();
       break;
     }
+    // This could be a long-running operation
+    ROCKSDB_THREAD_YIELD_HOOK();
   }
 
   // Post processing (decrement reference counts and record statistics)
diff --git a/db/db_impl/db_impl.h b/db/db_impl/db_impl.h
index 5e4fa310b..ccc0abfa7 100644
--- a/db/db_impl/db_impl.h
+++ b/db/db_impl/db_impl.h
@@ -1241,9 +1241,14 @@ class DBImpl : public DB {
   static Status TEST_ValidateOptions(const DBOptions& db_options) {
     return ValidateOptions(db_options);
   }
-
 #endif  // NDEBUG
 
+  // In certain configurations, verify that the table/blob file cache only
+  // contains entries for live files, to check for effective leaks of open
+  // files. This can only be called when purging of obsolete files has
+  // "settled," such as during parts of DB Close().
+  void TEST_VerifyNoObsoleteFilesCached(bool db_mutex_already_held) const;
+
   // persist stats to column family "_persistent_stats"
   void PersistStats();
 
diff --git a/db/db_impl/db_impl_debug.cc b/db/db_impl/db_impl_debug.cc
index 790a50d7a..67f5b4aaf 100644
--- a/db/db_impl/db_impl_debug.cc
+++ b/db/db_impl/db_impl_debug.cc
@@ -9,6 +9,7 @@
 
 #ifndef NDEBUG
 
+#include "db/blob/blob_file_cache.h"
 #include "db/column_family.h"
 #include "db/db_impl/db_impl.h"
 #include "db/error_handler.h"
@@ -328,5 +329,49 @@ size_t DBImpl::TEST_EstimateInMemoryStatsHistorySize() const {
   InstrumentedMutexLock l(&const_cast<DBImpl*>(this)->stats_history_mutex_);
   return EstimateInMemoryStatsHistorySize();
 }
+
+void DBImpl::TEST_VerifyNoObsoleteFilesCached(
+    bool db_mutex_already_held) const {
+  // This check is somewhat expensive and obscure to make a part of every
+  // unit test in every build variety. Thus, we only enable it for ASAN builds.
+  if (!kMustFreeHeapAllocations) {
+    return;
+  }
+
+  std::optional<InstrumentedMutexLock> l;
+  if (db_mutex_already_held) {
+    mutex_.AssertHeld();
+  } else {
+    l.emplace(&mutex_);
+  }
+
+  std::vector<uint64_t> live_files;
+  for (auto cfd : *versions_->GetColumnFamilySet()) {
+    if (cfd->IsDropped()) {
+      continue;
+    }
+    // Sneakily add both SST and blob files to the same list
+    cfd->current()->AddLiveFiles(&live_files, &live_files);
+  }
+  std::sort(live_files.begin(), live_files.end());
+
+  auto fn = [&live_files](const Slice& key, Cache::ObjectPtr, size_t,
+                          const Cache::CacheItemHelper* helper) {
+    if (helper != BlobFileCache::GetHelper()) {
+      // Skip non-blob files for now
+      // FIXME: diagnose and fix the leaks of obsolete SST files revealed in
+      // unit tests.
+      return;
+    }
+    // See TableCache and BlobFileCache
+    assert(key.size() == sizeof(uint64_t));
+    uint64_t file_number;
+    GetUnaligned(reinterpret_cast<const uint64_t*>(key.data()), &file_number);
+    // Assert file is in sorted live_files
+    assert(
+        std::binary_search(live_files.begin(), live_files.end(), file_number));
+  };
+  table_cache_->ApplyToAllEntries(fn, {});
+}
 }  // namespace ROCKSDB_NAMESPACE
 #endif  // NDEBUG
diff --git a/db/db_iter.cc b/db/db_iter.cc
index e02586377..bf4749eb9 100644
--- a/db/db_iter.cc
+++ b/db/db_iter.cc
@@ -540,6 +540,8 @@ bool DBIter::FindNextUserEntryInternal(bool skipping_saved_key,
     } else {
       iter_.Next();
     }
+    // This could be a long-running operation due to tombstones, etc.
+    ROCKSDB_THREAD_YIELD_HOOK();
   } while (iter_.Valid());
 
   valid_ = false;
diff --git a/db/table_cache.cc b/db/table_cache.cc
index 71fc29c32..8a5be75e8 100644
--- a/db/table_cache.cc
+++ b/db/table_cache.cc
@@ -164,6 +164,7 @@ Status TableCache::GetTableReader(
 }
 
 Cache::Handle* TableCache::Lookup(Cache* cache, uint64_t file_number) {
+  // NOTE: sharing same Cache with BlobFileCache
   Slice key = GetSliceForFileNumber(&file_number);
   return cache->Lookup(key);
 }
@@ -179,6 +180,7 @@ Status TableCache::FindTable(
     size_t max_file_size_for_l0_meta_pin, Temperature file_temperature) {
   PERF_TIMER_GUARD_WITH_CLOCK(find_table_nanos, ioptions_.clock);
   uint64_t number = file_meta.fd.GetNumber();
+  // NOTE: sharing same Cache with BlobFileCache
   Slice key = GetSliceForFileNumber(&number);
   *handle = cache_.Lookup(key);
   TEST_SYNC_POINT_CALLBACK("TableCache::FindTable:0",
diff --git a/db/version_builder.cc b/db/version_builder.cc
index ed8ab8214..c98f53f42 100644
--- a/db/version_builder.cc
+++ b/db/version_builder.cc
@@ -24,6 +24,7 @@
 #include <vector>
 
 #include "cache/cache_reservation_manager.h"
+#include "db/blob/blob_file_cache.h"
 #include "db/blob/blob_file_meta.h"
 #include "db/dbformat.h"
 #include "db/internal_stats.h"
@@ -744,12 +745,9 @@ class VersionBuilder::Rep {
       return Status::Corruption("VersionBuilder", oss.str());
     }
 
-    // Note: we use C++11 for now but in C++14, this could be done in a more
-    // elegant way using generalized lambda capture.
-    VersionSet* const vs = version_set_;
-    const ImmutableCFOptions* const ioptions = ioptions_;
-
-    auto deleter = [vs, ioptions](SharedBlobFileMetaData* shared_meta) {
+    auto deleter = [vs = version_set_, ioptions = ioptions_,
+                    bc = cfd_ ? cfd_->blob_file_cache()
+                              : nullptr](SharedBlobFileMetaData* shared_meta) {
       if (vs) {
         assert(ioptions);
         assert(!ioptions->cf_paths.empty());
@@ -758,6 +756,9 @@ class VersionBuilder::Rep {
         vs->AddObsoleteBlobFile(shared_meta->GetBlobFileNumber(),
                                 ioptions->cf_paths.front().path);
       }
+      if (bc) {
+        bc->Evict(shared_meta->GetBlobFileNumber());
+      }
 
       delete shared_meta;
     };
@@ -766,7 +767,7 @@ class VersionBuilder::Rep {
         blob_file_number, blob_file_addition.GetTotalBlobCount(),
         blob_file_addition.GetTotalBlobBytes(),
         blob_file_addition.GetChecksumMethod(),
-        blob_file_addition.GetChecksumValue(), deleter);
+        blob_file_addition.GetChecksumValue(), std::move(deleter));
 
     mutable_blob_file_metas_.emplace(
         blob_file_number, MutableBlobFileMetaData(std::move(shared_meta)));
diff --git a/db/version_set.h b/db/version_set.h
index 9336782b1..024f869e7 100644
--- a/db/version_set.h
+++ b/db/version_set.h
@@ -1514,7 +1514,6 @@ class VersionSet {
   void GetLiveFilesMetaData(std::vector<LiveFileMetaData>* metadata);
 
   void AddObsoleteBlobFile(uint64_t blob_file_number, std::string path) {
-    // TODO: Erase file from BlobFileCache?
     obsolete_blob_files_.emplace_back(blob_file_number, std::move(path));
   }
 
diff --git a/include/rocksdb/version.h b/include/rocksdb/version.h
index 2a19796b8..0afa2cab1 100644
--- a/include/rocksdb/version.h
+++ b/include/rocksdb/version.h
@@ -13,7 +13,7 @@
 // minor or major version number planned for release.
 #define ROCKSDB_MAJOR 9
 #define ROCKSDB_MINOR 7
-#define ROCKSDB_PATCH 3
+#define ROCKSDB_PATCH 4
 
 // Do not use these. We made the mistake of declaring macros starting with
 // double underscore. Now we have to live with our choice. We'll deprecate these
diff --git a/port/port.h b/port/port.h
index 13aa56d47..141716e5b 100644
--- a/port/port.h
+++ b/port/port.h
@@ -19,3 +19,19 @@
 #elif defined(OS_WIN)
 #include "port/win/port_win.h"
 #endif
+
+#ifdef OS_LINUX
+// A temporary hook into long-running RocksDB threads to support modifying their
+// priority etc. This should become a public API hook once the requirements
+// are better understood.
+extern "C" void RocksDbThreadYield() __attribute__((__weak__));
+#define ROCKSDB_THREAD_YIELD_HOOK() \
+  { \
+    if (RocksDbThreadYield) { \
+      RocksDbThreadYield(); \
+    } \
+  }
+#else
+#define ROCKSDB_THREAD_YIELD_HOOK() \
+  {}
+#endif
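
The `port.h` hunk above declares `RocksDbThreadYield` as a weak symbol, so an embedding application can opt into the hook simply by providing a strong definition; a minimal sketch:

```c++
#include <thread>

// Defining this strongly in the embedding application overrides the weak
// declaration patched into RocksDB's port.h, letting long-running RocksDB
// loops yield (or be re-prioritized) under the host's control.
extern "C" void
RocksDbThreadYield()
{
    std::this_thread::yield();
}
```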
external/rocksdb/patches/9.x.x-0001-exclude-thirdparty.patch (vendored, file deleted)

@@ -1,30 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 93b884d..b715cb6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -106,14 +106,9 @@ endif()
 include(CMakeDependentOption)
 
 if(MSVC)
-  option(WITH_GFLAGS "build with GFlags" OFF)
   option(WITH_XPRESS "build with windows built in compression" OFF)
-  option(ROCKSDB_SKIP_THIRDPARTY "skip thirdparty.inc" OFF)
-
-  if(NOT ROCKSDB_SKIP_THIRDPARTY)
-    include(${CMAKE_CURRENT_SOURCE_DIR}/thirdparty.inc)
-  endif()
-else()
+endif()
+if(TRUE)
   if(CMAKE_SYSTEM_NAME MATCHES "FreeBSD" AND NOT CMAKE_SYSTEM_NAME MATCHES "kFreeBSD")
     # FreeBSD has jemalloc as default malloc
     # but it does not have all the jemalloc files in include/...
@@ -126,7 +121,7 @@ else()
   endif()
 endif()
 
-  if(MINGW)
+  if(MSVC OR MINGW)
     option(WITH_GFLAGS "build with GFlags" OFF)
   else()
     option(WITH_GFLAGS "build with GFlags" ON)

external/snappy/conandata.yml (vendored, file deleted)
@@ -1,40 +0,0 @@
sources:
  "1.1.10":
    url: "https://github.com/google/snappy/archive/1.1.10.tar.gz"
    sha256: "49d831bffcc5f3d01482340fe5af59852ca2fe76c3e05df0e67203ebbe0f1d90"
  "1.1.9":
    url: "https://github.com/google/snappy/archive/1.1.9.tar.gz"
    sha256: "75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7"
  "1.1.8":
    url: "https://github.com/google/snappy/archive/1.1.8.tar.gz"
    sha256: "16b677f07832a612b0836178db7f374e414f94657c138e6993cbfc5dcc58651f"
  "1.1.7":
    url: "https://github.com/google/snappy/archive/1.1.7.tar.gz"
    sha256: "3dfa02e873ff51a11ee02b9ca391807f0c8ea0529a4924afa645fbf97163f9d4"
patches:
  "1.1.10":
    - patch_file: "patches/1.1.10-0001-fix-inlining-failure.patch"
      patch_description: "disable inlining for compilation error"
      patch_type: "portability"
    - patch_file: "patches/1.1.9-0002-no-Werror.patch"
      patch_description: "disable 'warning as error' options"
      patch_type: "portability"
    - patch_file: "patches/1.1.10-0003-fix-clobber-list-older-llvm.patch"
      patch_description: "disable inline asm on apple-clang"
      patch_type: "portability"
    - patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
      patch_description: "remove 'disable rtti'"
      patch_type: "conan"
  "1.1.9":
    - patch_file: "patches/1.1.9-0001-fix-inlining-failure.patch"
      patch_description: "disable inlining for compilation error"
      patch_type: "portability"
    - patch_file: "patches/1.1.9-0002-no-Werror.patch"
      patch_description: "disable 'warning as error' options"
      patch_type: "portability"
    - patch_file: "patches/1.1.9-0003-fix-clobber-list-older-llvm.patch"
      patch_description: "disable inline asm on apple-clang"
      patch_type: "portability"
    - patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
      patch_description: "remove 'disable rtti'"
      patch_type: "conan"

external/snappy/conanfile.py (vendored, file deleted)
@@ -1,89 +0,0 @@
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
from conan.tools.scm import Version
import os

required_conan_version = ">=1.54.0"


class SnappyConan(ConanFile):
    name = "snappy"
    description = "A fast compressor/decompressor"
    topics = ("google", "compressor", "decompressor")
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://github.com/google/snappy"
    license = "BSD-3-Clause"

    package_type = "library"
    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
    }

    def export_sources(self):
        export_conandata_patches(self)

    def config_options(self):
        if self.settings.os == 'Windows':
            del self.options.fPIC

    def configure(self):
        if self.options.shared:
            self.options.rm_safe("fPIC")

    def layout(self):
        cmake_layout(self, src_folder="src")

    def validate(self):
        if self.settings.compiler.get_safe("cppstd"):
            check_min_cppstd(self, 11)

    def source(self):
        get(self, **self.conan_data["sources"][self.version], strip_root=True)

    def generate(self):
        tc = CMakeToolchain(self)
        tc.variables["SNAPPY_BUILD_TESTS"] = False
        if Version(self.version) >= "1.1.8":
            tc.variables["SNAPPY_FUZZING_BUILD"] = False
            tc.variables["SNAPPY_REQUIRE_AVX"] = False
            tc.variables["SNAPPY_REQUIRE_AVX2"] = False
            tc.variables["SNAPPY_INSTALL"] = True
        if Version(self.version) >= "1.1.9":
            tc.variables["SNAPPY_BUILD_BENCHMARKS"] = False
        tc.generate()

    def build(self):
        apply_conandata_patches(self)
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def package(self):
        copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
        cmake = CMake(self)
        cmake.install()
        rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))

    def package_info(self):
        self.cpp_info.set_property("cmake_file_name", "Snappy")
        self.cpp_info.set_property("cmake_target_name", "Snappy::snappy")
        # TODO: back to global scope in conan v2 once cmake_find_package* generators removed
        self.cpp_info.components["snappylib"].libs = ["snappy"]
        if not self.options.shared:
            if self.settings.os in ["Linux", "FreeBSD"]:
                self.cpp_info.components["snappylib"].system_libs.append("m")

        # TODO: to remove in conan v2 once cmake_find_package* generators removed
        self.cpp_info.names["cmake_find_package"] = "Snappy"
        self.cpp_info.names["cmake_find_package_multi"] = "Snappy"
        self.cpp_info.components["snappylib"].names["cmake_find_package"] = "snappy"
        self.cpp_info.components["snappylib"].names["cmake_find_package_multi"] = "snappy"
        self.cpp_info.components["snappylib"].set_property("cmake_target_name", "Snappy::snappy")
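A note on the version gating in generate() above: conan.tools.scm.Version compares directly against plain version strings, so the SNAPPY_* toolchain variables are only set for releases that actually support them. A standalone sketch of that behavior (version strings are illustrative):

from conan.tools.scm import Version

for v in ("1.1.7", "1.1.8", "1.1.9", "1.1.10"):
    ver = Version(v)
    # Mirrors the gates in generate(): the fuzzing/AVX/install knobs exist
    # from 1.1.8, the benchmarks knob only from 1.1.9.
    print(v, ver >= "1.1.8", ver >= "1.1.9")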
@@ -1,13 +0,0 @@
diff --git a/snappy-stubs-internal.h b/snappy-stubs-internal.h
index 1548ed7..3b4a9f3 100644
--- a/snappy-stubs-internal.h
+++ b/snappy-stubs-internal.h
@@ -100,7 +100,7 @@
 
 // Inlining hints.
 #if HAVE_ATTRIBUTE_ALWAYS_INLINE
-#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
 #else
 #define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
 #endif  // HAVE_ATTRIBUTE_ALWAYS_INLINE
@@ -1,13 +0,0 @@
diff --git a/snappy.cc b/snappy.cc
index d414718..e4efb59 100644
--- a/snappy.cc
+++ b/snappy.cc
@@ -1132,7 +1132,7 @@ inline size_t AdvanceToNextTagX86Optimized(const uint8_t** ip_p, size_t* tag) {
   size_t literal_len = *tag >> 2;
   size_t tag_type = *tag;
   bool is_literal;
-#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__)
+#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
   // TODO clang misses the fact that the (c & 3) already correctly
   // sets the zero flag.
   asm("and $3, %k[tag_type]\n\t"
@@ -1,14 +0,0 @@
Fixes the following error:
error: inlining failed in call to ‘always_inline’ ‘size_t snappy::AdvanceToNextTag(const uint8_t**, size_t*)’: function body can be overwritten at link time

--- snappy-stubs-internal.h
+++ snappy-stubs-internal.h
@@ -100,7 +100,7 @@
 
 // Inlining hints.
 #ifdef HAVE_ATTRIBUTE_ALWAYS_INLINE
-#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
 #else
 #define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
 #endif
@@ -1,12 +0,0 @@
--- CMakeLists.txt
+++ CMakeLists.txt
@@ -69,7 +69,7 @@
-  # Use -Werror for clang only.
+if(0)
   if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
     if(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
       set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
     endif(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
   endif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
-
+endif()
@@ -1,12 +0,0 @@
asm clobbers do not work for clang < 9 and apple-clang < 11 (found by SpaceIm)
--- snappy.cc
+++ snappy.cc
@@ -1026,7 +1026,7 @@
   size_t literal_len = *tag >> 2;
   size_t tag_type = *tag;
   bool is_literal;
-#if defined(__GNUC__) && defined(__x86_64__)
+#if defined(__GNUC__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
   // TODO clang misses the fact that the (c & 3) already correctly
   // sets the zero flag.
   asm("and $3, %k[tag_type]\n\t"
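The #if guard added by the two clobber-list patches above reads as: plain GCC, or non-Apple clang >= 9, or apple-clang newer than 11. A small Python model of that condition (function and parameter names are ours, for illustration only):

def asm_fast_path_enabled(is_clang, is_apple, clang_major=0):
    # Transcription of the patched preprocessor guard.
    plain_gcc = not is_clang and not is_apple
    new_clang = is_clang and not is_apple and clang_major >= 9
    new_apple_clang = is_clang and is_apple and clang_major > 11
    return plain_gcc or new_clang or new_apple_clang

assert asm_fast_path_enabled(False, False)            # GCC
assert not asm_fast_path_enabled(True, False, 8)      # clang 8: clobbers broken
assert not asm_fast_path_enabled(True, True, 11)      # apple-clang 11: excluded
assert asm_fast_path_enabled(True, True, 12)          # apple-clang 12: allowed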
@@ -1,20 +0,0 @@
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -53,8 +53,6 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
   add_definitions(-D_HAS_EXCEPTIONS=0)
 
   # Disable RTTI.
-  string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
 else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
   # Use -Wall for clang and gcc.
   if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
@@ -78,8 +76,6 @@ endif()
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
 
   # Disable RTTI.
-  string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
-  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
 endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
 
 # BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make
12 external/soci/conandata.yml vendored
@@ -1,12 +0,0 @@
sources:
  "4.0.3":
    url: "https://github.com/SOCI/soci/archive/v4.0.3.tar.gz"
    sha256: "4b1ff9c8545c5d802fbe06ee6cd2886630e5c03bf740e269bb625b45cf934928"
patches:
  "4.0.3":
    - patch_file: "patches/0001-Remove-hardcoded-INSTALL_NAME_DIR-for-relocatable-li.patch"
      patch_description: "Generate relocatable libraries on MacOS"
      patch_type: "portability"
    - patch_file: "patches/0002-Fix-soci_backend.patch"
      patch_description: "Fix variable names for dependencies"
      patch_type: "conan"
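The sources entry above is exactly what the recipe's source() unpacks and verifies: get() checks the downloaded archive against the sha256 before extracting. A minimal sketch, mirroring the source() in the conanfile below:

from conan import ConanFile
from conan.tools.files import get


class SourcedRecipe(ConanFile):
    name = "soci"
    version = "4.0.3"

    def source(self):
        # Expands to get(self, url="https://github.com/SOCI/soci/archive/v4.0.3.tar.gz",
        #                sha256="4b1ff9c8...", strip_root=True)
        get(self, **self.conan_data["sources"][self.version], strip_root=True)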
212 external/soci/conanfile.py vendored
@@ -1,212 +0,0 @@
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
from conan.tools.microsoft import is_msvc
from conan.tools.scm import Version
from conan.errors import ConanInvalidConfiguration
import os

required_conan_version = ">=1.55.0"


class SociConan(ConanFile):
    name = "soci"
    homepage = "https://github.com/SOCI/soci"
    url = "https://github.com/conan-io/conan-center-index"
    description = "The C++ Database Access Library"
    topics = ("mysql", "odbc", "postgresql", "sqlite3")
    license = "BSL-1.0"

    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        "empty": [True, False],
        "with_sqlite3": [True, False],
        "with_db2": [True, False],
        "with_odbc": [True, False],
        "with_oracle": [True, False],
        "with_firebird": [True, False],
        "with_mysql": [True, False],
        "with_postgresql": [True, False],
        "with_boost": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
        "empty": False,
        "with_sqlite3": False,
        "with_db2": False,
        "with_odbc": False,
        "with_oracle": False,
        "with_firebird": False,
        "with_mysql": False,
        "with_postgresql": False,
        "with_boost": False,
    }

    def export_sources(self):
        export_conandata_patches(self)

    def layout(self):
        cmake_layout(self, src_folder="src")

    def config_options(self):
        if self.settings.os == "Windows":
            self.options.rm_safe("fPIC")

    def configure(self):
        if self.options.shared:
            self.options.rm_safe("fPIC")

    def requirements(self):
        if self.options.with_sqlite3:
            self.requires("sqlite3/3.47.0")
        if self.options.with_odbc and self.settings.os != "Windows":
            self.requires("odbc/2.3.11")
        if self.options.with_mysql:
            self.requires("libmysqlclient/8.1.0")
        if self.options.with_postgresql:
            self.requires("libpq/15.5")
        if self.options.with_boost:
            self.requires("boost/1.83.0")

    @property
    def _minimum_compilers_version(self):
        return {
            "Visual Studio": "14",
            "gcc": "4.8",
            "clang": "3.8",
            "apple-clang": "8.0"
        }

    def validate(self):
        if self.settings.compiler.get_safe("cppstd"):
            check_min_cppstd(self, 11)

        compiler = str(self.settings.compiler)
        compiler_version = Version(self.settings.compiler.version.value)
        if compiler not in self._minimum_compilers_version:
            self.output.warning("{} recipe lacks information about the {} compiler support.".format(self.name, self.settings.compiler))
        elif compiler_version < self._minimum_compilers_version[compiler]:
            raise ConanInvalidConfiguration("{} requires a {} version >= {}".format(self.name, compiler, self._minimum_compilers_version[compiler]))

        prefix = "Dependencies for"
        message = "not configured in this conan package."
        if self.options.with_db2:
            # self.requires("db2/0.0.0")  # TODO add support for db2
            raise ConanInvalidConfiguration("{} DB2 {}".format(prefix, message))
        if self.options.with_oracle:
            # self.requires("oracle_db/0.0.0")  # TODO add support for oracle
            raise ConanInvalidConfiguration("{} ORACLE {}".format(prefix, message))
        if self.options.with_firebird:
            # self.requires("firebird/0.0.0")  # TODO add support for firebird
            raise ConanInvalidConfiguration("{} firebird {}".format(prefix, message))

    def source(self):
        get(self, **self.conan_data["sources"][self.version], strip_root=True)

    def generate(self):
        tc = CMakeToolchain(self)

        tc.variables["SOCI_SHARED"] = self.options.shared
        tc.variables["SOCI_STATIC"] = not self.options.shared
        tc.variables["SOCI_TESTS"] = False
        tc.variables["SOCI_CXX11"] = True
        tc.variables["SOCI_EMPTY"] = self.options.empty
        tc.variables["WITH_SQLITE3"] = self.options.with_sqlite3
        tc.variables["WITH_DB2"] = self.options.with_db2
        tc.variables["WITH_ODBC"] = self.options.with_odbc
        tc.variables["WITH_ORACLE"] = self.options.with_oracle
        tc.variables["WITH_FIREBIRD"] = self.options.with_firebird
        tc.variables["WITH_MYSQL"] = self.options.with_mysql
        tc.variables["WITH_POSTGRESQL"] = self.options.with_postgresql
        tc.variables["WITH_BOOST"] = self.options.with_boost
        tc.generate()

        deps = CMakeDeps(self)
        deps.generate()

    def build(self):
        apply_conandata_patches(self)
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def package(self):
        copy(self, "LICENSE_1_0.txt", dst=os.path.join(self.package_folder, "licenses"), src=self.source_folder)

        cmake = CMake(self)
        cmake.install()

        rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))

    def package_info(self):
        self.cpp_info.set_property("cmake_file_name", "SOCI")

        target_suffix = "" if self.options.shared else "_static"
        lib_prefix = "lib" if is_msvc(self) and not self.options.shared else ""
        version = Version(self.version)
        lib_suffix = "_{}_{}".format(version.major, version.minor) if self.settings.os == "Windows" else ""

        # soci_core
        self.cpp_info.components["soci_core"].set_property("cmake_target_name", "SOCI::soci_core{}".format(target_suffix))
        self.cpp_info.components["soci_core"].libs = ["{}soci_core{}".format(lib_prefix, lib_suffix)]
        if self.options.with_boost:
            self.cpp_info.components["soci_core"].requires.append("boost::boost")

        # soci_empty
        if self.options.empty:
            self.cpp_info.components["soci_empty"].set_property("cmake_target_name", "SOCI::soci_empty{}".format(target_suffix))
            self.cpp_info.components["soci_empty"].libs = ["{}soci_empty{}".format(lib_prefix, lib_suffix)]
            self.cpp_info.components["soci_empty"].requires = ["soci_core"]

        # soci_sqlite3
        if self.options.with_sqlite3:
            self.cpp_info.components["soci_sqlite3"].set_property("cmake_target_name", "SOCI::soci_sqlite3{}".format(target_suffix))
            self.cpp_info.components["soci_sqlite3"].libs = ["{}soci_sqlite3{}".format(lib_prefix, lib_suffix)]
            self.cpp_info.components["soci_sqlite3"].requires = ["soci_core", "sqlite3::sqlite3"]

        # soci_odbc
        if self.options.with_odbc:
            self.cpp_info.components["soci_odbc"].set_property("cmake_target_name", "SOCI::soci_odbc{}".format(target_suffix))
            self.cpp_info.components["soci_odbc"].libs = ["{}soci_odbc{}".format(lib_prefix, lib_suffix)]
            self.cpp_info.components["soci_odbc"].requires = ["soci_core"]
            if self.settings.os == "Windows":
                self.cpp_info.components["soci_odbc"].system_libs.append("odbc32")
            else:
                self.cpp_info.components["soci_odbc"].requires.append("odbc::odbc")

        # soci_mysql
        if self.options.with_mysql:
            self.cpp_info.components["soci_mysql"].set_property("cmake_target_name", "SOCI::soci_mysql{}".format(target_suffix))
            self.cpp_info.components["soci_mysql"].libs = ["{}soci_mysql{}".format(lib_prefix, lib_suffix)]
            self.cpp_info.components["soci_mysql"].requires = ["soci_core", "libmysqlclient::libmysqlclient"]

        # soci_postgresql
        if self.options.with_postgresql:
            self.cpp_info.components["soci_postgresql"].set_property("cmake_target_name", "SOCI::soci_postgresql{}".format(target_suffix))
            self.cpp_info.components["soci_postgresql"].libs = ["{}soci_postgresql{}".format(lib_prefix, lib_suffix)]
            self.cpp_info.components["soci_postgresql"].requires = ["soci_core", "libpq::libpq"]

        # TODO: to remove in conan v2 once cmake_find_package* generators removed
        self.cpp_info.names["cmake_find_package"] = "SOCI"
        self.cpp_info.names["cmake_find_package_multi"] = "SOCI"
        self.cpp_info.components["soci_core"].names["cmake_find_package"] = "soci_core{}".format(target_suffix)
        self.cpp_info.components["soci_core"].names["cmake_find_package_multi"] = "soci_core{}".format(target_suffix)
        if self.options.empty:
            self.cpp_info.components["soci_empty"].names["cmake_find_package"] = "soci_empty{}".format(target_suffix)
            self.cpp_info.components["soci_empty"].names["cmake_find_package_multi"] = "soci_empty{}".format(target_suffix)
        if self.options.with_sqlite3:
            self.cpp_info.components["soci_sqlite3"].names["cmake_find_package"] = "soci_sqlite3{}".format(target_suffix)
            self.cpp_info.components["soci_sqlite3"].names["cmake_find_package_multi"] = "soci_sqlite3{}".format(target_suffix)
        if self.options.with_odbc:
            self.cpp_info.components["soci_odbc"].names["cmake_find_package"] = "soci_odbc{}".format(target_suffix)
            self.cpp_info.components["soci_odbc"].names["cmake_find_package_multi"] = "soci_odbc{}".format(target_suffix)
        if self.options.with_mysql:
            self.cpp_info.components["soci_mysql"].names["cmake_find_package"] = "soci_mysql{}".format(target_suffix)
            self.cpp_info.components["soci_mysql"].names["cmake_find_package_multi"] = "soci_mysql{}".format(target_suffix)
        if self.options.with_postgresql:
            self.cpp_info.components["soci_postgresql"].names["cmake_find_package"] = "soci_postgresql{}".format(target_suffix)
            self.cpp_info.components["soci_postgresql"].names["cmake_find_package_multi"] = "soci_postgresql{}".format(target_suffix)
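For reference, the library-name logic in package_info() above yields platform-dependent names. A standalone trace of that formula (helper name is ours; values illustrative):

from conan.tools.scm import Version

def soci_lib_name(component, os_name, msvc_static, version):
    # Same formula as package_info(): optional "lib" prefix for static MSVC
    # builds, "_<major>_<minor>" suffix on Windows.
    lib_prefix = "lib" if msvc_static else ""
    v = Version(version)
    lib_suffix = "_{}_{}".format(v.major, v.minor) if os_name == "Windows" else ""
    return "{}{}{}".format(lib_prefix, component, lib_suffix)

print(soci_lib_name("soci_core", "Windows", True, "4.0.3"))   # libsoci_core_4_0
print(soci_lib_name("soci_core", "Linux", False, "4.0.3"))    # soci_core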
Some files were not shown because too many files have changed in this diff.