chore: Fix file formatting (#5718)
.github/actions/build-deps/action.yml (18 changed lines)

@@ -7,33 +7,33 @@ name: Build Conan dependencies
 # https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
 inputs:
   build_dir:
-    description: 'The directory where to build.'
+    description: "The directory where to build."
     required: true
   build_type:
     description: 'The build type to use ("Debug", "Release").'
     required: true
   conan_remote_name:
-    description: 'The name of the Conan remote to use.'
+    description: "The name of the Conan remote to use."
     required: true
   conan_remote_url:
-    description: 'The URL of the Conan endpoint to use.'
+    description: "The URL of the Conan endpoint to use."
     required: true
   conan_remote_username:
-    description: 'The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded.'
+    description: "The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
     required: false
-    default: ''
+    default: ""
   conan_remote_password:
-    description: 'The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded.'
+    description: "The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
     required: false
-    default: ''
+    default: ""
   force_build:
     description: 'Force building of all dependencies ("true", "false").'
     required: false
-    default: 'false'
+    default: "false"
   force_upload:
     description: 'Force uploading of all dependencies ("true", "false").'
     required: false
-    default: 'false'
+    default: "false"

 runs:
   using: composite
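The quoting pattern above is worth spelling out: values that contain no double quotes are rewritten with double quotes, while descriptions that embed double quotes (such as "Debug", "Release", "true", "false") keep their single quotes so the inner quotes need no escaping. The following is a minimal sketch of that convention, using hypothetical input names not taken from this repository, and assuming a Prettier-style formatter that prefers double quotes:

    inputs:
      example_input:
        description: "A plain string is double-quoted."
        required: false
        default: ""
      example_choice:
        description: 'A string that itself contains "double quotes" stays single-quoted.'
        required: false
        default: "none"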
.github/actions/build-test/action.yml (14 changed lines)

@@ -6,26 +6,26 @@ name: Build and Test
 # https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
 inputs:
   build_dir:
-    description: 'The directory where to build.'
+    description: "The directory where to build."
     required: true
   build_only:
     description: 'Whether to only build or to build and test the code ("true", "false").'
     required: false
-    default: 'false'
+    default: "false"
   build_type:
     description: 'The build type to use ("Debug", "Release").'
     required: true
   cmake_args:
-    description: 'Additional arguments to pass to CMake.'
+    description: "Additional arguments to pass to CMake."
     required: false
-    default: ''
+    default: ""
   cmake_target:
-    description: 'The CMake target to build.'
+    description: "The CMake target to build."
     required: true
   codecov_token:
-    description: 'The Codecov token to use for uploading coverage reports.'
+    description: "The Codecov token to use for uploading coverage reports."
     required: false
-    default: ''
+    default: ""
   os:
     description: 'The operating system to use for the build ("linux", "macos", "windows").'
     required: true
.github/scripts/levelization/README.md (2 changed lines)

@@ -111,4 +111,4 @@ get those details locally.
 1. Run `levelization.sh`
 2. Grep the modules in `paths.txt`.
    - For example, if a cycle is found `A ~= B`, simply `grep -w
      A .github/scripts/levelization/results/paths.txt | grep -w B`
.github/workflows/build-test.yml (20 changed lines)

@@ -9,25 +9,25 @@ on:
   workflow_call:
     inputs:
       build_dir:
-        description: 'The directory where to build.'
+        description: "The directory where to build."
         required: false
         type: string
-        default: '.build'
+        default: ".build"
       conan_remote_name:
-        description: 'The name of the Conan remote to use.'
+        description: "The name of the Conan remote to use."
         required: true
         type: string
       conan_remote_url:
-        description: 'The URL of the Conan endpoint to use.'
+        description: "The URL of the Conan endpoint to use."
         required: true
         type: string
       dependencies_force_build:
-        description: 'Force building of all dependencies.'
+        description: "Force building of all dependencies."
         required: false
         type: boolean
         default: false
       dependencies_force_upload:
-        description: 'Force uploading of all dependencies.'
+        description: "Force uploading of all dependencies."
         required: false
         type: boolean
         default: false

@@ -40,16 +40,16 @@ on:
         description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
         required: false
         type: string
-        default: 'minimal'
+        default: "minimal"
     secrets:
       codecov_token:
-        description: 'The Codecov token to use for uploading coverage reports.'
+        description: "The Codecov token to use for uploading coverage reports."
         required: false
       conan_remote_username:
-        description: 'The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded.'
+        description: "The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
         required: false
       conan_remote_password:
-        description: 'The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded.'
+        description: "The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
         required: false

 concurrency:
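For context on how such `workflow_call` inputs are consumed: a reusable workflow reads them through the `inputs` context, and the declared default applies when the caller omits the value. The following is a minimal, hypothetical sketch (not taken from this repository), assuming a job named `example`:

    on:
      workflow_call:
        inputs:
          build_dir:
            description: "The directory where to build."
            required: false
            type: string
            default: ".build"

    jobs:
      example:
        runs-on: ubuntu-latest
        steps:
          # Falls back to ".build" when the caller does not pass build_dir.
          - run: echo "Building in ${{ inputs.build_dir }}"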
.github/workflows/check-missing-commits.yml (78 changed lines)

@@ -18,45 +18,45 @@ jobs:
   check:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
         uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
         with:
           fetch-depth: 0
       - name: Check for missing commits
         env:
           MESSAGE: |

             If you are reading this, then the commits indicated above are missing
             from the "develop" and/or "release" branch. Do a reverse-merge as soon
             as possible. See CONTRIBUTING.md for instructions.
         run: |
           set -o pipefail
           # Branches are ordered by how "canonical" they are. Every commit in one
           # branch should be in all the branches behind it.
           order=(master release develop)
           branches=()
           for branch in "${order[@]}"; do
             # Check that the branches exist so that this job will work on forked
             # repos, which don't necessarily have master and release branches.
             echo "Checking if ${branch} exists."
             if git ls-remote --exit-code --heads origin \
               refs/heads/${branch} > /dev/null; then
               branches+=(origin/${branch})
             fi
           done

           prior=()
           for branch in "${branches[@]}"; do
             if [[ ${#prior[@]} -ne 0 ]]; then
               echo "Checking ${prior[@]} for commits missing from ${branch}."
               git log --oneline --no-merges "${prior[@]}" \
                 ^$branch | tee -a "missing-commits.txt"
               echo
             fi
             prior+=("${branch}")
           done

           if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
             echo "${MESSAGE}"
             exit 1
           fi
.github/workflows/notify-clio.yml (10 changed lines)

@@ -8,22 +8,22 @@ on:
   workflow_call:
     inputs:
       conan_remote_name:
-        description: 'The name of the Conan remote to use.'
+        description: "The name of the Conan remote to use."
         required: true
         type: string
       conan_remote_url:
-        description: 'The URL of the Conan endpoint to use.'
+        description: "The URL of the Conan endpoint to use."
         required: true
         type: string
     secrets:
       clio_notify_token:
-        description: 'The GitHub token to notify Clio about new versions.'
+        description: "The GitHub token to notify Clio about new versions."
         required: true
       conan_remote_username:
-        description: 'The username for logging into the Conan remote.'
+        description: "The username for logging into the Conan remote."
        required: true
       conan_remote_password:
-        description: 'The password for logging into the Conan remote.'
+        description: "The password for logging into the Conan remote."
         required: true

 concurrency:
.github/workflows/on-pr.yml (40 changed lines)

@@ -7,28 +7,28 @@ name: PR
 on:
   pull_request:
     paths:
-      - '.github/actions/build-deps/**'
-      - '.github/actions/build-test/**'
-      - '.github/scripts/levelization/**'
-      - '.github/scripts/strategy-matrix/**'
-      - '.github/workflows/build-test.yml'
-      - '.github/workflows/check-format.yml'
-      - '.github/workflows/check-levelization.yml'
-      - '.github/workflows/notify-clio.yml'
-      - '.github/workflows/on-pr.yml'
+      - ".github/actions/build-deps/**"
+      - ".github/actions/build-test/**"
+      - ".github/scripts/levelization/**"
+      - ".github/scripts/strategy-matrix/**"
+      - ".github/workflows/build-test.yml"
+      - ".github/workflows/check-format.yml"
+      - ".github/workflows/check-levelization.yml"
+      - ".github/workflows/notify-clio.yml"
+      - ".github/workflows/on-pr.yml"
       # Keep the list of paths below in sync with those in the `on-trigger.yml`
       # file.
-      - 'cmake/**'
-      - 'conan/**'
-      - 'external/**'
-      - 'include/**'
-      - 'src/**'
-      - 'tests/**'
-      - '.clang-format'
-      - '.codecov.yml'
-      - '.pre-commit-config.yaml'
-      - 'CMakeLists.txt'
-      - 'conanfile.py'
+      - "cmake/**"
+      - "conan/**"
+      - "external/**"
+      - "include/**"
+      - "src/**"
+      - "tests/**"
+      - ".clang-format"
+      - ".codecov.yml"
+      - ".pre-commit-config.yaml"
+      - "CMakeLists.txt"
+      - "conanfile.py"
     types:
       - opened
       - synchronize
.github/workflows/on-trigger.yml (44 changed lines)

@@ -13,31 +13,31 @@ on:
       - release
       - master
     paths:
-      - '.github/actions/build-deps/**'
-      - '.github/actions/build-test/**'
-      - '.github/scripts/strategy-matrix/**'
-      - '.github/workflows/build-test.yml'
-      - '.github/workflows/check-missing-commits.yml'
-      - '.github/workflows/on-trigger.yml'
-      - '.github/workflows/publish-docs.yml'
+      - ".github/actions/build-deps/**"
+      - ".github/actions/build-test/**"
+      - ".github/scripts/strategy-matrix/**"
+      - ".github/workflows/build-test.yml"
+      - ".github/workflows/check-missing-commits.yml"
+      - ".github/workflows/on-trigger.yml"
+      - ".github/workflows/publish-docs.yml"
       # Keep the list of paths below in sync with those in `on-pr.yml`.
-      - 'cmake/**'
-      - 'conan/**'
-      - 'external/**'
-      - 'include/**'
-      - 'src/**'
-      - 'tests/**'
-      - '.clang-format'
-      - '.codecov.yml'
-      - '.pre-commit-config.yaml'
-      - 'CMakeLists.txt'
-      - 'conanfile.py'
+      - "cmake/**"
+      - "conan/**"
+      - "external/**"
+      - "include/**"
+      - "src/**"
+      - "tests/**"
+      - ".clang-format"
+      - ".codecov.yml"
+      - ".pre-commit-config.yaml"
+      - "CMakeLists.txt"
+      - "conanfile.py"
   # Run at 06:32 UTC on every day of the week from Monday through Friday. This
   # will force all dependencies to be rebuilt, which is useful to verify that
   # all dependencies can be built successfully. Only the dependencies that
   # are actually missing from the remote will be uploaded.
   schedule:
-    - cron: '32 6 * * 1-5'
+    - cron: "32 6 * * 1-5"
   # Run when manually triggered via the GitHub UI or API. If `force_upload` is
   # true, then the dependencies that were missing (`force_rebuild` is false) or
   # rebuilt (`force_rebuild` is true) will be uploaded, overwriting existing

@@ -45,12 +45,12 @@ on:
   workflow_dispatch:
     inputs:
       dependencies_force_build:
-        description: 'Force building of all dependencies.'
+        description: "Force building of all dependencies."
         required: false
         type: boolean
         default: false
       dependencies_force_upload:
-        description: 'Force uploading of all dependencies.'
+        description: "Force uploading of all dependencies."
         required: false
         type: boolean
         default: false

@@ -109,7 +109,7 @@ jobs:
       dependencies_force_build: ${{ needs.generate-outputs.outputs.dependencies_force_build == 'true' }}
       dependencies_force_upload: ${{ needs.generate-outputs.outputs.dependencies_force_upload == 'true' }}
       os: ${{ matrix.os }}
-      strategy_matrix: 'all'
+      strategy_matrix: "all"
     secrets:
       conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
       conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
.github/workflows/publish-docs.yml (16 changed lines)

@@ -5,13 +5,13 @@ name: Build and publish documentation
 on:
   push:
     paths:
-      - '.github/workflows/publish-docs.yml'
-      - '*.md'
-      - '**/*.md'
-      - 'docs/**'
-      - 'include/**'
-      - 'src/libxrpl/**'
-      - 'src/xrpld/**'
+      - ".github/workflows/publish-docs.yml"
+      - "*.md"
+      - "**/*.md"
+      - "docs/**"
+      - "include/**"
+      - "src/libxrpl/**"
+      - "src/xrpld/**"

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}

@@ -22,7 +22,7 @@ defaults:
     shell: bash

 env:
   BUILD_DIR: .build

 jobs:
   publish:
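A side note on why these path globs are quoted at all: in YAML, a plain (unquoted) scalar cannot begin with `*`, because `*` introduces an alias reference, so patterns such as `*.md` and `**/*.md` have to be quoted; the remaining patterns are quoted only for consistency. A minimal, hypothetical sketch (not part of this commit):

    name: example-docs-trigger
    on:
      push:
        paths:
          - "*.md" # must be quoted: a plain scalar may not start with "*"
          - "docs/**" # quotes optional here; kept for a consistent style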
@@ -5,7 +5,7 @@ then
 name=$( basename $0 )
 cat <<- USAGE
 Usage: $name <username>

 Where <username> is the Github username of the upstream repo. e.g. XRPLF
 USAGE
 exit 0

@@ -83,4 +83,3 @@ fi
 _run git fetch --jobs=$(nproc) upstreams

 exit 0
-

@@ -5,7 +5,7 @@ then
 name=$( basename $0 )
 cat <<- USAGE
 Usage: $name workbranch base/branch user/branch [user/branch [...]]

 * workbranch will be created locally from base/branch
 * base/branch and user/branch may be specified as user:branch to allow
   easy copying from Github PRs

@@ -66,4 +66,3 @@ git push $push HEAD:$b
 git fetch $repo
 -------------------------------------------------------------------
 PUSH
-
@@ -396,8 +396,8 @@
 # true - enables compression
 # false - disables compression [default].
 #
 # The rippled server can save bandwidth by compressing its peer-to-peer communications,
 # at a cost of greater CPU usage. If you enable link compression,
 # the server automatically compresses communications with peer servers
 # that also have link compression enabled.
 # https://xrpl.org/enable-link-compression.html

@@ -1011,7 +1011,7 @@
 # that rippled is still in sync with the network,
 # and that the validated ledger is less than
 # 'age_threshold_seconds' old. If not, then continue
 # sleeping for this number of seconds and
 # checking until healthy.
 # Default is 5.
 #

@@ -1113,7 +1113,7 @@
 # page_size Valid values: integer (MUST be power of 2 between 512 and 65536)
 # The default is 4096 bytes. This setting determines
 # the size of a page in the transaction.db file.
 # See https://www.sqlite.org/pragma.html#pragma_page_size
 # for more details about the available options.
 #
 # journal_size_limit Valid values: integer

@@ -101,7 +101,7 @@ target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)

 # Level 06
 add_module(xrpl net)
 target_link_libraries(xrpl.libxrpl.net PUBLIC
   xrpl.libxrpl.basics
   xrpl.libxrpl.json
   xrpl.libxrpl.protocol
@@ -5,8 +5,8 @@ skinparam roundcorner 20
 skinparam maxmessagesize 160

 actor "Rippled Start" as RS
 participant "Timer" as T
 participant "NetworkOPs" as NOP
 participant "ValidatorList" as VL #lightgreen
 participant "Consensus" as GC
 participant "ConsensusAdaptor" as CA #lightgreen

@@ -20,7 +20,7 @@ VL -> NOP
 NOP -> VL: update trusted validators
 activate VL
 VL -> VL: re-calculate quorum
 hnote over VL#lightgreen: ignore negative listed validators\nwhen calculate quorum
 VL -> NOP
 deactivate VL
 NOP -> GC: start round

@@ -36,14 +36,14 @@ activate GC
 end

 alt phase == OPEN
 alt should close ledger
 GC -> GC: phase = ESTABLISH
 GC -> CA: onClose
 activate CA
 alt sqn%256==0
 CA -[#green]> RM: <font color=green>getValidations
 CA -[#green]> CA: <font color=green>create UNLModify Tx
 hnote over CA#lightgreen: use validatations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
 end
 CA -> GC
 GC -> CA: propose

@@ -61,14 +61,14 @@ else phase == ESTABLISH
 CA -> CA : build LCL
 hnote over CA #lightgreen: copy negative UNL from parent ledger
 alt sqn%256==0
 CA -[#green]> CA: <font color=green>Adjust negative UNL
 CA -[#green]> CA: <font color=green>apply UNLModify Tx
 end
 CA -> CA : validate and send validation message
 activate NOP
 CA -> NOP : end consensus and\n<b>begin next consensus round
 deactivate NOP
 deactivate CA
 hnote over RM: receive validations
 end
 else phase == ACCEPTED

@@ -76,4 +76,4 @@ else phase == ACCEPTED
 end
 deactivate GC

 @enduml

@@ -4,7 +4,7 @@ class TimeoutCounter {
 #app_ : Application&
 }

 TimeoutCounter o-- "1" Application
 ': app_

 Stoppable <.. Application

@@ -14,13 +14,13 @@ class Application {
 -m_inboundLedgers : uptr<InboundLedgers>
 }

 Application *-- "1" LedgerReplayer
 ': m_ledgerReplayer
 Application *-- "1" InboundLedgers
 ': m_inboundLedgers

 Stoppable <.. InboundLedgers
 Application "1" --o InboundLedgers
 ': app_

 class InboundLedgers {

@@ -28,9 +28,9 @@ class InboundLedgers {
 }

 Stoppable <.. LedgerReplayer
 InboundLedgers "1" --o LedgerReplayer
 ': inboundLedgers_
 Application "1" --o LedgerReplayer
 ': app_

 class LedgerReplayer {

@@ -42,17 +42,17 @@ class LedgerReplayer {
 -skipLists_ : hash_map<u256, wptr<SkipListAcquire>>
 }

 LedgerReplayer *-- LedgerReplayTask
 ': tasks_
 LedgerReplayer o-- LedgerDeltaAcquire
 ': deltas_
 LedgerReplayer o-- SkipListAcquire
 ': skipLists_

 TimeoutCounter <.. LedgerReplayTask
 InboundLedgers "1" --o LedgerReplayTask
 ': inboundLedgers_
 LedgerReplayer "1" --o LedgerReplayTask
 ': replayer_

 class LedgerReplayTask {

@@ -63,15 +63,15 @@ class LedgerReplayTask {
 +addDelta(sptr<LedgerDeltaAcquire>)
 }

 LedgerReplayTask *-- "1" SkipListAcquire
 ': skipListAcquirer_
 LedgerReplayTask *-- LedgerDeltaAcquire
 ': deltas_

 TimeoutCounter <.. SkipListAcquire
 InboundLedgers "1" --o SkipListAcquire
 ': inboundLedgers_
 LedgerReplayer "1" --o SkipListAcquire
 ': replayer_
 LedgerReplayTask --o SkipListAcquire : implicit via callback

@@ -83,9 +83,9 @@ class SkipListAcquire {
 }

 TimeoutCounter <.. LedgerDeltaAcquire
 InboundLedgers "1" --o LedgerDeltaAcquire
 ': inboundLedgers_
 LedgerReplayer "1" --o LedgerDeltaAcquire
 ': replayer_
 LedgerReplayTask --o LedgerDeltaAcquire : implicit via callback

@@ -95,4 +95,4 @@ class LedgerDeltaAcquire {
 -replayer_ : LedgerReplayer&
 -dataReadyCallbacks_ : vector<callback>
 }
 @enduml

@@ -38,7 +38,7 @@ deactivate lr
 loop
 lr -> lda : make_shared(ledgerId, ledgerSeq)
 return delta
 lr -> lrt : addDelta(delta)
 lrt -> lda : addDataCallback(callback)
 return
 return

@@ -62,7 +62,7 @@ deactivate peer
 lr -> lda : processData(ledgerHeader, txns)
 lda -> lda : notify()
 note over lda: call the callbacks added by\naddDataCallback(callback).
 lda -> lrt : callback(ledgerId)
 lrt -> lrt : deltaReady(ledgerId)
 lrt -> lrt : tryAdvance()
 loop as long as child can be built

@@ -82,4 +82,4 @@ deactivate peer
 deactivate peer


 @enduml
@@ -34,4 +34,4 @@ serializeBatch(
 msg.addBitString(txid);
 }

 } // namespace ripple

@@ -141,7 +141,7 @@ constexpr std::uint32_t const tfTransferable = 0x00000008;
 constexpr std::uint32_t const tfMutable = 0x00000010;

 // MPTokenIssuanceCreate flags:
 // NOTE - there is intentionally no flag here for lsfMPTLocked, which this transaction cannot mutate.
 constexpr std::uint32_t const tfMPTCanLock = lsfMPTCanLock;
 constexpr std::uint32_t const tfMPTRequireAuth = lsfMPTRequireAuth;
 constexpr std::uint32_t const tfMPTCanEscrow = lsfMPTCanEscrow;

@@ -243,7 +243,7 @@ constexpr std::uint32_t tfUntilFailure = 0x00040000;
 constexpr std::uint32_t tfIndependent = 0x00080000;
 /**
 * @note If nested Batch transactions are supported in the future, the tfInnerBatchTxn flag
 * will need to be removed from this mask to allow Batch transaction to be inside
 * the sfRawTransactions array.
 */
 constexpr std::uint32_t const tfBatchMask =

@@ -505,4 +505,3 @@ LEDGER_ENTRY(ltVAULT, 0x0084, Vault, vault, ({

 #undef EXPAND
 #undef LEDGER_ENTRY_DUPLICATE
-

@@ -145,4 +145,4 @@ Permission::permissionToTxType(uint32_t const& value) const
 return static_cast<TxType>(value - 1);
 }

 } // namespace ripple

@@ -1499,4 +1499,4 @@ class Delegate_test : public beast::unit_test::suite
 };
 BEAST_DEFINE_TESTSUITE(Delegate, app, ripple);
 } // namespace test
 } // namespace ripple

@@ -621,4 +621,4 @@ public:

 BEAST_DEFINE_TESTSUITE_PRIO(NFTokenAuth, app, ripple, 2);

 } // namespace ripple

@@ -720,4 +720,4 @@ struct JumpCollector
 } // namespace test
 } // namespace ripple

 #endif

@@ -59,4 +59,4 @@ public:
 } // namespace delegate
 } // namespace jtx
 } // namespace test
 } // namespace ripple

@@ -64,4 +64,4 @@ entry(jtx::Env& env, jtx::Account const& account, jtx::Account const& authorize)
 } // namespace delegate
 } // namespace jtx
 } // namespace test
 } // namespace ripple

@@ -159,4 +159,4 @@ DelegateSet::deleteDelegate(
 return tesSUCCESS;
 }

 } // namespace ripple

@@ -53,4 +53,4 @@ public:

 } // namespace ripple

 #endif