Compare commits


1 Commit

Author SHA1 Message Date
Valentin Balaschenko 7ce692db58 initial hash removal 2025-05-16 15:48:32 +01:00
579 changed files with 15146 additions and 37291 deletions

View File

@@ -94,4 +94,3 @@ SpacesInSquareBrackets: false
Standard: Cpp11
TabWidth: 8
UseTab: Never
QualifierAlignment: Right

View File

@@ -7,13 +7,13 @@ comment:
show_carryforward_flags: false
coverage:
range: "70..85"
range: "60..80"
precision: 1
round: nearest
status:
project:
default:
target: 75%
target: 60%
threshold: 2%
patch:
default:

8
.github/CODEOWNERS vendored
View File

@@ -1,8 +0,0 @@
# Allow anyone to review any change by default.
*
# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers

View File

@@ -6,29 +6,36 @@ inputs:
runs:
using: composite
steps:
- name: unlock Conan
shell: bash
run: conan remove --locks
- name: export custom recipes
shell: bash
run: |
conan export --version 1.1.10 external/snappy
conan export --version 4.0.3 external/soci
conan config set general.revisions_enabled=1
conan export external/snappy snappy/1.1.10@
conan export external/rocksdb rocksdb/9.7.3@
conan export external/soci soci/4.0.3@
conan export external/nudb nudb/2.0.8@
- name: add Ripple Conan remote
if: env.CONAN_URL != ''
shell: bash
run: |
if conan remote list | grep -q "ripple"; then
conan remote remove ripple
echo "Removed conan remote ripple"
fi
conan remote add --index 0 ripple "${CONAN_URL}"
echo "Added conan remote ripple at ${CONAN_URL}"
conan remote list
conan remote remove ripple || true
# Do not quote the URL. An empty string will be accepted (with
# a non-fatal warning), but a missing argument will not.
conan remote add ripple ${{ env.CONAN_URL }} --insert 0
- name: try to authenticate to Ripple Conan remote
if: env.CONAN_LOGIN_USERNAME_RIPPLE != '' && env.CONAN_PASSWORD_RIPPLE != ''
id: remote
shell: bash
run: |
echo "Authenticating to ripple remote..."
conan remote auth ripple --force
conan remote list-users
# `conan user` implicitly uses the environment variables
# CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
# https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
# https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
# https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
echo outcome=$(conan user --remote ripple --password >&2 \
&& echo success || echo failure) | tee ${GITHUB_OUTPUT}
- name: list missing binaries
id: binaries
shell: bash
@@ -43,8 +50,8 @@ runs:
cd ${build_dir}
conan install \
--output-folder . \
--build '*' \
--options:host "&:tests=True" \
--options:host "&:xrpld=True" \
--settings:all build_type=${{ inputs.configuration }} \
--build missing \
--options tests=True \
--options xrpld=True \
--settings build_type=${{ inputs.configuration }} \
..

View File

@@ -9,25 +9,24 @@ jobs:
check:
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
runs-on: ubuntu-24.04
container: ghcr.io/xrplf/ci/tools-rippled-clang-format
env:
CLANG_VERSION: 18
steps:
# For jobs running in containers, $GITHUB_WORKSPACE and ${{ github.workspace }} might not be the
# same directory. The actions/checkout step is *supposed* to checkout into $GITHUB_WORKSPACE and
# then add it to safe.directory (see instructions at https://github.com/actions/checkout)
# but that's apparently not happening for some container images. We can't be sure what is actually
# happening, so let's pre-emptively add both directories to safe.directory. There's a
# Github issue opened in 2022 and not resolved in 2025 https://github.com/actions/runner/issues/2058 ¯\_(ツ)_/¯
- run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
git config --global --add safe.directory ${{ github.workspace }}
- uses: actions/checkout@v4
- name: Format first-party sources
- name: Install clang-format
run: |
clang-format --version
find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format -i {} +
codename=$( lsb_release --codename --short )
sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
EOF
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
sudo apt-get update
sudo apt-get install clang-format-${CLANG_VERSION}
- name: Format first-party sources
run: find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format-${CLANG_VERSION} -i {} +
- name: Check for differences
id: assert
shell: bash
run: |
set -o pipefail
git diff --exit-code | tee "clang-format.patch"
@@ -59,6 +58,6 @@ jobs:
in your repo, commit, and push.
run: |
echo "${PREAMBLE}"
clang-format --version
clang-format-${CLANG_VERSION} --version
echo "${SUGGESTION}"
exit 1

View File

@@ -10,7 +10,7 @@ concurrency:
cancel-in-progress: true
jobs:
documentation:
job:
runs-on: ubuntu-latest
permissions:
contents: write

View File

@@ -1,6 +1,6 @@
name: Check libXRPL compatibility with Clio
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
on:

View File

@@ -15,18 +15,6 @@ on:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# This part of Conan configuration is specific to this workflow only; we do not want
# to pollute conan/profiles directory with settings which might not work for others
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
CONAN_GLOBAL_CONF: |
core.download:parallel={{os.cpu_count()}}
core.upload:parallel={{os.cpu_count()}}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
jobs:
@@ -40,22 +28,23 @@ jobs:
- Ninja
configuration:
- Release
runs-on: [self-hosted, macOS, mac-runner-m1]
runs-on: [self-hosted, macOS]
env:
# The `build` action requires these variables.
build_dir: .build
NUM_PROCESSORS: 12
steps:
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@v4
- name: install Conan
run: |
brew install conan
brew install conan@1
echo '/opt/homebrew/opt/conan@1/bin' >> $GITHUB_PATH
- name: install Ninja
if: matrix.generator == 'Ninja'
run: brew install ninja
- name: install python
run: |
run: |
if which python > /dev/null 2>&1; then
echo "Python executable exists"
else
@@ -82,31 +71,16 @@ jobs:
nproc --version
echo -n "nproc returns: "
nproc
system_profiler SPHardwareDataType
sysctl -n hw.logicalcpu
clang --version
- name: configure Conan
run : |
echo "${CONAN_GLOBAL_CONF}" >> $(conan config home)/global.conf
conan config install conan/profiles/ -tf $(conan config home)/profiles/
conan profile show
- name: export custom recipes
shell: bash
run: |
conan export --version 1.1.10 external/snappy
conan export --version 4.0.3 external/soci
- name: add Ripple Conan remote
if: env.CONAN_URL != ''
shell: bash
run: |
if conan remote list | grep -q "ripple"; then
conan remote remove ripple
echo "Removed conan remote ripple"
fi
conan remote add --index 0 ripple "${CONAN_URL}"
echo "Added conan remote ripple at ${CONAN_URL}"
conan profile new default --detect || true
conan profile update settings.compiler.cppstd=20 default
- name: build dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
with:
configuration: ${{ matrix.configuration }}
- name: build
@@ -115,11 +89,9 @@ jobs:
generator: ${{ matrix.generator }}
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: test
run: |
n=$(nproc)
echo "Using $n test jobs"
cd ${build_dir}
./rippled --unittest --unittest-jobs $n
ctest -j $n --output-on-failure
# TODO: Temporary disabled tests
# - name: test
# run: |
# n=$(nproc)
# echo "Using $n test jobs"
# ${build_dir}/rippled --unittest --unittest-jobs $n

View File

@@ -16,19 +16,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# This part of Conan configuration is specific to this workflow only; we do not want
# to pollute conan/profiles directory with settings which might not work for others
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
CONAN_GLOBAL_CONF: |
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
# This workflow has multiple job matrixes.
# They can be considered phases because most of the matrices ("test",
# "coverage", "conan", ) depend on the first ("dependencies").
@@ -67,45 +54,59 @@ jobs:
- Release
include:
- compiler: gcc
compiler_version: 12
distro: ubuntu
codename: jammy
profile:
version: 11
cc: /usr/bin/gcc
cxx: /usr/bin/g++
- compiler: clang
compiler_version: 16
distro: debian
codename: bookworm
profile:
version: 14
cc: /usr/bin/clang-14
cxx: /usr/bin/clang++-14
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/${{ matrix.distro }}-${{ matrix.codename }}:${{ matrix.compiler }}-${{ matrix.compiler_version }}
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
env:
build_dir: .build
steps:
- name: upgrade conan
run: |
pip install --upgrade "conan<2"
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@v4
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
lsb_release -a || true
${{ matrix.compiler }}-${{ matrix.compiler_version }} --version
${{ matrix.profile.cc }} --version
conan --version
cmake --version
env | sort
- name: configure Conan
run: |
echo "${CONAN_GLOBAL_CONF}" >> $(conan config home)/global.conf
conan config install conan/profiles/ -tf $(conan config home)/profiles/
conan profile show
conan profile new default --detect
conan profile update settings.compiler.cppstd=20 default
conan profile update settings.compiler=${{ matrix.compiler }} default
conan profile update settings.compiler.version=${{ matrix.profile.version }} default
conan profile update settings.compiler.libcxx=libstdc++11 default
conan profile update env.CC=${{ matrix.profile.cc }} default
conan profile update env.CXX=${{ matrix.profile.cxx }} default
conan profile update conf.tools.build:compiler_executables='{"c": "${{ matrix.profile.cc }}", "cpp": "${{ matrix.profile.cxx }}"}' default
- name: archive profile
# Create this archive before dependencies are added to the local cache.
run: tar -czf conan.tar.gz -C ${CONAN_HOME} .
run: tar -czf conan.tar -C ~/.conan .
- name: build dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
with:
configuration: ${{ matrix.configuration }}
- name: upload archive
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
path: conan.tar.gz
path: conan.tar
if-no-files-found: error
test:
@@ -120,32 +121,26 @@ jobs:
configuration:
- Debug
- Release
include:
- compiler: gcc
compiler_version: 12
distro: ubuntu
codename: jammy
- compiler: clang
compiler_version: 16
distro: debian
codename: bookworm
cmake-args:
-
- "-Dunity=ON"
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/${{ matrix.distro }}-${{ matrix.codename }}:${{ matrix.compiler }}-${{ matrix.compiler_version }}
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
env:
build_dir: .build
steps:
- name: upgrade conan
run: |
pip install --upgrade "conan<2"
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
uses: actions/download-artifact@v4
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
mkdir -p ~/.conan
tar -xzf conan.tar -C ~/.conan
- name: check environment
run: |
env | sort
@@ -153,9 +148,11 @@ jobs:
conan --version
cmake --version
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@v4
- name: dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
with:
configuration: ${{ matrix.configuration }}
- name: build
@@ -164,21 +161,9 @@ jobs:
generator: Ninja
configuration: ${{ matrix.configuration }}
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: check linking
run: |
cd ${build_dir}
ldd ./rippled
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
echo 'The binary is statically linked.'
else
echo 'The binary is dynamically linked.'
exit 1
fi
- name: test
run: |
cd ${build_dir}
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
${build_dir}/rippled --unittest --unittest-jobs $(nproc)
reference-fee-test:
strategy:
@@ -195,18 +180,21 @@ jobs:
- "-DUNIT_TEST_REFERENCE_FEE=1000"
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
env:
build_dir: .build
steps:
- name: upgrade conan
run: |
pip install --upgrade "conan<2"
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
uses: actions/download-artifact@v4
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
mkdir -p ~/.conan
tar -xzf conan.tar -C ~/.conan
- name: check environment
run: |
env | sort
@@ -214,9 +202,11 @@ jobs:
conan --version
cmake --version
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@v4
- name: dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
with:
configuration: ${{ matrix.configuration }}
- name: build
@@ -227,9 +217,7 @@ jobs:
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
- name: test
run: |
cd ${build_dir}
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
${build_dir}/rippled --unittest --unittest-jobs $(nproc)
coverage:
strategy:
@@ -243,18 +231,23 @@ jobs:
- Debug
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
env:
build_dir: .build
steps:
- name: upgrade conan
run: |
pip install --upgrade "conan<2"
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
uses: actions/download-artifact@v4
with:
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
mkdir -p ~/.conan
tar -xzf conan.tar -C ~/.conan
- name: install gcovr
run: pip install "gcovr>=7,<8"
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
@@ -262,11 +255,13 @@ jobs:
cmake --version
gcovr --version
env | sort
ls ${CONAN_HOME}
ls ~/.conan
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@v4
- name: dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
with:
configuration: ${{ matrix.configuration }}
- name: build
@@ -288,7 +283,7 @@ jobs:
run: |
mv "${build_dir}/coverage.xml" ./
- name: archive coverage report
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
uses: actions/upload-artifact@v4
with:
name: coverage.xml
path: coverage.xml
@@ -310,23 +305,22 @@ jobs:
conan:
needs: dependencies
runs-on: [self-hosted, heavy]
container:
image: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
env:
build_dir: .build
platform: linux
compiler: gcc
compiler_version: 12
configuration: Release
steps:
- name: upgrade conan
run: |
pip install --upgrade "conan<2"
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
uses: actions/download-artifact@v4
with:
name: ${{ env.platform }}-${{ env.compiler }}-${{ env.configuration }}
name: linux-gcc-${{ env.configuration }}
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
mkdir -p ~/.conan
tar -xzf conan.tar -C ~/.conan
- name: check environment
run: |
env | sort
@@ -334,66 +328,95 @@ jobs:
conan --version
cmake --version
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@v4
- name: dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
with:
configuration: ${{ env.configuration }}
- name: export
run: |
conan export . --version head
version=$(conan inspect --raw version .)
reference="xrpl/${version}@local/test"
conan remove -f ${reference} || true
conan export . local/test
echo "reference=${reference}" >> "${GITHUB_ENV}"
- name: build
run: |
cd tests/conan
mkdir ${build_dir} && cd ${build_dir}
conan install .. \
--settings:all build_type=${configuration} \
--output-folder . \
--build missing
mkdir ${build_dir}
cd ${build_dir}
conan install .. --output-folder . \
--require-override ${reference} --build missing
cmake .. \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=./build/${configuration}/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE=${configuration}
cmake --build .
./example | grep '^[[:digit:]]\+\.[[:digit:]]\+\.[[:digit:]]\+'
# NOTE we are not using dependencies built above because it lags with
# compiler versions. Instrumentation requires clang version 16 or
# later
instrumentation-build:
needs: dependencies
runs-on: [self-hosted, heavy]
container: ghcr.io/xrplf/ci/debian-bookworm:clang-16
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
env:
build_dir: .build
CLANG_RELEASE: 16
strategy:
fail-fast: false
runs-on: [self-hosted, heavy]
container: debian:bookworm
steps:
- name: download cache
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: linux-clang-Debug
- name: extract cache
- name: install prerequisites
env:
DEBIAN_FRONTEND: noninteractive
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
conan --version
cmake --version
env | sort
ls ${CONAN_HOME}
apt-get update
apt-get install --yes --no-install-recommends \
clang-${CLANG_RELEASE} clang++-${CLANG_RELEASE} \
python3-pip python-is-python3 make cmake git wget
apt-get clean
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-${CLANG_RELEASE} 100 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-${CLANG_RELEASE}
update-alternatives --auto clang
pip install --no-cache --break-system-packages "conan<2"
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: Debug
uses: actions/checkout@v4
- name: prepare environment
run: |
mkdir -p ${build_dir}
echo "SOURCE_DIR=$(pwd)" >> $GITHUB_ENV
echo "BUILD_DIR=$(pwd)/${build_dir}" >> $GITHUB_ENV
mkdir ${GITHUB_WORKSPACE}/.build
echo "SOURCE_DIR=$GITHUB_WORKSPACE" >> $GITHUB_ENV
echo "BUILD_DIR=$GITHUB_WORKSPACE/.build" >> $GITHUB_ENV
echo "CC=/usr/bin/clang" >> $GITHUB_ENV
echo "CXX=/usr/bin/clang++" >> $GITHUB_ENV
- name: configure Conan
run: |
conan profile new --detect default
conan profile update settings.compiler=clang default
conan profile update settings.compiler.version=${CLANG_RELEASE} default
conan profile update settings.compiler.libcxx=libstdc++11 default
conan profile update settings.compiler.cppstd=20 default
conan profile update options.rocksdb=False default
conan profile update \
'conf.tools.build:compiler_executables={"c": "/usr/bin/clang", "cpp": "/usr/bin/clang++"}' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
conan export external/snappy snappy/1.1.10@
conan export external/soci soci/4.0.3@
- name: build dependencies
run: |
cd ${BUILD_DIR}
conan install ${SOURCE_DIR} \
--output-folder ${BUILD_DIR} \
--install-folder ${BUILD_DIR} \
--build missing \
--settings build_type=Debug
- name: build with instrumentation
run: |
@@ -418,4 +441,3 @@ jobs:
run: |
cd ${BUILD_DIR}
./rippled -u --unittest-jobs $(( $(nproc)/4 ))
ctest -j $(nproc) --output-on-failure

View File

@@ -18,18 +18,6 @@ on:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# This part of Conan configuration is specific to this workflow only; we do not want
# to pollute conan/profiles directory with settings which might not work for others
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
CONAN_GLOBAL_CONF: |
core.download:parallel={{os.cpu_count()}}
core.upload:parallel={{os.cpu_count()}}
tools.build:jobs=24
tools.build:verbosity=verbose
tools.compilation:verbosity=verbose
jobs:
@@ -54,11 +42,11 @@ jobs:
build_dir: .build
steps:
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@v4
- name: choose Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
uses: actions/setup-python@v5
with:
python-version: 3.13
python-version: 3.9
- name: learn Python cache directory
id: pip-cache
shell: bash
@@ -66,12 +54,12 @@ jobs:
python -m pip install --upgrade pip
echo "dir=$(pip cache dir)" | tee ${GITHUB_OUTPUT}
- name: restore Python cache directory
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684
uses: actions/cache@v4
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-${{ hashFiles('.github/workflows/windows.yml') }}
- name: install Conan
run: pip install wheel conan
run: pip install wheel 'conan<2'
- name: check environment
run: |
dir env:
@@ -82,26 +70,17 @@ jobs:
- name: configure Conan
shell: bash
run: |
echo "${CONAN_GLOBAL_CONF}" >> $(conan config home)/global.conf
conan config install conan/profiles/ -tf $(conan config home)/profiles/
conan profile show
- name: export custom recipes
shell: bash
run: |
conan export --version 1.1.10 external/snappy
conan export --version 4.0.3 external/soci
- name: add Ripple Conan remote
if: env.CONAN_URL != ''
shell: bash
run: |
if conan remote list | grep -q "ripple"; then
conan remote remove ripple
echo "Removed conan remote ripple"
fi
conan remote add --index 0 ripple "${CONAN_URL}"
echo "Added conan remote ripple at ${CONAN_URL}"
conan profile new default --detect
conan profile update settings.compiler.cppstd=20 default
conan profile update \
settings.compiler.runtime=MT${{ matrix.configuration.runtime }} \
default
- name: build dependencies
uses: ./.github/actions/dependencies
env:
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
with:
configuration: ${{ matrix.configuration.type }}
- name: build
@@ -116,6 +95,5 @@ jobs:
shell: bash
if: ${{ matrix.configuration.tests }}
run: |
cd ${build_dir}/${{ matrix.configuration.type }}
./rippled --unittest --unittest-jobs $(nproc)
ctest -j $(nproc) --output-on-failure
${build_dir}/${{ matrix.configuration.type }}/rippled --unittest \
--unittest-jobs $(nproc)

View File

@@ -1,6 +1,6 @@
# .pre-commit-config.yaml
repos:
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v18.1.8
rev: v18.1.3
hooks:
- id: clang-format

View File

@@ -167,18 +167,43 @@ It does not explicitly link the C++ standard library,
which allows you to statically link it with GCC, if you want.
```
# Conan 1.x
conan export external/snappy snappy/1.1.10@
# Conan 2.x
conan export --version 1.1.10 external/snappy
```
Export our [Conan recipe for RocksDB](./external/rocksdb).
It does not override paths to dependencies when building with Visual Studio.
```
# Conan 1.x
conan export external/rocksdb rocksdb/9.7.3@
# Conan 2.x
conan export --version 9.7.3 external/rocksdb
```
Export our [Conan recipe for SOCI](./external/soci).
It patches their CMake to correctly import its dependencies.
```
# Conan 1.x
conan export external/soci soci/4.0.3@
# Conan 2.x
conan export --version 4.0.3 external/soci
```
Export our [Conan recipe for NuDB](./external/nudb).
It fixes some source files to add missing `#include`s.
```
# Conan 1.x
conan export external/nudb nudb/2.0.8@
# Conan 2.x
conan export --version 2.0.8 external/nudb
```
### Build and Test
1. Create a build directory and move into it.
@@ -263,7 +288,7 @@ It patches their CMake to correctly import its dependencies.
Single-config generators:
```
cmake --build . -j $(nproc)
cmake --build .
```
Multi-config generators:
@@ -370,13 +395,18 @@ and can be helpful for detecting `#include` omissions.
## Troubleshooting
### Conan
After any updates or changes to dependencies, you may need to do the following:
1. Remove your build directory.
2. Remove the Conan cache: `conan remove "*" -c`
3. Re-run [conan install](#build-and-test).
2. Remove the Conan cache:
```
rm -rf ~/.conan/data
```
4. Re-run [conan install](#build-and-test).
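Taken together, these steps amount to a short shell sequence. A minimal sketch, assuming the default Conan 1.x cache location (`~/.conan/data`) and a build directory named `.build`:
```
# Remove stale build output and the Conan 1.x package cache, then
# re-create the build directory and reinstall the dependencies.
rm -rf .build ~/.conan/data
mkdir .build && cd .build
conan install .. --build missing
```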
### 'protobuf/port_def.inc' file not found
@@ -394,6 +424,54 @@ For example, if you want to build Debug:
1. For conan install, pass `--settings build_type=Debug`
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
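Putting those two settings together, a minimal sketch of a Debug build run from a `.build` directory might look like the following; the toolchain-file path is an assumption based on the generators folder written by `conan install` in the Build and Test steps, so adjust it if your layout differs:
```
# Hypothetical Debug build: pass the build type to both Conan and CMake.
cd .build
conan install .. --build missing --settings build_type=Debug
# Toolchain path assumes the generators folder produced by `conan install`.
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
      -DCMAKE_BUILD_TYPE=Debug ..
cmake --build .
```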
### no std::result_of
If your compiler version is recent enough to have removed `std::result_of` as
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
definition to your build.
```
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
```
### call to 'async_teardown' is ambiguous
If you are compiling with an early version of Clang 16, then you might hit
a [regression][6] when compiling C++20 that manifests as an [error in a Boost
header][7]. You can work around it by adding this preprocessor definition:
```
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
```
### recompile with -fPIC
If you get a linker error suggesting that you recompile Boost with
position-independent code, such as:
```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
requires unsupported dynamic reloc 11; recompile with -fPIC
```
Conan most likely downloaded a bad binary distribution of the dependency.
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
for Linux. The solution is to build the dependency locally by passing
`--build boost` when calling `conan install`.
```
conan install --build boost ...
```
## Add a Dependency
If you want to experiment with a new package, follow these steps:

View File

@@ -132,7 +132,6 @@ test.shamap > xrpl.protocol
test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
xrpl.json > xrpl.basics
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json

View File

@@ -90,11 +90,6 @@ set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
)
set(SECP256K1_INSTALL TRUE)
set(SECP256K1_BUILD_BENCHMARK FALSE)
set(SECP256K1_BUILD_TESTS FALSE)
set(SECP256K1_BUILD_EXHAUSTIVE_TESTS FALSE)
set(SECP256K1_BUILD_CTIME_TESTS FALSE)
set(SECP256K1_BUILD_EXAMPLES FALSE)
add_subdirectory(external/secp256k1)
add_library(secp256k1::secp256k1 ALIAS secp256k1)
add_subdirectory(external/ed25519-donna)
@@ -149,8 +144,3 @@ set(PROJECT_EXPORT_SET RippleExports)
include(RippledCore)
include(RippledInstall)
include(RippledValidatorKeys)
if(tests)
include(CTest)
add_subdirectory(src/tests/libxrpl)
endif()

View File

@@ -1,5 +1,3 @@
[![codecov](https://codecov.io/gh/XRPLF/rippled/graph/badge.svg?token=WyFr5ajq3O)](https://codecov.io/gh/XRPLF/rippled)
# The XRP Ledger
The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powered by a network of peer-to-peer nodes. The XRP Ledger uses a novel Byzantine Fault Tolerant consensus algorithm to settle and record transactions in a secure distributed database without a central operator.

4817
RELEASENOTES.md Normal file

File diff suppressed because it is too large

View File

@@ -83,7 +83,7 @@ To report a qualifying bug, please send a detailed report to:
|Long Key ID | `0xCD49A0AFC57929BE` |
|Fingerprint | `24E6 3B02 37E0 FA9C 5E96 8974 CD49 A0AF C579 29BE` |
The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is:
The full PGP key for this address, which is also available on several key servers (e.g. on [keys.gnupg.net](https://keys.gnupg.net)), is:
```
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBFUwGHYBEAC0wpGpBPkd8W1UdQjg9+cEFzeIEJRaoZoeuJD8mofwI5Ejnjdt

View File

@@ -26,7 +26,7 @@
#
# Examples:
# https://vl.ripple.com
# https://unl.xrplf.org
# https://vl.xrplf.org
# http://127.0.0.1:8000
# file:///etc/opt/ripple/vl.txt
#

View File

@@ -98,9 +98,6 @@
# 2024-04-03, Bronek Kozicki
# - add support for output formats: jacoco, clover, lcov
#
# 2025-05-12, Jingchen Wu
# - add -fprofile-update=atomic to ensure atomic profile generation
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
@@ -203,27 +200,15 @@ set(COVERAGE_COMPILER_FLAGS "-g --coverage"
CACHE INTERNAL "")
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
include(CheckCXXCompilerFlag)
include(CheckCCompilerFlag)
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
if(HAVE_cxx_fprofile_abs_path)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
include(CheckCCompilerFlag)
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
if(HAVE_c_fprofile_abs_path)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif()
check_cxx_compiler_flag(-fprofile-update HAVE_cxx_fprofile_update)
if(HAVE_cxx_fprofile_update)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
endif()
check_c_compiler_flag(-fprofile-update HAVE_c_fprofile_update)
if(HAVE_c_fprofile_update)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
endif()
endif()
set(CMAKE_Fortran_FLAGS_COVERAGE

View File

@@ -90,15 +90,28 @@ if (MSVC)
-errorreport:none
-machine:X64)
else ()
# HACK : because these need to come first, before any warning demotion
string (APPEND CMAKE_CXX_FLAGS " -Wall -Wdeprecated")
if (wextra)
string (APPEND CMAKE_CXX_FLAGS " -Wextra -Wno-unused-parameter")
endif ()
# not MSVC
target_compile_options (common
INTERFACE
-Wall
-Wdeprecated
$<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
$<$<BOOL:${werr}>:-Werror>
-fstack-protector
$<$<COMPILE_LANGUAGE:CXX>:
-frtti
-Wnon-virtual-dtor
>
-Wno-sign-compare
-Wno-unused-but-set-variable
-Wno-char-subscripts
-Wno-format
-Wno-unused-local-typedefs
-fstack-protector
$<$<BOOL:${is_gcc}>:
-Wno-unused-but-set-variable
-Wno-deprecated
>
$<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
# tweak gcc optimization for debug
$<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>

View File

@@ -53,9 +53,9 @@ set(download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake")
file(WRITE
"${download_script}"
"file(DOWNLOAD \
https://github.com/PeterFeicht/cppreference-doc/releases/download/v20250209/html-book-20250209.zip \
http://upload.cppreference.com/mwiki/images/b/b2/html_book_20190607.zip \
${CMAKE_BINARY_DIR}/docs/cppreference.zip \
EXPECTED_HASH MD5=bda585f72fbca4b817b29a3d5746567b \
EXPECTED_HASH MD5=82b3a612d7d35a83e3cb1195a63689ab \
)\n \
execute_process( \
COMMAND \"${CMAKE_COMMAND}\" -E tar -xf cppreference.zip \

View File

@@ -2,6 +2,16 @@
convenience variables and sanity checks
#]===================================================================]
include(ProcessorCount)
if (NOT ep_procs)
ProcessorCount(ep_procs)
if (ep_procs GREATER 1)
# never use more than half of cores for EP builds
math (EXPR ep_procs "${ep_procs} / 2")
message (STATUS "Using ${ep_procs} cores for ExternalProject builds.")
endif ()
endif ()
get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)

View File

@@ -18,7 +18,7 @@ if(tests)
endif()
endif()
option(unity "Creates a build using UNITY support in cmake." OFF)
option(unity "Creates a build using UNITY support in cmake. This is the default" ON)
if(unity)
if(NOT is_ci)
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")

View File

@@ -2,6 +2,7 @@ find_package(Boost 1.82 REQUIRED
COMPONENTS
chrono
container
context
coroutine
date_time
filesystem
@@ -23,7 +24,7 @@ endif()
target_link_libraries(ripple_boost
INTERFACE
Boost::headers
Boost::boost
Boost::chrono
Boost::container
Boost::coroutine

View File

@@ -1,41 +0,0 @@
include(isolate_headers)
function(xrpl_add_test name)
set(target ${PROJECT_NAME}.test.${name})
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
)
add_executable(${target} EXCLUDE_FROM_ALL ${ARGN} ${sources})
isolate_headers(
${target}
"${CMAKE_SOURCE_DIR}"
"${CMAKE_SOURCE_DIR}/tests/${name}"
PRIVATE
)
# Make sure the test isn't optimized away in unity builds
set_target_properties(${target} PROPERTIES
UNITY_BUILD_MODE GROUP
UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
add_test(NAME ${target} COMMAND ${target})
set_tests_properties(
${target} PROPERTIES
FIXTURES_REQUIRED ${target}_fixture
)
add_test(
NAME ${target}.build
COMMAND
${CMAKE_COMMAND}
--build ${CMAKE_BINARY_DIR}
--config $<CONFIG>
--target ${target}
)
set_tests_properties(${target}.build PROPERTIES
FIXTURES_SETUP ${target}_fixture
)
endfunction()

View File

@@ -1,34 +0,0 @@
{% set os = detect_api.detect_os() %}
{% set arch = detect_api.detect_arch() %}
{% set compiler, version, compiler_exe = detect_api.detect_default_compiler() %}
{% set compiler_version = version %}
{% if os == "Linux" %}
{% set compiler_version = detect_api.default_compiler_version(compiler, version) %}
{% endif %}
[settings]
os={{ os }}
arch={{ arch }}
build_type=Debug
compiler={{compiler}}
compiler.version={{ compiler_version }}
compiler.cppstd=20
{% if os == "Windows" %}
compiler.runtime=static
{% else %}
compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}
{% endif %}
[conf]
{% if compiler == "clang" and compiler_version >= 19 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "apple-clang" and compiler_version >= 17 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags=['-Wno-restrict']
{% endif %}
[tool_requires]
!cmake/*: cmake/[>=3 <4]

View File

@@ -1,4 +1,4 @@
from conan import ConanFile, __version__ as conan_version
from conan import ConanFile
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
import re
@@ -24,20 +24,18 @@ class Xrpl(ConanFile):
}
requires = [
'date/3.0.3',
'grpc/1.50.1',
'libarchive/3.8.1',
'nudb/2.0.9',
'openssl/1.1.1w',
'libarchive/3.7.6',
'nudb/2.0.8',
'openssl/1.1.1v',
'soci/4.0.3',
'xxhash/0.8.2',
'zlib/1.3.1',
]
test_requires = [
'doctest/2.4.11',
]
tool_requires = [
'protobuf/3.21.12',
'protobuf/3.21.9',
]
default_options = {
@@ -89,31 +87,26 @@ class Xrpl(ConanFile):
}
def set_version(self):
if self.version is None:
path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
regex = r'versionString\s?=\s?\"(.*)\"'
with open(path, encoding='utf-8') as file:
matches = (re.search(regex, line) for line in file)
match = next(m for m in matches if m)
self.version = match.group(1)
path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
regex = r'versionString\s?=\s?\"(.*)\"'
with open(path, 'r') as file:
matches = (re.search(regex, line) for line in file)
match = next(m for m in matches if m)
self.version = match.group(1)
def configure(self):
if self.settings.compiler == 'apple-clang':
self.options['boost'].visibility = 'global'
def requirements(self):
# Conan 2 requires transitive headers to be specified
transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
self.requires('boost/1.86.0', force=True, **transitive_headers_opt)
self.requires('date/3.0.4', **transitive_headers_opt)
self.requires('boost/1.83.0', force=True)
self.requires('lz4/1.10.0', force=True)
self.requires('protobuf/3.21.12', force=True)
self.requires('sqlite3/3.49.1', force=True)
self.requires('protobuf/3.21.9', force=True)
self.requires('sqlite3/3.47.0', force=True)
if self.options.jemalloc:
self.requires('jemalloc/5.3.0')
if self.options.rocksdb:
self.requires('rocksdb/10.0.1')
self.requires('xxhash/0.8.3', **transitive_headers_opt)
self.requires('rocksdb/9.7.3')
exports_sources = (
'CMakeLists.txt',
@@ -168,17 +161,7 @@ class Xrpl(ConanFile):
# `include/`, not `include/ripple/proto/`.
libxrpl.includedirs = ['include', 'include/ripple/proto']
libxrpl.requires = [
'boost::headers',
'boost::chrono',
'boost::container',
'boost::coroutine',
'boost::date_time',
'boost::filesystem',
'boost::json',
'boost::program_options',
'boost::regex',
'boost::system',
'boost::thread',
'boost::boost',
'date::date',
'grpc::grpc++',
'libarchive::libarchive',

View File

@@ -23,7 +23,7 @@ direction.
```
apt update
apt install --yes curl git libssl-dev pipx python3.10-dev python3-pip make g++-11 libprotobuf-dev protobuf-compiler
apt install --yes curl git libssl-dev python3.10-dev python3-pip make g++-11 libprotobuf-dev protobuf-compiler
curl --location --remote-name \
"https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1.tar.gz"
@@ -35,8 +35,7 @@ make --jobs $(nproc)
make install
cd ..
pipx install 'conan<2'
pipx ensurepath
pip3 install 'conan<2'
```
[1]: https://github.com/thejohnfreeman/rippled-docker/blob/master/ubuntu-22.04/install.sh

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.18)
cmake_minimum_required(VERSION 3.25)
# Note, version set explicitly by rippled project
project(antithesis-sdk-cpp VERSION 0.4.4 LANGUAGES CXX)

View File

@@ -17,9 +17,6 @@ add_library(ed25519 STATIC
)
add_library(ed25519::ed25519 ALIAS ed25519)
target_link_libraries(ed25519 PUBLIC OpenSSL::SSL)
if(NOT MSVC)
target_compile_options(ed25519 PRIVATE -Wno-implicit-fallthrough)
endif()
include(GNUInstallDirs)

10
external/nudb/conandata.yml vendored Normal file
View File

@@ -0,0 +1,10 @@
sources:
"2.0.8":
url: "https://github.com/CPPAlliance/NuDB/archive/2.0.8.tar.gz"
sha256: "9b71903d8ba111cd893ab064b9a8b6ac4124ed8bd6b4f67250205bc43c7f13a8"
patches:
"2.0.8":
- patch_file: "patches/2.0.8-0001-add-include-stdexcept-for-msvc.patch"
patch_description: "Fix build for MSVC by including stdexcept"
patch_type: "portability"
patch_source: "https://github.com/cppalliance/NuDB/pull/100/files"

72
external/nudb/conanfile.py vendored Normal file
View File

@@ -0,0 +1,72 @@
import os
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get
from conan.tools.layout import basic_layout
required_conan_version = ">=1.52.0"
class NudbConan(ConanFile):
name = "nudb"
description = "A fast key/value insert-only database for SSD drives in C++11"
license = "BSL-1.0"
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/CPPAlliance/NuDB"
topics = ("header-only", "KVS", "insert-only")
package_type = "header-library"
settings = "os", "arch", "compiler", "build_type"
no_copy_source = True
@property
def _min_cppstd(self):
return 11
def export_sources(self):
export_conandata_patches(self)
def layout(self):
basic_layout(self, src_folder="src")
def requirements(self):
self.requires("boost/1.83.0")
def package_id(self):
self.info.clear()
def validate(self):
if self.settings.compiler.cppstd:
check_min_cppstd(self, self._min_cppstd)
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def build(self):
apply_conandata_patches(self)
def package(self):
copy(self, "LICENSE*",
dst=os.path.join(self.package_folder, "licenses"),
src=self.source_folder)
copy(self, "*",
dst=os.path.join(self.package_folder, "include"),
src=os.path.join(self.source_folder, "include"))
def package_info(self):
self.cpp_info.bindirs = []
self.cpp_info.libdirs = []
self.cpp_info.set_property("cmake_target_name", "NuDB")
self.cpp_info.set_property("cmake_target_aliases", ["NuDB::nudb"])
self.cpp_info.set_property("cmake_find_mode", "both")
self.cpp_info.components["core"].set_property("cmake_target_name", "nudb")
self.cpp_info.components["core"].names["cmake_find_package"] = "nudb"
self.cpp_info.components["core"].names["cmake_find_package_multi"] = "nudb"
self.cpp_info.components["core"].requires = ["boost::thread", "boost::system"]
# TODO: to remove in conan v2 once cmake_find_package_* generators removed
self.cpp_info.names["cmake_find_package"] = "NuDB"
self.cpp_info.names["cmake_find_package_multi"] = "NuDB"

View File

@@ -0,0 +1,24 @@
diff --git a/include/nudb/detail/stream.hpp b/include/nudb/detail/stream.hpp
index 6c07bf1..e0ce8ed 100644
--- a/include/nudb/detail/stream.hpp
+++ b/include/nudb/detail/stream.hpp
@@ -14,6 +14,7 @@
#include <cstdint>
#include <cstring>
#include <memory>
+#include <stdexcept>
namespace nudb {
namespace detail {
diff --git a/include/nudb/impl/context.ipp b/include/nudb/impl/context.ipp
index beb7058..ffde0b3 100644
--- a/include/nudb/impl/context.ipp
+++ b/include/nudb/impl/context.ipp
@@ -9,6 +9,7 @@
#define NUDB_IMPL_CONTEXT_IPP
#include <nudb/detail/store_base.hpp>
+#include <stdexcept>
namespace nudb {

12
external/rocksdb/conandata.yml vendored Normal file
View File

@@ -0,0 +1,12 @@
sources:
"9.7.3":
url: "https://github.com/facebook/rocksdb/archive/refs/tags/v9.7.3.tar.gz"
sha256: "acfabb989cbfb5b5c4d23214819b059638193ec33dad2d88373c46448d16d38b"
patches:
"9.7.3":
- patch_file: "patches/9.x.x-0001-exclude-thirdparty.patch"
patch_description: "Do not include thirdparty.inc"
patch_type: "portability"
- patch_file: "patches/9.7.3-0001-memory-leak.patch"
patch_description: "Fix a leak of obsolete blob files left open until DB::Close()"
patch_type: "portability"

235
external/rocksdb/conanfile.py vendored Normal file
View File

@@ -0,0 +1,235 @@
import os
import glob
import shutil
from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, collect_libs, copy, export_conandata_patches, get, rm, rmdir
from conan.tools.microsoft import check_min_vs, is_msvc, is_msvc_static_runtime
from conan.tools.scm import Version
required_conan_version = ">=1.53.0"
class RocksDBConan(ConanFile):
name = "rocksdb"
description = "A library that provides an embeddable, persistent key-value store for fast storage"
license = ("GPL-2.0-only", "Apache-2.0")
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/facebook/rocksdb"
topics = ("database", "leveldb", "facebook", "key-value")
package_type = "library"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
"lite": [True, False],
"with_gflags": [True, False],
"with_snappy": [True, False],
"with_lz4": [True, False],
"with_zlib": [True, False],
"with_zstd": [True, False],
"with_tbb": [True, False],
"with_jemalloc": [True, False],
"enable_sse": [False, "sse42", "avx2"],
"use_rtti": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
"lite": False,
"with_snappy": False,
"with_lz4": False,
"with_zlib": False,
"with_zstd": False,
"with_gflags": False,
"with_tbb": False,
"with_jemalloc": False,
"enable_sse": False,
"use_rtti": False,
}
@property
def _min_cppstd(self):
return "11" if Version(self.version) < "8.8.1" else "17"
@property
def _compilers_minimum_version(self):
return {} if self._min_cppstd == "11" else {
"apple-clang": "10",
"clang": "7",
"gcc": "7",
"msvc": "191",
"Visual Studio": "15",
}
def export_sources(self):
export_conandata_patches(self)
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
if self.settings.arch != "x86_64":
del self.options.with_tbb
if self.settings.build_type == "Debug":
self.options.use_rtti = True # Rtti are used in asserts for debug mode...
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
def layout(self):
cmake_layout(self, src_folder="src")
def requirements(self):
if self.options.with_gflags:
self.requires("gflags/2.2.2")
if self.options.with_snappy:
self.requires("snappy/1.1.10")
if self.options.with_lz4:
self.requires("lz4/1.10.0")
if self.options.with_zlib:
self.requires("zlib/[>=1.2.11 <2]")
if self.options.with_zstd:
self.requires("zstd/1.5.6")
if self.options.get_safe("with_tbb"):
self.requires("onetbb/2021.12.0")
if self.options.with_jemalloc:
self.requires("jemalloc/5.3.0")
def validate(self):
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, self._min_cppstd)
minimum_version = self._compilers_minimum_version.get(str(self.settings.compiler), False)
if minimum_version and Version(self.settings.compiler.version) < minimum_version:
raise ConanInvalidConfiguration(
f"{self.ref} requires C++{self._min_cppstd}, which your compiler does not support."
)
if self.settings.arch not in ["x86_64", "ppc64le", "ppc64", "mips64", "armv8"]:
raise ConanInvalidConfiguration("Rocksdb requires 64 bits")
check_min_vs(self, "191")
if self.version == "6.20.3" and \
self.settings.os == "Linux" and \
self.settings.compiler == "gcc" and \
Version(self.settings.compiler.version) < "5":
raise ConanInvalidConfiguration("Rocksdb 6.20.3 is not compilable with gcc <5.") # See https://github.com/facebook/rocksdb/issues/3522
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def generate(self):
tc = CMakeToolchain(self)
tc.variables["FAIL_ON_WARNINGS"] = False
tc.variables["WITH_TESTS"] = False
tc.variables["WITH_TOOLS"] = False
tc.variables["WITH_CORE_TOOLS"] = False
tc.variables["WITH_BENCHMARK_TOOLS"] = False
tc.variables["WITH_FOLLY_DISTRIBUTED_MUTEX"] = False
if is_msvc(self):
tc.variables["WITH_MD_LIBRARY"] = not is_msvc_static_runtime(self)
tc.variables["ROCKSDB_INSTALL_ON_WINDOWS"] = self.settings.os == "Windows"
tc.variables["ROCKSDB_LITE"] = self.options.lite
tc.variables["WITH_GFLAGS"] = self.options.with_gflags
tc.variables["WITH_SNAPPY"] = self.options.with_snappy
tc.variables["WITH_LZ4"] = self.options.with_lz4
tc.variables["WITH_ZLIB"] = self.options.with_zlib
tc.variables["WITH_ZSTD"] = self.options.with_zstd
tc.variables["WITH_TBB"] = self.options.get_safe("with_tbb", False)
tc.variables["WITH_JEMALLOC"] = self.options.with_jemalloc
tc.variables["ROCKSDB_BUILD_SHARED"] = self.options.shared
tc.variables["ROCKSDB_LIBRARY_EXPORTS"] = self.settings.os == "Windows" and self.options.shared
tc.variables["ROCKSDB_DLL" ] = self.settings.os == "Windows" and self.options.shared
tc.variables["USE_RTTI"] = self.options.use_rtti
if not bool(self.options.enable_sse):
tc.variables["PORTABLE"] = True
tc.variables["FORCE_SSE42"] = False
elif self.options.enable_sse == "sse42":
tc.variables["PORTABLE"] = True
tc.variables["FORCE_SSE42"] = True
elif self.options.enable_sse == "avx2":
tc.variables["PORTABLE"] = False
tc.variables["FORCE_SSE42"] = False
# not available yet in CCI
tc.variables["WITH_NUMA"] = False
tc.generate()
deps = CMakeDeps(self)
if self.options.with_jemalloc:
deps.set_property("jemalloc", "cmake_file_name", "JeMalloc")
deps.set_property("jemalloc", "cmake_target_name", "JeMalloc::JeMalloc")
if self.options.with_zstd:
deps.set_property("zstd", "cmake_target_name", "zstd::zstd")
deps.generate()
def build(self):
apply_conandata_patches(self)
cmake = CMake(self)
cmake.configure()
cmake.build()
def _remove_static_libraries(self):
rm(self, "rocksdb.lib", os.path.join(self.package_folder, "lib"))
for lib in glob.glob(os.path.join(self.package_folder, "lib", "*.a")):
if not lib.endswith(".dll.a"):
os.remove(lib)
def _remove_cpp_headers(self):
for path in glob.glob(os.path.join(self.package_folder, "include", "rocksdb", "*")):
if path != os.path.join(self.package_folder, "include", "rocksdb", "c.h"):
if os.path.isfile(path):
os.remove(path)
else:
shutil.rmtree(path)
def package(self):
copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
copy(self, "LICENSE*", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
cmake = CMake(self)
cmake.install()
if self.options.shared:
self._remove_static_libraries()
self._remove_cpp_headers() # Force stable ABI for shared libraries
rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
rmdir(self, os.path.join(self.package_folder, "lib", "pkgconfig"))
def package_info(self):
cmake_target = "rocksdb-shared" if self.options.shared else "rocksdb"
self.cpp_info.set_property("cmake_file_name", "RocksDB")
self.cpp_info.set_property("cmake_target_name", f"RocksDB::{cmake_target}")
# TODO: back to global scope in conan v2 once cmake_find_package* generators removed
self.cpp_info.components["librocksdb"].libs = collect_libs(self)
if self.settings.os == "Windows":
self.cpp_info.components["librocksdb"].system_libs = ["shlwapi", "rpcrt4"]
if self.options.shared:
self.cpp_info.components["librocksdb"].defines = ["ROCKSDB_DLL"]
elif self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.components["librocksdb"].system_libs = ["pthread", "m"]
if self.options.lite:
self.cpp_info.components["librocksdb"].defines.append("ROCKSDB_LITE")
# TODO: to remove in conan v2 once cmake_find_package* generators removed
self.cpp_info.names["cmake_find_package"] = "RocksDB"
self.cpp_info.names["cmake_find_package_multi"] = "RocksDB"
self.cpp_info.components["librocksdb"].names["cmake_find_package"] = cmake_target
self.cpp_info.components["librocksdb"].names["cmake_find_package_multi"] = cmake_target
self.cpp_info.components["librocksdb"].set_property("cmake_target_name", f"RocksDB::{cmake_target}")
if self.options.with_gflags:
self.cpp_info.components["librocksdb"].requires.append("gflags::gflags")
if self.options.with_snappy:
self.cpp_info.components["librocksdb"].requires.append("snappy::snappy")
if self.options.with_lz4:
self.cpp_info.components["librocksdb"].requires.append("lz4::lz4")
if self.options.with_zlib:
self.cpp_info.components["librocksdb"].requires.append("zlib::zlib")
if self.options.with_zstd:
self.cpp_info.components["librocksdb"].requires.append("zstd::zstd")
if self.options.get_safe("with_tbb"):
self.cpp_info.components["librocksdb"].requires.append("onetbb::onetbb")
if self.options.with_jemalloc:
self.cpp_info.components["librocksdb"].requires.append("jemalloc::jemalloc")

View File

@@ -0,0 +1,319 @@
diff --git a/HISTORY.md b/HISTORY.md
index 36d472229..05ad1a202 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,6 +1,10 @@
# Rocksdb Change Log
> NOTE: Entries for next release do not go here. Follow instructions in `unreleased_history/README.txt`
+## 9.7.4 (10/31/2024)
+### Bug Fixes
+* Fix a leak of obsolete blob files left open until DB::Close(). This bug was introduced in version 9.4.0.
+
## 9.7.3 (10/16/2024)
### Behavior Changes
* OPTIONS file to be loaded by remote worker is now preserved so that it does not get purged by the primary host. A similar technique as how we are preserving new SST files from getting purged is used for this. min_options_file_numbers_ is tracked like pending_outputs_ is tracked.
diff --git a/db/blob/blob_file_cache.cc b/db/blob/blob_file_cache.cc
index 5f340aadf..1b9faa238 100644
--- a/db/blob/blob_file_cache.cc
+++ b/db/blob/blob_file_cache.cc
@@ -42,6 +42,7 @@ Status BlobFileCache::GetBlobFileReader(
assert(blob_file_reader);
assert(blob_file_reader->IsEmpty());
+ // NOTE: sharing same Cache with table_cache
const Slice key = GetSliceForKey(&blob_file_number);
assert(cache_);
@@ -98,4 +99,13 @@ Status BlobFileCache::GetBlobFileReader(
return Status::OK();
}
+void BlobFileCache::Evict(uint64_t blob_file_number) {
+ // NOTE: sharing same Cache with table_cache
+ const Slice key = GetSliceForKey(&blob_file_number);
+
+ assert(cache_);
+
+ cache_.get()->Erase(key);
+}
+
} // namespace ROCKSDB_NAMESPACE
diff --git a/db/blob/blob_file_cache.h b/db/blob/blob_file_cache.h
index 740e67ada..6858d012b 100644
--- a/db/blob/blob_file_cache.h
+++ b/db/blob/blob_file_cache.h
@@ -36,6 +36,15 @@ class BlobFileCache {
uint64_t blob_file_number,
CacheHandleGuard<BlobFileReader>* blob_file_reader);
+ // Called when a blob file is obsolete to ensure it is removed from the cache
+ // to avoid effectively leaking the open file and associated memory
+ void Evict(uint64_t blob_file_number);
+
+ // Used to identify cache entries for blob files (not normally useful)
+ static const Cache::CacheItemHelper* GetHelper() {
+ return CacheInterface::GetBasicHelper();
+ }
+
private:
using CacheInterface =
BasicTypedCacheInterface<BlobFileReader, CacheEntryRole::kMisc>;
diff --git a/db/column_family.h b/db/column_family.h
index e4b7adde8..86637736a 100644
--- a/db/column_family.h
+++ b/db/column_family.h
@@ -401,6 +401,7 @@ class ColumnFamilyData {
SequenceNumber earliest_seq);
TableCache* table_cache() const { return table_cache_.get(); }
+ BlobFileCache* blob_file_cache() const { return blob_file_cache_.get(); }
BlobSource* blob_source() const { return blob_source_.get(); }
// See documentation in compaction_picker.h
diff --git a/db/db_impl/db_impl.cc b/db/db_impl/db_impl.cc
index 261593423..06573ac2e 100644
--- a/db/db_impl/db_impl.cc
+++ b/db/db_impl/db_impl.cc
@@ -659,8 +659,9 @@ Status DBImpl::CloseHelper() {
// We need to release them before the block cache is destroyed. The block
// cache may be destroyed inside versions_.reset(), when column family data
// list is destroyed, so leaving handles in table cache after
- // versions_.reset() may cause issues.
- // Here we clean all unreferenced handles in table cache.
+ // versions_.reset() may cause issues. Here we clean all unreferenced handles
+ // in table cache, and (for certain builds/conditions) assert that no obsolete
+ // files are hanging around unreferenced (leak) in the table/blob file cache.
// Now we assume all user queries have finished, so only version set itself
// can possibly hold the blocks from block cache. After releasing unreferenced
// handles here, only handles held by version set left and inside
@@ -668,6 +669,9 @@ Status DBImpl::CloseHelper() {
// time a handle is released, we erase it from the cache too. By doing that,
// we can guarantee that after versions_.reset(), table cache is empty
// so the cache can be safely destroyed.
+#ifndef NDEBUG
+ TEST_VerifyNoObsoleteFilesCached(/*db_mutex_already_held=*/true);
+#endif // !NDEBUG
table_cache_->EraseUnRefEntries();
for (auto& txn_entry : recovered_transactions_) {
@@ -3227,6 +3231,8 @@ Status DBImpl::MultiGetImpl(
s = Status::Aborted();
break;
}
+ // This could be a long-running operation
+ ROCKSDB_THREAD_YIELD_HOOK();
}
// Post processing (decrement reference counts and record statistics)
diff --git a/db/db_impl/db_impl.h b/db/db_impl/db_impl.h
index 5e4fa310b..ccc0abfa7 100644
--- a/db/db_impl/db_impl.h
+++ b/db/db_impl/db_impl.h
@@ -1241,9 +1241,14 @@ class DBImpl : public DB {
static Status TEST_ValidateOptions(const DBOptions& db_options) {
return ValidateOptions(db_options);
}
-
#endif // NDEBUG
+ // In certain configurations, verify that the table/blob file cache only
+ // contains entries for live files, to check for effective leaks of open
+ // files. This can only be called when purging of obsolete files has
+ // "settled," such as during parts of DB Close().
+ void TEST_VerifyNoObsoleteFilesCached(bool db_mutex_already_held) const;
+
// persist stats to column family "_persistent_stats"
void PersistStats();
diff --git a/db/db_impl/db_impl_debug.cc b/db/db_impl/db_impl_debug.cc
index 790a50d7a..67f5b4aaf 100644
--- a/db/db_impl/db_impl_debug.cc
+++ b/db/db_impl/db_impl_debug.cc
@@ -9,6 +9,7 @@
#ifndef NDEBUG
+#include "db/blob/blob_file_cache.h"
#include "db/column_family.h"
#include "db/db_impl/db_impl.h"
#include "db/error_handler.h"
@@ -328,5 +329,49 @@ size_t DBImpl::TEST_EstimateInMemoryStatsHistorySize() const {
InstrumentedMutexLock l(&const_cast<DBImpl*>(this)->stats_history_mutex_);
return EstimateInMemoryStatsHistorySize();
}
+
+void DBImpl::TEST_VerifyNoObsoleteFilesCached(
+ bool db_mutex_already_held) const {
+ // This check is somewhat expensive and obscure to make a part of every
+ // unit test in every build variety. Thus, we only enable it for ASAN builds.
+ if (!kMustFreeHeapAllocations) {
+ return;
+ }
+
+ std::optional<InstrumentedMutexLock> l;
+ if (db_mutex_already_held) {
+ mutex_.AssertHeld();
+ } else {
+ l.emplace(&mutex_);
+ }
+
+ std::vector<uint64_t> live_files;
+ for (auto cfd : *versions_->GetColumnFamilySet()) {
+ if (cfd->IsDropped()) {
+ continue;
+ }
+ // Sneakily add both SST and blob files to the same list
+ cfd->current()->AddLiveFiles(&live_files, &live_files);
+ }
+ std::sort(live_files.begin(), live_files.end());
+
+ auto fn = [&live_files](const Slice& key, Cache::ObjectPtr, size_t,
+ const Cache::CacheItemHelper* helper) {
+ if (helper != BlobFileCache::GetHelper()) {
+ // Skip non-blob files for now
+ // FIXME: diagnose and fix the leaks of obsolete SST files revealed in
+ // unit tests.
+ return;
+ }
+ // See TableCache and BlobFileCache
+ assert(key.size() == sizeof(uint64_t));
+ uint64_t file_number;
+ GetUnaligned(reinterpret_cast<const uint64_t*>(key.data()), &file_number);
+ // Assert file is in sorted live_files
+ assert(
+ std::binary_search(live_files.begin(), live_files.end(), file_number));
+ };
+ table_cache_->ApplyToAllEntries(fn, {});
+}
} // namespace ROCKSDB_NAMESPACE
#endif // NDEBUG
diff --git a/db/db_iter.cc b/db/db_iter.cc
index e02586377..bf4749eb9 100644
--- a/db/db_iter.cc
+++ b/db/db_iter.cc
@@ -540,6 +540,8 @@ bool DBIter::FindNextUserEntryInternal(bool skipping_saved_key,
} else {
iter_.Next();
}
+ // This could be a long-running operation due to tombstones, etc.
+ ROCKSDB_THREAD_YIELD_HOOK();
} while (iter_.Valid());
valid_ = false;
diff --git a/db/table_cache.cc b/db/table_cache.cc
index 71fc29c32..8a5be75e8 100644
--- a/db/table_cache.cc
+++ b/db/table_cache.cc
@@ -164,6 +164,7 @@ Status TableCache::GetTableReader(
}
Cache::Handle* TableCache::Lookup(Cache* cache, uint64_t file_number) {
+ // NOTE: sharing same Cache with BlobFileCache
Slice key = GetSliceForFileNumber(&file_number);
return cache->Lookup(key);
}
@@ -179,6 +180,7 @@ Status TableCache::FindTable(
size_t max_file_size_for_l0_meta_pin, Temperature file_temperature) {
PERF_TIMER_GUARD_WITH_CLOCK(find_table_nanos, ioptions_.clock);
uint64_t number = file_meta.fd.GetNumber();
+ // NOTE: sharing same Cache with BlobFileCache
Slice key = GetSliceForFileNumber(&number);
*handle = cache_.Lookup(key);
TEST_SYNC_POINT_CALLBACK("TableCache::FindTable:0",
diff --git a/db/version_builder.cc b/db/version_builder.cc
index ed8ab8214..c98f53f42 100644
--- a/db/version_builder.cc
+++ b/db/version_builder.cc
@@ -24,6 +24,7 @@
#include <vector>
#include "cache/cache_reservation_manager.h"
+#include "db/blob/blob_file_cache.h"
#include "db/blob/blob_file_meta.h"
#include "db/dbformat.h"
#include "db/internal_stats.h"
@@ -744,12 +745,9 @@ class VersionBuilder::Rep {
return Status::Corruption("VersionBuilder", oss.str());
}
- // Note: we use C++11 for now but in C++14, this could be done in a more
- // elegant way using generalized lambda capture.
- VersionSet* const vs = version_set_;
- const ImmutableCFOptions* const ioptions = ioptions_;
-
- auto deleter = [vs, ioptions](SharedBlobFileMetaData* shared_meta) {
+ auto deleter = [vs = version_set_, ioptions = ioptions_,
+ bc = cfd_ ? cfd_->blob_file_cache()
+ : nullptr](SharedBlobFileMetaData* shared_meta) {
if (vs) {
assert(ioptions);
assert(!ioptions->cf_paths.empty());
@@ -758,6 +756,9 @@ class VersionBuilder::Rep {
vs->AddObsoleteBlobFile(shared_meta->GetBlobFileNumber(),
ioptions->cf_paths.front().path);
}
+ if (bc) {
+ bc->Evict(shared_meta->GetBlobFileNumber());
+ }
delete shared_meta;
};
@@ -766,7 +767,7 @@ class VersionBuilder::Rep {
blob_file_number, blob_file_addition.GetTotalBlobCount(),
blob_file_addition.GetTotalBlobBytes(),
blob_file_addition.GetChecksumMethod(),
- blob_file_addition.GetChecksumValue(), deleter);
+ blob_file_addition.GetChecksumValue(), std::move(deleter));
mutable_blob_file_metas_.emplace(
blob_file_number, MutableBlobFileMetaData(std::move(shared_meta)));
diff --git a/db/version_set.h b/db/version_set.h
index 9336782b1..024f869e7 100644
--- a/db/version_set.h
+++ b/db/version_set.h
@@ -1514,7 +1514,6 @@ class VersionSet {
void GetLiveFilesMetaData(std::vector<LiveFileMetaData>* metadata);
void AddObsoleteBlobFile(uint64_t blob_file_number, std::string path) {
- // TODO: Erase file from BlobFileCache?
obsolete_blob_files_.emplace_back(blob_file_number, std::move(path));
}
diff --git a/include/rocksdb/version.h b/include/rocksdb/version.h
index 2a19796b8..0afa2cab1 100644
--- a/include/rocksdb/version.h
+++ b/include/rocksdb/version.h
@@ -13,7 +13,7 @@
// minor or major version number planned for release.
#define ROCKSDB_MAJOR 9
#define ROCKSDB_MINOR 7
-#define ROCKSDB_PATCH 3
+#define ROCKSDB_PATCH 4
// Do not use these. We made the mistake of declaring macros starting with
// double underscore. Now we have to live with our choice. We'll deprecate these
diff --git a/port/port.h b/port/port.h
index 13aa56d47..141716e5b 100644
--- a/port/port.h
+++ b/port/port.h
@@ -19,3 +19,19 @@
#elif defined(OS_WIN)
#include "port/win/port_win.h"
#endif
+
+#ifdef OS_LINUX
+// A temporary hook into long-running RocksDB threads to support modifying their
+// priority etc. This should become a public API hook once the requirements
+// are better understood.
+extern "C" void RocksDbThreadYield() __attribute__((__weak__));
+#define ROCKSDB_THREAD_YIELD_HOOK() \
+ { \
+ if (RocksDbThreadYield) { \
+ RocksDbThreadYield(); \
+ } \
+ }
+#else
+#define ROCKSDB_THREAD_YIELD_HOOK() \
+ {}
+#endif
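
Because the hook above is declared as a weak symbol (and only on Linux), an embedding application can opt in simply by providing a strong definition; if none exists, the macro's null check makes the hook a no-op. A minimal sketch of such a definition follows; the sched_yield() body is an illustrative assumption, not part of the patch:

    #include <sched.h>

    // Strong definition picked up by the weak declaration in port/port.h.
    // Called from long-running RocksDB loops (MultiGet, iterator Next, ...).
    extern "C" void RocksDbThreadYield()
    {
        sched_yield();  // example policy: give up the CPU slice
    }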

View File

@@ -0,0 +1,30 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 93b884d..b715cb6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -106,14 +106,9 @@ endif()
include(CMakeDependentOption)
if(MSVC)
- option(WITH_GFLAGS "build with GFlags" OFF)
option(WITH_XPRESS "build with windows built in compression" OFF)
- option(ROCKSDB_SKIP_THIRDPARTY "skip thirdparty.inc" OFF)
-
- if(NOT ROCKSDB_SKIP_THIRDPARTY)
- include(${CMAKE_CURRENT_SOURCE_DIR}/thirdparty.inc)
- endif()
-else()
+endif()
+if(TRUE)
if(CMAKE_SYSTEM_NAME MATCHES "FreeBSD" AND NOT CMAKE_SYSTEM_NAME MATCHES "kFreeBSD")
# FreeBSD has jemalloc as default malloc
# but it does not have all the jemalloc files in include/...
@@ -126,7 +121,7 @@ else()
endif()
endif()
- if(MINGW)
+ if(MSVC OR MINGW)
option(WITH_GFLAGS "build with GFlags" OFF)
else()
option(WITH_GFLAGS "build with GFlags" ON)

View File

@@ -70,7 +70,7 @@ class SociConan(ConanFile):
if self.options.with_postgresql:
self.requires("libpq/15.5")
if self.options.with_boost:
self.requires("boost/1.86.0")
self.requires("boost/1.83.0")
@property
def _minimum_compilers_version(self):
@@ -154,7 +154,7 @@ class SociConan(ConanFile):
self.cpp_info.components["soci_core"].set_property("cmake_target_name", "SOCI::soci_core{}".format(target_suffix))
self.cpp_info.components["soci_core"].libs = ["{}soci_core{}".format(lib_prefix, lib_suffix)]
if self.options.with_boost:
self.cpp_info.components["soci_core"].requires.append("boost::headers")
self.cpp_info.components["soci_core"].requires.append("boost::boost")
# soci_empty
if self.options.empty:

View File

@@ -367,7 +367,7 @@ get(Section const& section,
}
inline std::string
get(Section const& section, std::string const& name, char const* defaultValue)
get(Section const& section, std::string const& name, const char* defaultValue)
{
try
{

View File

@@ -25,7 +25,6 @@
#include <cstdint>
#include <cstring>
#include <memory>
namespace ripple {

View File

@@ -55,7 +55,7 @@ lz4Compress(void const* in, std::size_t inSize, BufferFactory&& bf)
auto compressed = bf(outCapacity);
auto compressedSize = LZ4_compress_default(
reinterpret_cast<char const*>(in),
reinterpret_cast<const char*>(in),
reinterpret_cast<char*>(compressed),
inSize,
outCapacity);
@@ -89,7 +89,7 @@ lz4Decompress(
Throw<std::runtime_error>("lz4Decompress: integer overflow (output)");
if (LZ4_decompress_safe(
reinterpret_cast<char const*>(in),
reinterpret_cast<const char*>(in),
reinterpret_cast<char*>(decompressed),
inSize,
decompressedSize) != decompressedSize)

View File

@@ -22,18 +22,8 @@
#include <xrpl/basics/contract.h>
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated"
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#endif
#include <boost/outcome.hpp>
#if defined(__clang__)
#pragma clang diagnostic pop
#endif
#include <stdexcept>
namespace ripple {
@@ -103,7 +93,7 @@ public:
{
}
constexpr E const&
constexpr const E&
value() const&
{
return val_;
@@ -121,7 +111,7 @@ public:
return std::move(val_);
}
constexpr E const&&
constexpr const E&&
value() const&&
{
return std::move(val_);

View File

@@ -26,7 +26,6 @@
#include <boost/beast/core/string.hpp>
#include <boost/filesystem.hpp>
#include <fstream>
#include <map>
#include <memory>
#include <mutex>

View File

@@ -29,6 +29,7 @@
#include <array>
#include <cstdint>
#include <optional>
#include <sstream>
#include <string>
namespace ripple {

View File

@@ -37,28 +37,6 @@
namespace ripple {
namespace detail {
template <typename T>
constexpr bool IsStatelessLambdaV = std::is_empty_v<T> && std::is_constructible_v<T>;
template<class Lambda, int=(Lambda{}(), 0)>
constexpr std::true_type IsConstexpr(Lambda);
constexpr std::false_type IsConstexpr(...);
template <typename T>
constexpr bool IsConstexprInvocableV = IsStatelessLambdaV<T> && decltype(IsConstexpr(T{})){};
template <typename Lambda, bool ConstInvocable = IsConstexprInvocableV<Lambda>>
constexpr bool ShouldTakeConstReferenceV = false;
template <typename Lambda>
constexpr bool ShouldTakeConstReferenceV<Lambda, true> = Lambda{}();
template <typename Lambda>
constexpr bool ShouldTakeConstReferenceV<Lambda, false> = true;
}
/** Map/cache combination.
This class implements a cache and a map. The cache keeps objects alive
in the map. The map allows multiple code paths that reference objects
@@ -137,18 +115,9 @@ public:
sweep();
bool
del(key_type const& key, bool valid);
del(const key_type& key, bool valid);
public:
// We take a const reference if R (the replaceCallback) is a stateless
// lambda and can be evaluated at compile time, and it's evaluated to true,
// because there's no chance to update the parameter.
template <class R>
using SharedPointerTypeReference = std::conditional_t<
detail::ShouldTakeConstReferenceV<R>,
SharedPointerType const&,
SharedPointerType&>;
/** Replace aliased objects with originals.
Due to concurrency it is possible for two separate objects with
@@ -158,27 +127,27 @@ public:
@param key The key corresponding to the object
@param data A shared pointer to the data corresponding to the object.
@param replaceCallback Function that decides if cache should be replaced
@param replace Function that decides if cache should be replaced
@return `true` If the key already existed.
*/
template <class R>
bool
canonicalize(
key_type const& key,
SharedPointerTypeReference<R> data,
const key_type& key,
SharedPointerType& data,
R&& replaceCallback);
bool
canonicalize_replace_cache(
key_type const& key,
const key_type& key,
SharedPointerType const& data);
bool
canonicalize_replace_client(key_type const& key, SharedPointerType& data);
canonicalize_replace_client(const key_type& key, SharedPointerType& data);
SharedPointerType
fetch(key_type const& key);
fetch(const key_type& key);
/** Insert the element into the container.
If the key already exists, nothing happens.
@@ -199,7 +168,7 @@ public:
// simply return an iterator.
//
bool
retrieve(key_type const& key, T& data);
retrieve(const key_type& key, T& data);
mutex_type&
peekMutex();
@@ -353,10 +322,10 @@ private:
std::string m_name;
// Desired number of cache entries (0 = ignore)
int const m_target_size;
const int m_target_size;
// Desired maximum cache age
clock_type::duration const m_target_age;
const clock_type::duration m_target_age;
// Number of items cached
int m_cache_count;

View File

@@ -365,7 +365,7 @@ TaggedCache<
SharedPointerType,
Hash,
KeyEqual,
Mutex>::del(key_type const& key, bool valid)
Mutex>::del(const key_type& key, bool valid)
{
// Remove from cache, if !valid, remove from map too. Returns true if
// removed from cache
@@ -414,8 +414,8 @@ TaggedCache<
KeyEqual,
Mutex>::
canonicalize(
key_type const& key,
SharedPointerTypeReference<R> data,
const key_type& key,
SharedPointerType& data,
R&& replaceCallback)
{
// Return canonical value, store if needed, refresh in cache
@@ -457,7 +457,7 @@ TaggedCache<
{
entry.ptr = data;
}
else if constexpr (std::assignable_from<decltype(data), decltype(entry.ptr.getStrong())>)
else
{
data = entry.ptr.getStrong();
}
@@ -473,7 +473,7 @@ TaggedCache<
{
entry.ptr = data;
}
else if constexpr (std::assignable_from<decltype(data), decltype(entry.ptr.getStrong())>)
else
{
entry.ptr.convertToStrong();
data = cachedData;
@@ -509,11 +509,11 @@ TaggedCache<
KeyEqual,
Mutex>::
canonicalize_replace_cache(
key_type const& key,
const key_type& key,
SharedPointerType const& data)
{
return canonicalize(
key, data, []() { return true; });
key, const_cast<SharedPointerType&>(data), []() { return true; });
}
template <
@@ -535,7 +535,7 @@ TaggedCache<
Hash,
KeyEqual,
Mutex>::
canonicalize_replace_client(key_type const& key, SharedPointerType& data)
canonicalize_replace_client(const key_type& key, SharedPointerType& data)
{
return canonicalize(key, data, []() { return false; });
}
@@ -558,7 +558,7 @@ TaggedCache<
SharedPointerType,
Hash,
KeyEqual,
Mutex>::fetch(key_type const& key)
Mutex>::fetch(const key_type& key)
{
std::lock_guard<mutex_type> l(m_mutex);
auto ret = initialFetch(key, l);
@@ -656,7 +656,7 @@ TaggedCache<
SharedPointerType,
Hash,
KeyEqual,
Mutex>::retrieve(key_type const& key, T& data)
Mutex>::retrieve(const key_type& key, T& data)
{
// retrieve the value of the stored data
auto entry = fetch(key);

View File

@@ -20,6 +20,7 @@
#ifndef RIPPLE_ALGORITHM_H_INCLUDED
#define RIPPLE_ALGORITHM_H_INCLUDED
#include <iterator>
#include <utility>
namespace ripple {

View File

@@ -374,7 +374,7 @@ public:
}
base_uint&
operator^=(base_uint const& b)
operator^=(const base_uint& b)
{
for (int i = 0; i < WIDTH; i++)
data_[i] ^= b.data_[i];
@@ -383,7 +383,7 @@ public:
}
base_uint&
operator&=(base_uint const& b)
operator&=(const base_uint& b)
{
for (int i = 0; i < WIDTH; i++)
data_[i] &= b.data_[i];
@@ -392,7 +392,7 @@ public:
}
base_uint&
operator|=(base_uint const& b)
operator|=(const base_uint& b)
{
for (int i = 0; i < WIDTH; i++)
data_[i] |= b.data_[i];
@@ -415,11 +415,11 @@ public:
return *this;
}
base_uint const
const base_uint
operator++(int)
{
// postfix operator
base_uint const ret = *this;
const base_uint ret = *this;
++(*this);
return ret;
@@ -441,11 +441,11 @@ public:
return *this;
}
base_uint const
const base_uint
operator--(int)
{
// postfix operator
base_uint const ret = *this;
const base_uint ret = *this;
--(*this);
return ret;
@@ -466,7 +466,7 @@ public:
}
base_uint&
operator+=(base_uint const& b)
operator+=(const base_uint& b)
{
std::uint64_t carry = 0;
@@ -511,7 +511,7 @@ public:
}
[[nodiscard]] constexpr bool
parseHex(char const* str)
parseHex(const char* str)
{
return parseHex(std::string_view{str});
}

View File

@@ -43,7 +43,7 @@ struct less
using result_type = bool;
constexpr bool
operator()(T const& left, T const& right) const
operator()(const T& left, const T& right) const
{
return std::less<T>()(left, right);
}
@@ -55,7 +55,7 @@ struct equal_to
using result_type = bool;
constexpr bool
operator()(T const& left, T const& right) const
operator()(const T& left, const T& right) const
{
return std::equal_to<T>()(left, right);
}

View File

@@ -24,8 +24,12 @@
#include <xrpl/beast/hash/xxhasher.h>
#include <cstdint>
#include <functional>
#include <mutex>
#include <random>
#include <type_traits>
#include <unordered_map>
#include <unordered_set>
#include <utility>
namespace ripple {

View File

@@ -23,6 +23,7 @@
#include <cstdint>
#include <limits>
#include <optional>
#include <utility>
namespace ripple {
auto constexpr muldiv_max = std::numeric_limits<std::uint64_t>::max();

View File

@@ -52,7 +52,7 @@ template <
typename Value,
typename Hash,
typename Pred = std::equal_to<Key>,
typename Alloc = std::allocator<std::pair<Key const, Value>>>
typename Alloc = std::allocator<std::pair<const Key, Value>>>
class partitioned_unordered_map
{
std::size_t partitions_;

View File

@@ -24,8 +24,10 @@
#include <boost/operators.hpp>
#include <functional>
#include <iostream>
#include <type_traits>
#include <utility>
namespace ripple {
@@ -74,13 +76,13 @@ public:
}
bool
operator<(tagged_integer const& rhs) const noexcept
operator<(const tagged_integer& rhs) const noexcept
{
return m_value < rhs.m_value;
}
bool
operator==(tagged_integer const& rhs) const noexcept
operator==(const tagged_integer& rhs) const noexcept
{
return m_value == rhs.m_value;
}
@@ -142,14 +144,14 @@ public:
}
tagged_integer&
operator<<=(tagged_integer const& rhs) noexcept
operator<<=(const tagged_integer& rhs) noexcept
{
m_value <<= rhs.m_value;
return *this;
}
tagged_integer&
operator>>=(tagged_integer const& rhs) noexcept
operator>>=(const tagged_integer& rhs) noexcept
{
m_value >>= rhs.m_value;
return *this;

View File

@@ -20,6 +20,9 @@
#ifndef BEAST_CHRONO_ABSTRACT_CLOCK_H_INCLUDED
#define BEAST_CHRONO_ABSTRACT_CLOCK_H_INCLUDED
#include <chrono>
#include <string>
namespace beast {
/** Abstract interface to a clock.

View File

@@ -23,8 +23,6 @@
#include <xrpl/beast/clock/abstract_clock.h>
#include <xrpl/beast/utility/instrumentation.h>
#include <chrono>
namespace beast {
/** Manual clock implementation.

View File

@@ -22,7 +22,6 @@
#include <xrpl/beast/container/aged_container.h>
#include <chrono>
#include <type_traits>
namespace beast {

View File

@@ -20,6 +20,8 @@
#ifndef BEAST_CONTAINER_DETAIL_AGED_ASSOCIATIVE_CONTAINER_H_INCLUDED
#define BEAST_CONTAINER_DETAIL_AGED_ASSOCIATIVE_CONTAINER_H_INCLUDED
#include <type_traits>
namespace beast {
namespace detail {

View File

@@ -33,6 +33,7 @@
#include <algorithm>
#include <functional>
#include <initializer_list>
#include <iterator>
#include <memory>
#include <type_traits>
#include <utility>

View File

@@ -3257,6 +3257,7 @@ operator==(aged_unordered_container<
{
if (size() != other.size())
return false;
using EqRng = std::pair<const_iterator, const_iterator>;
for (auto iter(cbegin()), last(cend()); iter != last;)
{
auto const& k(extract(*iter));

View File

@@ -29,9 +29,11 @@
#include <charconv>
#include <cstdlib>
#include <iterator>
#include <limits>
#include <string>
#include <type_traits>
#include <typeinfo>
#include <utility>
namespace beast {

View File

@@ -24,36 +24,14 @@
#include <boost/container/flat_set.hpp>
#include <boost/endian/conversion.hpp>
/*
Workaround for overzealous clang warning, which trips on libstdc++ headers
In file included from
/usr/lib/gcc/x86_64-linux-gnu/12/../../../../include/c++/12/bits/stl_algo.h:61:
/usr/lib/gcc/x86_64-linux-gnu/12/../../../../include/c++/12/bits/stl_tempbuf.h:263:8:
error: 'get_temporary_buffer<std::pair<ripple::Quality, const
std::vector<std::unique_ptr<ripple::Step>> *>>' is deprecated
[-Werror,-Wdeprecated-declarations] 263 |
std::get_temporary_buffer<value_type>(_M_original_len));
^
*/
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated"
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#endif
#include <functional>
#if defined(__clang__)
#pragma clang diagnostic pop
#endif
#include <array>
#include <chrono>
#include <cstdint>
#include <cstring>
#include <functional>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <system_error>
#include <tuple>

View File

@@ -30,7 +30,7 @@ namespace beast {
template <class Hasher = xxhasher>
struct uhash
{
uhash() = default;
explicit uhash() = default;
using result_type = typename Hasher::result_type;

View File

@@ -29,7 +29,11 @@
#include <boost/asio/ip/address.hpp>
#include <boost/functional/hash.hpp>
#include <cstdint>
#include <ios>
#include <sstream>
#include <string>
#include <typeinfo>
//------------------------------------------------------------------------------

View File

@@ -24,6 +24,12 @@
#include <boost/asio/ip/address_v4.hpp>
#include <cstdint>
#include <functional>
#include <ios>
#include <string>
#include <utility>
namespace beast {
namespace IP {

View File

@@ -24,6 +24,12 @@
#include <boost/asio/ip/address_v6.hpp>
#include <cstdint>
#include <functional>
#include <ios>
#include <string>
#include <utility>
namespace beast {
namespace IP {

View File

@@ -25,6 +25,7 @@
#include <xrpl/beast/net/IPAddress.h>
#include <cstdint>
#include <ios>
#include <optional>
#include <string>
@@ -214,7 +215,7 @@ namespace std {
template <>
struct hash<::beast::IP::Endpoint>
{
hash() = default;
explicit hash() = default;
std::size_t
operator()(::beast::IP::Endpoint const& endpoint) const
@@ -229,7 +230,7 @@ namespace boost {
template <>
struct hash<::beast::IP::Endpoint>
{
hash() = default;
explicit hash() = default;
std::size_t
operator()(::beast::IP::Endpoint const& endpoint) const

View File

@@ -28,8 +28,10 @@
#include <algorithm>
#include <cctype>
#include <cstdint>
#include <iterator>
#include <string>
#include <utility>
#include <vector>
namespace beast {

View File

@@ -13,6 +13,7 @@
#include <boost/optional.hpp>
#include <condition_variable>
#include <functional>
#include <mutex>
#include <thread>
#include <vector>

View File

@@ -16,6 +16,7 @@
#include <algorithm>
#include <chrono>
#include <functional>
#include <iomanip>
#include <iostream>
#include <sstream>

View File

@@ -13,6 +13,7 @@
#include <boost/assert.hpp>
#include <mutex>
#include <ostream>
#include <string>
namespace beast {

View File

@@ -31,28 +31,36 @@ namespace beast {
template <class Generator>
void
rngfill(void* const buffer, std::size_t const bytes, Generator& g)
rngfill(void* buffer, std::size_t bytes, Generator& g)
{
using result_type = typename Generator::result_type;
constexpr std::size_t result_size = sizeof(result_type);
std::uint8_t* const buffer_start = static_cast<std::uint8_t*>(buffer);
std::size_t const complete_iterations = bytes / result_size;
std::size_t const bytes_remaining = bytes % result_size;
for (std::size_t count = 0; count < complete_iterations; ++count)
while (bytes >= sizeof(result_type))
{
result_type const v = g();
std::size_t const offset = count * result_size;
std::memcpy(buffer_start + offset, &v, result_size);
auto const v = g();
std::memcpy(buffer, &v, sizeof(v));
buffer = reinterpret_cast<std::uint8_t*>(buffer) + sizeof(v);
bytes -= sizeof(v);
}
if (bytes_remaining > 0)
XRPL_ASSERT(
bytes < sizeof(result_type), "beast::rngfill(void*) : maximum bytes");
#ifdef __GNUC__
// gcc 11.1 (falsely) warns about an array-bounds overflow in release mode.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Warray-bounds"
#endif
if (bytes > 0)
{
result_type const v = g();
std::size_t const offset = complete_iterations * result_size;
std::memcpy(buffer_start + offset, &v, bytes_remaining);
auto const v = g();
std::memcpy(buffer, &v, bytes);
}
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
}
template <
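
A brief usage sketch for the rngfill rewrite above; the buffer size and the choice of std::mt19937_64 are arbitrary assumptions:

    #include <array>
    #include <cstdint>
    #include <random>

    std::array<std::uint8_t, 32> buf;
    std::mt19937_64 gen{std::random_device{}()};
    // Fills all 32 bytes: whole result_type words first, then the remainder.
    beast::rngfill(buf.data(), buf.size(), gen);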

View File

@@ -37,9 +37,9 @@ class temp_dir
public:
#if !GENERATING_DOCS
temp_dir(temp_dir const&) = delete;
temp_dir(const temp_dir&) = delete;
temp_dir&
operator=(temp_dir const&) = delete;
operator=(const temp_dir&) = delete;
#endif
/// Construct a temporary directory.

View File

@@ -39,7 +39,7 @@ class Reader
{
public:
using Char = char;
using Location = Char const*;
using Location = const Char*;
/** \brief Constructs a Reader allowing all features
* for parsing.
@@ -64,7 +64,7 @@ public:
* error occurred.
*/
bool
parse(char const* beginDoc, char const* endDoc, Value& root);
parse(const char* beginDoc, const char* endDoc, Value& root);
/// \brief Parse from input stream.
/// \see Json::operator>>(std::istream&, Json::Value&).
@@ -133,7 +133,7 @@ private:
using Errors = std::deque<ErrorInfo>;
bool
expectToken(TokenType type, Token& token, char const* message);
expectToken(TokenType type, Token& token, const char* message);
bool
readToken(Token& token);
void

View File

@@ -20,7 +20,6 @@
#ifndef RIPPLE_JSON_JSON_VALUE_H_INCLUDED
#define RIPPLE_JSON_JSON_VALUE_H_INCLUDED
#include <xrpl/basics/Number.h>
#include <xrpl/json/json_forwards.h>
#include <cstring>
@@ -62,24 +61,24 @@ enum ValueType {
class StaticString
{
public:
constexpr explicit StaticString(char const* czstring) : str_(czstring)
constexpr explicit StaticString(const char* czstring) : str_(czstring)
{
}
constexpr
operator char const*() const
operator const char*() const
{
return str_;
}
constexpr char const*
constexpr const char*
c_str() const
{
return str_;
}
private:
char const* str_;
const char* str_;
};
inline bool
@@ -157,10 +156,10 @@ public:
using Int = Json::Int;
using ArrayIndex = UInt;
static Value const null;
static Int const minInt;
static Int const maxInt;
static UInt const maxUInt;
static const Value null;
static const Int minInt;
static const Int maxInt;
static const UInt maxUInt;
private:
class CZString
@@ -172,24 +171,24 @@ private:
duplicateOnCopy
};
CZString(int index);
CZString(char const* cstr, DuplicationPolicy allocate);
CZString(CZString const& other);
CZString(const char* cstr, DuplicationPolicy allocate);
CZString(const CZString& other);
~CZString();
CZString&
operator=(CZString const& other) = delete;
operator=(const CZString& other) = delete;
bool
operator<(CZString const& other) const;
operator<(const CZString& other) const;
bool
operator==(CZString const& other) const;
operator==(const CZString& other) const;
int
index() const;
char const*
const char*
c_str() const;
bool
isStaticString() const;
private:
char const* cstr_;
const char* cstr_;
int index_;
};
@@ -216,8 +215,7 @@ public:
Value(Int value);
Value(UInt value);
Value(double value);
Value(char const* value);
Value(ripple::Number const& value);
Value(const char* value);
/** \brief Constructs a value from a static string.
* Like other value string constructor but do not duplicate the string for
@@ -229,10 +227,10 @@ public:
* Json::Value aValue( StaticString("some text") );
* \endcode
*/
Value(StaticString const& value);
Value(const StaticString& value);
Value(std::string const& value);
Value(bool value);
Value(Value const& other);
Value(const Value& other);
~Value();
Value&
@@ -249,7 +247,7 @@ public:
ValueType
type() const;
char const*
const char*
asCString() const;
/** Returns the unquoted string value. */
std::string
@@ -319,12 +317,12 @@ public:
/// Access an array element (zero based index )
/// (You may need to say 'value[0u]' to get your compiler to distinguish
/// this from the operator[] which takes a string.)
Value const&
const Value&
operator[](UInt index) const;
/// If the array contains at least index+1 elements, returns the element
/// value, otherwise returns defaultValue.
Value
get(UInt index, Value const& defaultValue) const;
get(UInt index, const Value& defaultValue) const;
/// Return true if index < size().
bool
isValidIndex(UInt index) const;
@@ -332,25 +330,25 @@ public:
///
/// Equivalent to jsonvalue[jsonvalue.size()] = value;
Value&
append(Value const& value);
append(const Value& value);
Value&
append(Value&& value);
/// Access an object value by name, create a null member if it does not
/// exist.
Value&
operator[](char const* key);
operator[](const char* key);
/// Access an object value by name, returns null if there is no member with
/// that name.
Value const&
operator[](char const* key) const;
const Value&
operator[](const char* key) const;
/// Access an object value by name, create a null member if it does not
/// exist.
Value&
operator[](std::string const& key);
/// Access an object value by name, returns null if there is no member with
/// that name.
Value const&
const Value&
operator[](std::string const& key) const;
/** \brief Access an object value by name, create a null member if it does
not exist.
@@ -366,16 +364,14 @@ public:
* \endcode
*/
Value&
operator[](StaticString const& key);
Value const&
operator[](StaticString const& key) const;
operator[](const StaticString& key);
/// Return the member named key if it exist, defaultValue otherwise.
Value
get(char const* key, Value const& defaultValue) const;
get(const char* key, const Value& defaultValue) const;
/// Return the member named key if it exist, defaultValue otherwise.
Value
get(std::string const& key, Value const& defaultValue) const;
get(std::string const& key, const Value& defaultValue) const;
/// \brief Remove and return the named member.
///
@@ -384,14 +380,14 @@ public:
/// \pre type() is objectValue or nullValue
/// \post type() is unchanged
Value
removeMember(char const* key);
removeMember(const char* key);
/// Same as removeMember(const char*)
Value
removeMember(std::string const& key);
/// Return true if the object has a member named key.
bool
isMember(char const* key) const;
isMember(const char* key) const;
/// Return true if the object has a member named key.
bool
isMember(std::string const& key) const;
@@ -418,13 +414,13 @@ public:
end();
friend bool
operator==(Value const&, Value const&);
operator==(const Value&, const Value&);
friend bool
operator<(Value const&, Value const&);
operator<(const Value&, const Value&);
private:
Value&
resolveReference(char const* key, bool isStatic);
resolveReference(const char* key, bool isStatic);
private:
union ValueHolder
@@ -440,38 +436,32 @@ private:
int allocated_ : 1; // Notes: if declared as bool, bitfield is useless.
};
inline Value
to_json(ripple::Number const& number)
{
return to_string(number);
}
bool
operator==(Value const&, Value const&);
operator==(const Value&, const Value&);
inline bool
operator!=(Value const& x, Value const& y)
operator!=(const Value& x, const Value& y)
{
return !(x == y);
}
bool
operator<(Value const&, Value const&);
operator<(const Value&, const Value&);
inline bool
operator<=(Value const& x, Value const& y)
operator<=(const Value& x, const Value& y)
{
return !(y < x);
}
inline bool
operator>(Value const& x, Value const& y)
operator>(const Value& x, const Value& y)
{
return y < x;
}
inline bool
operator>=(Value const& x, Value const& y)
operator>=(const Value& x, const Value& y)
{
return !(x < y);
}
@@ -492,11 +482,11 @@ public:
virtual ~ValueAllocator() = default;
virtual char*
makeMemberName(char const* memberName) = 0;
makeMemberName(const char* memberName) = 0;
virtual void
releaseMemberName(char* memberName) = 0;
virtual char*
duplicateStringValue(char const* value, unsigned int length = unknown) = 0;
duplicateStringValue(const char* value, unsigned int length = unknown) = 0;
virtual void
releaseStringValue(char* value) = 0;
};
@@ -513,16 +503,16 @@ public:
ValueIteratorBase();
explicit ValueIteratorBase(Value::ObjectValues::iterator const& current);
explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
bool
operator==(SelfType const& other) const
operator==(const SelfType& other) const
{
return isEqual(other);
}
bool
operator!=(SelfType const& other) const
operator!=(const SelfType& other) const
{
return !isEqual(other);
}
@@ -538,7 +528,7 @@ public:
/// Return the member name of the referenced Value. "" if it is not an
/// objectValue.
char const*
const char*
memberName() const;
protected:
@@ -552,13 +542,13 @@ protected:
decrement();
difference_type
computeDistance(SelfType const& other) const;
computeDistance(const SelfType& other) const;
bool
isEqual(SelfType const& other) const;
isEqual(const SelfType& other) const;
void
copy(SelfType const& other);
copy(const SelfType& other);
private:
Value::ObjectValues::iterator current_;
@@ -576,8 +566,8 @@ class ValueConstIterator : public ValueIteratorBase
public:
using size_t = unsigned int;
using difference_type = int;
using reference = Value const&;
using pointer = Value const*;
using reference = const Value&;
using pointer = const Value*;
using SelfType = ValueConstIterator;
ValueConstIterator() = default;
@@ -585,11 +575,11 @@ public:
private:
/*! \internal Use by Value to create an iterator.
*/
explicit ValueConstIterator(Value::ObjectValues::iterator const& current);
explicit ValueConstIterator(const Value::ObjectValues::iterator& current);
public:
SelfType&
operator=(ValueIteratorBase const& other);
operator=(const ValueIteratorBase& other);
SelfType
operator++(int)
@@ -642,17 +632,17 @@ public:
using SelfType = ValueIterator;
ValueIterator() = default;
ValueIterator(ValueConstIterator const& other);
ValueIterator(ValueIterator const& other);
ValueIterator(const ValueConstIterator& other);
ValueIterator(const ValueIterator& other);
private:
/*! \internal Use by Value to create an iterator.
*/
explicit ValueIterator(Value::ObjectValues::iterator const& current);
explicit ValueIterator(const Value::ObjectValues::iterator& current);
public:
SelfType&
operator=(SelfType const& other);
operator=(const SelfType& other);
SelfType
operator++(int)

View File

@@ -39,7 +39,7 @@ public:
{
}
virtual std::string
write(Value const& root) = 0;
write(const Value& root) = 0;
};
/** \brief Outputs a Value in <a HREF="http://www.json.org">JSON</a> format
@@ -60,11 +60,11 @@ public:
public: // overridden from Writer
std::string
write(Value const& root) override;
write(const Value& root) override;
private:
void
writeValue(Value const& value);
writeValue(const Value& value);
std::string document_;
};
@@ -101,15 +101,15 @@ public: // overridden from Writer
* JSON document that represents the root value.
*/
std::string
write(Value const& root) override;
write(const Value& root) override;
private:
void
writeValue(Value const& value);
writeValue(const Value& value);
void
writeArrayValue(Value const& value);
writeArrayValue(const Value& value);
bool
isMultineArray(Value const& value);
isMultineArray(const Value& value);
void
pushValue(std::string const& value);
void
@@ -168,15 +168,15 @@ public:
* return a value.
*/
void
write(std::ostream& out, Value const& root);
write(std::ostream& out, const Value& root);
private:
void
writeValue(Value const& value);
writeValue(const Value& value);
void
writeArrayValue(Value const& value);
writeArrayValue(const Value& value);
bool
isMultineArray(Value const& value);
isMultineArray(const Value& value);
void
pushValue(std::string const& value);
void
@@ -207,12 +207,12 @@ valueToString(double value);
std::string
valueToString(bool value);
std::string
valueToQuotedString(char const* value);
valueToQuotedString(const char* value);
/// \brief Output using the StyledStreamWriter.
/// \see Json::operator>>()
std::ostream&
operator<<(std::ostream&, Value const& root);
operator<<(std::ostream&, const Value& root);
//------------------------------------------------------------------------------

View File

@@ -37,7 +37,7 @@ pretty(Value const&);
/** Output using the StyledStreamWriter. @see Json::operator>>(). */
std::ostream&
operator<<(std::ostream&, Value const& root);
operator<<(std::ostream&, const Value& root);
} // namespace Json

View File

@@ -48,6 +48,14 @@ class STObject;
class STAmount;
class Rules;
/** Calculate AMM account ID.
*/
AccountID
ammAccountID(
std::uint16_t prefix,
uint256 const& parentHash,
uint256 const& ammID);
/** Calculate Liquidity Provider Token (LPT) Currency.
*/
Currency

View File

@@ -29,6 +29,7 @@
#include <xrpl/protocol/json_get_or_throw.h>
#include <cstddef>
#include <mutex>
#include <optional>
#include <string>
@@ -148,7 +149,7 @@ namespace std {
template <>
struct hash<ripple::AccountID> : ripple::AccountID::hasher
{
hash() = default;
explicit hash() = default;
};
} // namespace std

View File

@@ -20,6 +20,7 @@
#ifndef RIPPLE_PROTOCOL_APIVERSION_H_INCLUDED
#define RIPPLE_PROTOCOL_APIVERSION_H_INCLUDED
#include <functional>
#include <type_traits>
#include <utility>

View File

@@ -20,7 +20,6 @@
#ifndef RIPPLE_PROTOCOL_ASSET_H_INCLUDED
#define RIPPLE_PROTOCOL_ASSET_H_INCLUDED
#include <xrpl/basics/Number.h>
#include <xrpl/basics/base_uint.h>
#include <xrpl/protocol/Issue.h>
#include <xrpl/protocol/MPTIssue.h>
@@ -28,7 +27,6 @@
namespace ripple {
class Asset;
class STAmount;
template <typename TIss>
concept ValidIssueType =
@@ -94,9 +92,6 @@ public:
void
setJson(Json::Value& jv) const;
STAmount
operator()(Number const&) const;
bool
native() const
{
@@ -119,14 +114,6 @@ public:
equalTokens(Asset const& lhs, Asset const& rhs);
};
inline Json::Value
to_json(Asset const& asset)
{
Json::Value jv;
asset.setJson(jv);
return jv;
}
template <ValidIssueType TIss>
constexpr bool
Asset::holds() const
@@ -232,6 +219,9 @@ validJSONAsset(Json::Value const& jv);
Asset
assetFromJson(Json::Value const& jv);
Json::Value
to_json(Asset const& asset);
} // namespace ripple
#endif // RIPPLE_PROTOCOL_ASSET_H_INCLUDED

View File

@@ -1,37 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of rippled: https://github.com/ripple/rippled
Copyright (c) 2024 Ripple Labs Inc.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#include <xrpl/protocol/HashPrefix.h>
#include <xrpl/protocol/STVector256.h>
#include <xrpl/protocol/Serializer.h>
namespace ripple {
inline void
serializeBatch(
Serializer& msg,
std::uint32_t const& flags,
std::vector<uint256> const& txids)
{
msg.add32(HashPrefix::batch);
msg.add32(flags);
msg.add32(std::uint32_t(txids.size()));
for (auto const& txid : txids)
msg.addBitString(txid);
}
} // namespace ripple

View File

@@ -21,7 +21,6 @@
#define RIPPLE_PROTOCOL_BOOK_H_INCLUDED
#include <xrpl/basics/CountedObject.h>
#include <xrpl/basics/base_uint.h>
#include <xrpl/protocol/Issue.h>
#include <boost/utility/base_from_member.hpp>
@@ -37,17 +36,12 @@ class Book final : public CountedObject<Book>
public:
Issue in;
Issue out;
std::optional<uint256> domain;
Book()
{
}
Book(
Issue const& in_,
Issue const& out_,
std::optional<uint256> const& domain_)
: in(in_), out(out_), domain(domain_)
Book(Issue const& in_, Issue const& out_) : in(in_), out(out_)
{
}
};
@@ -67,8 +61,6 @@ hash_append(Hasher& h, Book const& b)
{
using beast::hash_append;
hash_append(h, b.in, b.out);
if (b.domain)
hash_append(h, *(b.domain));
}
Book
@@ -79,8 +71,7 @@ reversed(Book const& book);
[[nodiscard]] inline constexpr bool
operator==(Book const& lhs, Book const& rhs)
{
return (lhs.in == rhs.in) && (lhs.out == rhs.out) &&
(lhs.domain == rhs.domain);
return (lhs.in == rhs.in) && (lhs.out == rhs.out);
}
/** @} */
@@ -91,18 +82,7 @@ operator<=>(Book const& lhs, Book const& rhs)
{
if (auto const c{lhs.in <=> rhs.in}; c != 0)
return c;
if (auto const c{lhs.out <=> rhs.out}; c != 0)
return c;
// Manually compare optionals
if (lhs.domain && rhs.domain)
return *lhs.domain <=> *rhs.domain; // Compare values if both exist
if (!lhs.domain && rhs.domain)
return std::weak_ordering::less; // Empty is considered less
if (lhs.domain && !rhs.domain)
return std::weak_ordering::greater; // Non-empty is greater
return std::weak_ordering::equivalent; // Both are empty
return lhs.out <=> rhs.out;
}
/** @} */
@@ -124,7 +104,7 @@ private:
boost::base_from_member<std::hash<ripple::AccountID>, 1>;
public:
hash() = default;
explicit hash() = default;
using value_type = std::size_t;
using argument_type = ripple::Issue;
@@ -146,14 +126,12 @@ template <>
struct hash<ripple::Book>
{
private:
using issue_hasher = std::hash<ripple::Issue>;
using uint256_hasher = ripple::uint256::hasher;
using hasher = std::hash<ripple::Issue>;
issue_hasher m_issue_hasher;
uint256_hasher m_uint256_hasher;
hasher m_hasher;
public:
hash() = default;
explicit hash() = default;
using value_type = std::size_t;
using argument_type = ripple::Book;
@@ -161,12 +139,8 @@ public:
value_type
operator()(argument_type const& value) const
{
value_type result(m_issue_hasher(value.in));
boost::hash_combine(result, m_issue_hasher(value.out));
if (value.domain)
boost::hash_combine(result, m_uint256_hasher(*value.domain));
value_type result(m_hasher(value.in));
boost::hash_combine(result, m_hasher(value.out));
return result;
}
};
@@ -180,7 +154,7 @@ namespace boost {
template <>
struct hash<ripple::Issue> : std::hash<ripple::Issue>
{
hash() = default;
explicit hash() = default;
using Base = std::hash<ripple::Issue>;
// VFALCO NOTE broken in vs2012
@@ -190,7 +164,7 @@ struct hash<ripple::Issue> : std::hash<ripple::Issue>
template <>
struct hash<ripple::Book> : std::hash<ripple::Book>
{
hash() = default;
explicit hash() = default;
using Base = std::hash<ripple::Book>;
// VFALCO NOTE broken in vs2012

View File

@@ -120,7 +120,7 @@ enum error_code_i {
rpcSRC_ACT_MALFORMED = 65,
rpcSRC_ACT_MISSING = 66,
rpcSRC_ACT_NOT_FOUND = 67,
rpcDELEGATE_ACT_NOT_FOUND = 68,
// unused 68,
rpcSRC_CUR_MALFORMED = 69,
rpcSRC_ISR_MALFORMED = 70,
rpcSTREAM_MALFORMED = 71,
@@ -154,10 +154,7 @@ enum error_code_i {
// Simulate
rpcTX_SIGNED = 96,
// Pathfinding
rpcDOMAIN_MALFORMED = 97,
rpcLAST = rpcDOMAIN_MALFORMED // rpcLAST should always equal the last code.
rpcLAST = rpcTX_SIGNED // rpcLAST should always equal the last code.
};
/** Codes returned in the `warnings` array of certain RPC commands.
@@ -169,8 +166,6 @@ enum warning_code_i {
warnRPC_AMENDMENT_BLOCKED = 1002,
warnRPC_EXPIRED_VALIDATOR_LIST = 1003,
// unused = 1004
warnRPC_FIELDS_DEPRECATED = 2004, // rippled needs to maintain
// compatibility with Clio on this code.
};
//------------------------------------------------------------------------------

View File

@@ -24,6 +24,7 @@
#include <boost/container/flat_map.hpp>
#include <array>
#include <bitset>
#include <map>
#include <optional>
@@ -53,18 +54,6 @@
* then change the macro parameter in features.macro to
* `VoteBehavior::DefaultYes`. The communication process is beyond
* the scope of these instructions.
* 5) If a supported feature (`Supported::yes`) was _ever_ in a released
* version, it can never be changed back to `Supported::no`, because
* it _may_ still become enabled at any time. This would cause newer
* versions of `rippled` to become amendment blocked.
* Instead, to prevent newer versions from voting on the feature, use
* `VoteBehavior::Obsolete`. Obsolete features can not be voted for
* by any versions of `rippled` built with that setting, but will still
* work correctly if they get enabled. If a feature remains obsolete
* for long enough that _all_ clients that could vote for it are
* amendment blocked, the feature can be removed from the code
* as if it was unsupported.
*
*
* When a feature has been enabled for several years, the conditional code

View File

@@ -27,9 +27,14 @@
#include <boost/multiprecision/cpp_int.hpp>
#include <boost/operators.hpp>
#include <cmath>
#include <ios>
#include <iosfwd>
#include <limits>
#include <optional>
#include <sstream>
#include <string>
#include <utility>
namespace ripple {
@@ -331,7 +336,7 @@ public:
// Output Fees as just their numeric value.
template <class Char, class Traits, class UnitTag, class T>
std::basic_ostream<Char, Traits>&
operator<<(std::basic_ostream<Char, Traits>& os, TaggedFee<UnitTag, T> const& q)
operator<<(std::basic_ostream<Char, Traits>& os, const TaggedFee<UnitTag, T>& q)
{
return os << q.value();
}

View File

@@ -88,9 +88,6 @@ enum class HashPrefix : std::uint32_t {
/** Credentials signature */
credential = detail::make_hash_prefix('C', 'R', 'D'),
/** Batch */
batch = detail::make_hash_prefix('B', 'C', 'H'),
};
template <class Hasher>

View File

@@ -28,6 +28,7 @@
#include <cstdint>
#include <string>
#include <utility>
namespace ripple {
@@ -98,12 +99,6 @@ public:
static IOUAmount
minPositiveAmount();
friend std::ostream&
operator<<(std::ostream& os, IOUAmount const& x)
{
return os << to_string(x);
}
};
inline IOUAmount::IOUAmount(beast::Zero)

View File

@@ -32,7 +32,6 @@
#include <xrpl/protocol/jss.h>
#include <cstdint>
#include <set>
namespace ripple {
@@ -280,10 +279,6 @@ amm(Asset const& issue1, Asset const& issue2) noexcept;
Keylet
amm(uint256 const& amm) noexcept;
/** A keylet for Delegate object */
Keylet
delegate(AccountID const& account, AccountID const& authorizedAccount) noexcept;
Keylet
bridge(STXChainBridge const& bridge, STXChainBridge::ChainType chainType);
@@ -335,15 +330,6 @@ mptoken(uint256 const& mptokenKey)
Keylet
mptoken(uint256 const& issuanceKey, AccountID const& holder) noexcept;
Keylet
vault(AccountID const& owner, std::uint32_t seq) noexcept;
inline Keylet
vault(uint256 const& vaultKey)
{
return {ltVAULT, vaultKey};
}
Keylet
permissionedDomain(AccountID const& account, std::uint32_t seq) noexcept;

View File

@@ -24,6 +24,9 @@
#include <xrpl/json/json_value.h>
#include <xrpl/protocol/UintTypes.h>
#include <functional>
#include <type_traits>
namespace ripple {
/** A currency issued by an account.

View File

@@ -145,15 +145,13 @@ enum LedgerSpecificFlags {
0x10000000, // True, reject new paychans
lsfDisallowIncomingTrustline =
0x20000000, // True, reject new trustlines (only if no issued assets)
lsfAllowTrustLineLocking =
0x40000000, // True, enable trustline locking
// 0x40000000 is available
lsfAllowTrustLineClawback =
0x80000000, // True, enable clawback
// ltOFFER
lsfPassive = 0x00010000,
lsfSell = 0x00020000, // True, offer was placed as a sell.
lsfHybrid = 0x00040000, // True, offer is hybrid.
// ltRIPPLE_STATE
lsfLowReserve = 0x00010000, // True, if entry counts toward reserve.
@@ -193,9 +191,6 @@ enum LedgerSpecificFlags {
// ltCREDENTIAL
lsfAccepted = 0x00010000,
// ltVAULT
lsfVaultPrivate = 0x00010000,
};
//------------------------------------------------------------------------------

View File

@@ -24,12 +24,15 @@
#include <xrpl/basics/contract.h>
#include <xrpl/basics/safe_cast.h>
#include <xrpl/beast/utility/Zero.h>
#include <xrpl/json/json_value.h>
#include <boost/multiprecision/cpp_int.hpp>
#include <boost/operators.hpp>
#include <cstdint>
#include <optional>
#include <string>
#include <type_traits>
namespace ripple {

View File

@@ -42,11 +42,8 @@ public:
AccountID const&
getIssuer() const;
constexpr MPTID const&
getMptID() const
{
return mptID_;
}
MPTID const&
getMptID() const;
std::string
getText() const;

View File

@@ -28,6 +28,7 @@
#include <concepts>
#include <cstdlib>
#include <functional>
#include <limits>
#include <type_traits>
#include <utility>
@@ -79,7 +80,7 @@ struct MultiApiJson
}
void
set(char const* key, auto const& v)
set(const char* key, auto const& v)
requires std::constructible_from<Json::Value, decltype(v)>
{
for (auto& a : this->val)
@@ -90,7 +91,7 @@ struct MultiApiJson
enum IsMemberResult : int { none = 0, some, all };
[[nodiscard]] IsMemberResult
isMember(char const* key) const
isMember(const char* key) const
{
int count = 0;
for (auto& a : this->val)

View File

@@ -28,8 +28,6 @@
namespace ripple {
namespace RPC {
/**
Adds common synthetic fields to transaction-related JSON responses
@@ -42,7 +40,6 @@ insertNFTSyntheticInJson(
TxMeta const&);
/** @} */
} // namespace RPC
} // namespace ripple
#endif

View File

@@ -1,96 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of rippled: https://github.com/ripple/rippled
Copyright (c) 2025 Ripple Labs Inc.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#ifndef RIPPLE_PROTOCOL_PERMISSION_H_INCLUDED
#define RIPPLE_PROTOCOL_PERMISSION_H_INCLUDED
#include <xrpl/protocol/TxFormats.h>
#include <optional>
#include <string>
#include <unordered_map>
namespace ripple {
/**
* We have both transaction type permissions and granular type permissions.
* Since we will reuse the TransactionFormats to parse the Transaction
* Permissions, only the GranularPermissionType is defined here. To prevent
* conflicts with TxType, the GranularPermissionType is always set to a value
* greater than the maximum value of uint16.
*/
enum GranularPermissionType : std::uint32_t {
#pragma push_macro("PERMISSION")
#undef PERMISSION
#define PERMISSION(type, txType, value) type = value,
#include <xrpl/protocol/detail/permissions.macro>
#undef PERMISSION
#pragma pop_macro("PERMISSION")
};
enum Delegation { delegatable, notDelegatable };
class Permission
{
private:
Permission();
std::unordered_map<std::uint16_t, Delegation> delegatableTx_;
std::unordered_map<std::string, GranularPermissionType>
granularPermissionMap_;
std::unordered_map<GranularPermissionType, std::string> granularNameMap_;
std::unordered_map<GranularPermissionType, TxType> granularTxTypeMap_;
public:
static Permission const&
getInstance();
Permission(Permission const&) = delete;
Permission&
operator=(Permission const&) = delete;
std::optional<std::uint32_t>
getGranularValue(std::string const& name) const;
std::optional<std::string>
getGranularName(GranularPermissionType const& value) const;
std::optional<TxType>
getGranularTxType(GranularPermissionType const& gpType) const;
bool
isDelegatable(std::uint32_t const& permissionValue) const;
// for tx level permission, permission value is equal to tx type plus one
uint32_t
txToPermissionType(TxType const& type) const;
// tx type value is permission value minus one
TxType
permissionToTxType(uint32_t const& value) const;
};
} // namespace ripple
#endif

View File

@@ -116,16 +116,6 @@ std::size_t constexpr maxMPTokenMetadataLength = 1024;
/** The maximum amount of MPTokenIssuance */
std::uint64_t constexpr maxMPTokenAmount = 0x7FFF'FFFF'FFFF'FFFFull;
/** The maximum length of Data payload */
std::size_t constexpr maxDataPayloadLength = 256;
/** Vault withdrawal policies */
std::uint8_t constexpr vaultStrategyFirstComeFirstServe = 1;
/** Maximum recursion depth for vault shares being put as an asset inside
* another vault; counted from 0 */
std::uint8_t constexpr maxAssetCheckDepth = 5;
/** A ledger index. */
using LedgerIndex = std::uint32_t;
@@ -165,13 +155,6 @@ std::size_t constexpr maxPriceScale = 20;
*/
std::size_t constexpr maxTrim = 25;
/** The maximum number of delegate permissions an account can grant
*/
std::size_t constexpr permissionMaxSize = 10;
/** The maximum number of transactions that can be in a batch. */
std::size_t constexpr maxBatchTxCount = 8;
} // namespace ripple
#endif

View File

@@ -32,6 +32,7 @@
#include <cstring>
#include <optional>
#include <ostream>
#include <utility>
namespace ripple {

View File

@@ -113,8 +113,8 @@ public:
// have lower unsigned integer representations.
using value_type = std::uint64_t;
static int const minTickSize = 3;
static int const maxTickSize = 16;
static const int minTickSize = 3;
static const int maxTickSize = 16;
private:
// This has the same representation as STAmount, see the comment on the

View File

@@ -28,9 +28,6 @@
namespace ripple {
bool
isFeatureEnabled(uint256 const& feature);
class DigestAwareReadView;
/** Rules controlling protocol behavior. */

View File

@@ -25,6 +25,7 @@
#include <cstdint>
#include <map>
#include <utility>
namespace ripple {
@@ -181,22 +182,22 @@ public:
private_access_tag_t,
SerializedTypeID tid,
int fv,
char const* fn,
const char* fn,
int meta = sMD_Default,
IsSigning signing = IsSigning::yes);
explicit SField(private_access_tag_t, int fc);
static SField const&
static const SField&
getField(int fieldCode);
static SField const&
static const SField&
getField(std::string const& fieldName);
static SField const&
static const SField&
getField(int type, int value)
{
return getField(field_code(type, value));
}
static SField const&
static const SField&
getField(SerializedTypeID type, int value)
{
return getField(field_code(type, value));
@@ -283,19 +284,19 @@ public:
}
bool
operator==(SField const& f) const
operator==(const SField& f) const
{
return fieldCode == f.fieldCode;
}
bool
operator!=(SField const& f) const
operator!=(const SField& f) const
{
return fieldCode != f.fieldCode;
}
static int
compare(SField const& f1, SField const& f2);
compare(const SField& f1, const SField& f2);
static std::map<int, SField const*> const&
getKnownCodeToField()

Some files were not shown because too many files have changed in this diff.