Mirror of https://github.com/XRPLF/rippled.git (synced 2025-12-06 17:27:55 +00:00)

Compare commits: Bronek/upg...Bronek/pro (16 commits)
Commits (SHA1):

- e874c4061e
- 892876af5e
- 94decc753b
- 991891625a
- 69314e6832
- dbeb841b5a
- 4eae037fee
- a2a5a97d70
- b5a63b39d3
- 6419f9a253
- 096ab3a86d
- 64f1f2d580
- 3f9b724ed8
- cad9eba7ed
- dbb989921c
- a0cf51d454
.github/actions/dependencies/action.yml (vendored, 34 changed lines)

```diff
@@ -6,29 +6,17 @@ inputs:
 runs:
   using: composite
   steps:
-    - name: export custom recipes
-      shell: bash
-      run: |
-        conan export --version 1.1.10 external/snappy
-        conan export --version 4.0.3 external/soci
-    - name: add Ripple Conan remote
+    - name: add Conan remote
       if: env.CONAN_URL != ''
       shell: bash
       run: |
-        if conan remote list | grep -q "ripple"; then
-          conan remote remove ripple
-          echo "Removed conan remote ripple"
+        if conan remote list | grep -q 'xrplf'; then
+          conan remote update --index 0 --url ${CONAN_URL} xrplf
+          echo "Updated Conan remote 'xrplf' to ${CONAN_URL}."
+        else
+          conan remote add --index 0 xrplf ${CONAN_URL}
+          echo "Added new Conan remote 'xrplf' at ${CONAN_URL}."
         fi
-        conan remote add --index 0 ripple "${CONAN_URL}"
-        echo "Added conan remote ripple at ${CONAN_URL}"
-    - name: try to authenticate to Ripple Conan remote
-      if: env.CONAN_LOGIN_USERNAME_RIPPLE != '' && env.CONAN_PASSWORD_RIPPLE != ''
-      id: remote
-      shell: bash
-      run: |
-        echo "Authenticating to ripple remote..."
-        conan remote auth ripple --force
-        conan remote list-users
     - name: list missing binaries
       id: binaries
       shell: bash
@@ -48,3 +36,11 @@ runs:
           --options:host "&:xrpld=True" \
           --settings:all build_type=${{ inputs.configuration }} \
           ..
+    - name: upload dependencies
+      if: ${{ env.CONAN_URL != '' && env.CONAN_LOGIN_USERNAME_XRPLF != '' && env.CONAN_PASSWORD_XRPLF != '' && github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
+      shell: bash
+      run: |
+        echo "Logging into Conan remote 'xrplf' at ${CONAN_URL}."
+        conan remote login xrplf "${{ env.CONAN_LOGIN_USERNAME_XRPLF }}" --password "${{ env.CONAN_PASSWORD_XRPLF }}"
+        echo "Uploading dependencies for configuration '${{ inputs.configuration }}'."
+        conan upload --all --confirm --remote xrplf . --settings build_type=${{ inputs.configuration }}
```
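For anyone mirroring this step outside CI, the add-or-update logic reduces to a short shell sketch. It assumes Conan 2.x is installed and `CONAN_URL` is exported in the current shell, and uses only the commands that appear in the step above.

```bash
#!/usr/bin/env bash
# Minimal local sketch of the new "add Conan remote" step: make the 'xrplf'
# remote point at ${CONAN_URL}, whether or not it already exists.
set -euo pipefail

if conan remote list | grep -q 'xrplf'; then
  # Remote exists: repoint it and keep it first in the lookup order.
  conan remote update --index 0 --url "${CONAN_URL}" xrplf
else
  # Remote missing: create it at index 0 so it is consulted before conancenter.
  conan remote add --index 0 xrplf "${CONAN_URL}"
fi
conan remote list
```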
.github/workflows/libxrpl.yml (vendored, 16 changed lines)

```diff
@@ -1,8 +1,8 @@
 name: Check libXRPL compatibility with Clio
 env:
-  CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
-  CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
-  CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
+  CONAN_URL: https://conan.ripplex.io
+  CONAN_LOGIN_USERNAME_XRPLF: ${{ secrets.CONAN_USERNAME }}
+  CONAN_PASSWORD_XRPLF: ${{ secrets.CONAN_TOKEN }}
 on:
   pull_request:
     paths:
@@ -43,20 +43,20 @@ jobs:
       shell: bash
       run: |
         conan export . ${{ steps.channel.outputs.channel }}
-    - name: Add Ripple Conan remote
+    - name: Add Conan remote
       shell: bash
       run: |
         conan remote list
-        conan remote remove ripple || true
+        conan remote remove xrplf || true
         # Do not quote the URL. An empty string will be accepted (with a non-fatal warning), but a missing argument will not.
-        conan remote add ripple ${{ env.CONAN_URL }} --insert 0
+        conan remote add xrplf ${{ env.CONAN_URL }} --insert 0
     - name: Parse new version
       id: version
       shell: bash
       run: |
         echo version="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" \
           | awk -F '"' '{print $2}')" | tee ${GITHUB_OUTPUT}
-    - name: Try to authenticate to Ripple Conan remote
+    - name: Try to authenticate to Conan remote
       id: remote
       shell: bash
       run: |
@@ -64,7 +64,7 @@ jobs:
         # https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
         # https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
         # https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
-        echo outcome=$(conan user --remote ripple --password >&2 \
+        echo outcome=$(conan user --remote xrplf --password >&2 \
           && echo success || echo failure) | tee ${GITHUB_OUTPUT}
     - name: Upload new package
       id: upload
```
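The "Parse new version" step above relies on a grep/awk pipeline; the snippet below shows the same expression run locally. The `versionString` value in the comment is a made-up example, not taken from the repository.

```bash
# Hypothetical line as it might appear in src/libxrpl/protocol/BuildInfo.cpp:
#   versionString = "2.6.0"
# The workflow's expression prints just the quoted value:
grep "versionString =" src/libxrpl/protocol/BuildInfo.cpp | awk -F '"' '{print $2}'
# Expected output for the hypothetical line above: 2.6.0
```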
.github/workflows/macos.yml (vendored, 23 changed lines)

```diff
@@ -18,9 +18,9 @@ concurrency:
 # This part of Conan configuration is specific to this workflow only; we do not want
 # to pollute conan/profiles directory with settings which might not work for others
 env:
-  CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
-  CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
-  CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
+  CONAN_URL: https://conan.ripplex.io
+  CONAN_LOGIN_USERNAME_XRPLF: ${{ secrets.CONAN_USERNAME }}
+  CONAN_PASSWORD_XRPLF: ${{ secrets.CONAN_TOKEN }}
   CONAN_GLOBAL_CONF: |
     core.download:parallel={{os.cpu_count()}}
     core.upload:parallel={{os.cpu_count()}}
@@ -87,24 +87,9 @@ jobs:
           clang --version
       - name: configure Conan
        run : |
-          echo "${CONAN_GLOBAL_CONF}" >> $(conan config home)/global.conf
+          echo "${CONAN_GLOBAL_CONF}" > $(conan config home)/global.conf
           conan config install conan/profiles/ -tf $(conan config home)/profiles/
           conan profile show
-      - name: export custom recipes
-        shell: bash
-        run: |
-          conan export --version 1.1.10 external/snappy
-          conan export --version 4.0.3 external/soci
-      - name: add Ripple Conan remote
-        if: env.CONAN_URL != ''
-        shell: bash
-        run: |
-          if conan remote list | grep -q "ripple"; then
-            conan remote remove ripple
-            echo "Removed conan remote ripple"
-          fi
-          conan remote add --index 0 ripple "${CONAN_URL}"
-          echo "Added conan remote ripple at ${CONAN_URL}"
       - name: build dependencies
         uses: ./.github/actions/dependencies
         with:
```
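The only functional change in "configure Conan" is writing `global.conf` with `>` instead of `>>`. A minimal sketch of why that matters when the step is re-run, using the same paths as the step above:

```bash
# With '>>' every re-run appends another copy of CONAN_GLOBAL_CONF;
# with '>' the file is truncated first, so its contents stay stable:
echo "${CONAN_GLOBAL_CONF}" > "$(conan config home)/global.conf"
cat "$(conan config home)/global.conf"
```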
.github/workflows/nix.yml (vendored, 6 changed lines)

```diff
@@ -19,9 +19,9 @@ concurrency:
 # This part of Conan configuration is specific to this workflow only; we do not want
 # to pollute conan/profiles directory with settings which might not work for others
 env:
-  CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
-  CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
-  CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
+  CONAN_URL: https://conan.ripplex.io
+  CONAN_LOGIN_USERNAME_XRPLF: ${{ secrets.CONAN_USERNAME }}
+  CONAN_PASSWORD_XRPLF: ${{ secrets.CONAN_TOKEN }}
   CONAN_GLOBAL_CONF: |
     core.download:parallel={{ os.cpu_count() }}
     core.upload:parallel={{ os.cpu_count() }}
```
.github/workflows/windows.yml (vendored, 23 changed lines)

```diff
@@ -21,9 +21,9 @@ concurrency:
 # This part of Conan configuration is specific to this workflow only; we do not want
 # to pollute conan/profiles directory with settings which might not work for others
 env:
-  CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
-  CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
-  CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
+  CONAN_URL: https://conan.ripplex.io
+  CONAN_LOGIN_USERNAME_XRPLF: ${{ secrets.CONAN_USERNAME }}
+  CONAN_PASSWORD_XRPLF: ${{ secrets.CONAN_TOKEN }}
   CONAN_GLOBAL_CONF: |
     core.download:parallel={{os.cpu_count()}}
     core.upload:parallel={{os.cpu_count()}}
@@ -82,24 +82,9 @@ jobs:
       - name: configure Conan
         shell: bash
         run: |
-          echo "${CONAN_GLOBAL_CONF}" >> $(conan config home)/global.conf
+          echo "${CONAN_GLOBAL_CONF}" > $(conan config home)/global.conf
           conan config install conan/profiles/ -tf $(conan config home)/profiles/
           conan profile show
-      - name: export custom recipes
-        shell: bash
-        run: |
-          conan export --version 1.1.10 external/snappy
-          conan export --version 4.0.3 external/soci
-      - name: add Ripple Conan remote
-        if: env.CONAN_URL != ''
-        shell: bash
-        run: |
-          if conan remote list | grep -q "ripple"; then
-            conan remote remove ripple
-            echo "Removed conan remote ripple"
-          fi
-          conan remote add --index 0 ripple "${CONAN_URL}"
-          echo "Added conan remote ripple at ${CONAN_URL}"
       - name: build dependencies
         uses: ./.github/actions/dependencies
         with:
```
BUILD.md (413 changed lines)
````diff
@@ -3,29 +3,29 @@
 | These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |

 > These instructions also assume a basic familiarity with Conan and CMake.
-> If you are unfamiliar with Conan,
-> you can read our [crash course](./docs/build/conan.md)
-> or the official [Getting Started][3] walkthrough.
+> If you are unfamiliar with Conan, you can read our
+> [crash course](./docs/build/conan.md) or the official [Getting Started][3]
+> walkthrough.

 ## Branches

 For a stable release, choose the `master` branch or one of the [tagged
 releases](https://github.com/ripple/rippled/releases).

-```
+```bash
 git checkout master
 ```

 For the latest release candidate, choose the `release` branch.

-```
+```bash
 git checkout release
 ```

 For the latest set of untested features, or to contribute, choose the `develop`
 branch.

-```
+```bash
 git checkout develop
 ```

````
````diff
@@ -33,151 +33,295 @@ git checkout develop

 See [System Requirements](https://xrpl.org/system-requirements.html).

-Building rippled generally requires git, Python, Conan, CMake, and a C++ compiler. Some guidance on setting up such a [C++ development environment can be found here](./docs/build/environment.md).
+Building rippled generally requires git, Python, Conan, CMake, and a C++
+compiler. Some guidance on setting up such a [C++ development environment can be
+found here](./docs/build/environment.md).

-- [Python 3.7](https://www.python.org/downloads/)
-- [Conan 1.60](https://conan.io/downloads.html)[^1]
-- [CMake 3.16](https://cmake.org/download/)
+- [Python 3.11](https://www.python.org/downloads/), or higher
+- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
+- [CMake 3.22](https://cmake.org/download/)[^2], or higher

-[^1]: It is possible to build with Conan 2.x,
-but the instructions are significantly different,
-which is why we are not recommending it yet.
-Notably, the `conan profile update` command is removed in 2.x.
-Profiles must be edited by hand.
+[^1]: It is possible to build with Conan 1.60+, but the instructions are
+significantly different, which is why we are not recommending it.
+[^2]: CMake 4 is not yet supported by all dependencies required by this project.
+If you are affected by this issue, follow [conan workaround for cmake
+4](#workaround-for-cmake-4)

 `rippled` is written in the C++20 dialect and includes the `<concepts>` header.
 The [minimum compiler versions][2] required are:

 | Compiler | Version |
-|-------------|---------|
-| GCC | 11 |
-| Clang | 13 |
-| Apple Clang | 13.1.6 |
-| MSVC | 19.23 |
+|-------------|-----|
+| GCC | 12 |
+| Clang | 16 |
+| Apple Clang | 16 |
+| MSVC | 19.44[^3] |

 ### Linux

-The Ubuntu operating system has received the highest level of
-quality assurance, testing, and support.
+The Ubuntu Linux distribution has received the highest level of quality
+assurance, testing, and support. We also support Red Hat and use Debian
+internally.

-Here are [sample instructions for setting up a C++ development environment on Linux](./docs/build/environment.md#linux).
+Here are [sample instructions for setting up a C++ development environment on
+Linux](./docs/build/environment.md#linux).

 ### Mac

 Many rippled engineers use macOS for development.

-Here are [sample instructions for setting up a C++ development environment on macOS](./docs/build/environment.md#macos).
+Here are [sample instructions for setting up a C++ development environment on
+macOS](./docs/build/environment.md#macos).

 ### Windows

-Windows is not recommended for production use at this time.
+Windows is used by some engineers for development only.

-- Additionally, 32-bit Windows development is not supported.
+[^3]: Windows is not recommended for production use.

-[Boost]: https://www.boost.org/

 ## Steps

 ### Set Up Conan

-After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python, Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.
+After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python,
+Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.

-These instructions assume a basic familiarity with Conan and CMake.
+These instructions assume a basic familiarity with Conan and CMake. If you are
+unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official
+[Getting Started][3] walkthrough.

-If you are unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official [Getting Started][3] walkthrough.
+#### Default profile
+We recommend that you import the provided `conan/profiles/default` profile:

-You'll need at least one Conan profile:
-```
-conan profile new default --detect
-```
+```bash
+conan config install conan/profiles/ -tf $(conan config home)/profiles/

-Update the compiler settings:
-
-```
-conan profile update settings.compiler.cppstd=20 default
-```
-
-Configure Conan (1.x only) to use recipe revisions:
-
-```
-conan config set general.revisions_enabled=1
-```
-
-**Linux** developers will commonly have a default Conan [profile][] that compiles
-with GCC and links with libstdc++.
-If you are linking with libstdc++ (see profile setting `compiler.libcxx`),
-then you will need to choose the `libstdc++11` ABI:
-
-```
-conan profile update settings.compiler.libcxx=libstdc++11 default
-```
-
-
-Ensure inter-operability between `boost::string_view` and `std::string_view` types:
-
-```
-conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_BEAST_USE_STD_STRING_VIEW"]' default
-conan profile update 'env.CXXFLAGS="-DBOOST_BEAST_USE_STD_STRING_VIEW"' default
 ```

-If you have other flags in the `conf.tools.build` or `env.CXXFLAGS` sections, make sure to retain the existing flags and append the new ones. You can check them with:
-```
-conan profile show default
+You can check your Conan profile by running:
+```bash
+conan profile show
 ```

+#### Custom profile

-**Windows** developers may need to use the x64 native build tools.
-An easy way to do that is to run the shortcut "x64 Native Tools Command
-Prompt" for the version of Visual Studio that you have installed.
+If the default profile does not work for you and you do not yet have a Conan
+profile, you can create one by running:

-Windows developers must also build `rippled` and its dependencies for the x64
-architecture:
+```bash
+conan profile detect

-```
-conan profile update settings.arch=x86_64 default
-```

-### Multiple compilers
-
-When `/usr/bin/g++` exists on a platform, it is the default cpp compiler. This
-default works for some users.
-
-However, if this compiler cannot build rippled or its dependencies, then you can
-install another compiler and set Conan and CMake to use it.
-Update the `conf.tools.build:compiler_executables` setting in order to set the correct variables (`CMAKE_<LANG>_COMPILER`) in the
-generated CMake toolchain file.
-For example, on Ubuntu 20, you may have gcc at `/usr/bin/gcc` and g++ at `/usr/bin/g++`; if that is the case, you can select those compilers with:
-```
-conan profile update 'conf.tools.build:compiler_executables={"c": "/usr/bin/gcc", "cpp": "/usr/bin/g++"}' default
 ```

-Replace `/usr/bin/gcc` and `/usr/bin/g++` with paths to the desired compilers.
+You may need to make changes to the profile to suit your environment. You can
+refer to the provided `conan/profiles/default` profile for inspiration, and you
+may also need to apply the required [tweaks](#conan-profile-tweaks) to this
+default profile.

-It should choose the compiler for dependencies as well,
-but not all of them have a Conan recipe that respects this setting (yet).
-For the rest, you can set these environment variables.
-Replace `<path>` with paths to the desired compilers:
+### Patched recipes

-- `conan profile update env.CC=<path> default`
-- `conan profile update env.CXX=<path> default`
+The recipes in Conan Center occasionally need to be patched for compatibility
+with the latest version of `rippled`. We maintain a fork of the Conan Center
+[here](https://github.com/XRPLF/conan-center-index/) containing the patches.

-Export our [Conan recipe for Snappy](./external/snappy).
-It does not explicitly link the C++ standard library,
-which allows you to statically link it with GCC, if you want.
+To ensure our patched recipes are used, you must add our Conan remote at a
+higher index than the default Conan Center remote, so it is consulted first. You
+can do this by running:

-```
-# Conan 2.x
-conan export --version 1.1.10 external/snappy
+```bash
+conan remote add --index 0 xrplf "https://conan.ripplex.io"
+```

+Alternatively, you can pull the patched recipes into the repository and use them
+locally:
+
+```bash
+cd external
+git init
+git remote add origin git@github.com:XRPLF/conan-center-index.git
+git sparse-checkout init
+git sparse-checkout set recipes/snappy
+git sparse-checkout add recipes/soci
+git fetch origin master
+git checkout master
+conan export --version 1.1.10 external/recipes/snappy
+conan export --version 4.0.3 external/recipes/soci
+```
+
+In the case we switch to a newer version of a dependency that still requires a
+patch, it will be necessary for you to pull in the changes and re-export the
+updated dependencies with the newer version. However, if we switch to a newer
+version that no longer requires a patch, no action is required on your part, as
+the new recipe will be automatically pulled from the official Conan Center.
+
+### Conan profile tweaks
+
+#### Missing compiler version
+
+If you see an error similar to the following after running `conan profile show`:
+
+```bash
+ERROR: Invalid setting '17' is not a valid 'settings.compiler.version' value.
+Possible values are ['5.0', '5.1', '6.0', '6.1', '7.0', '7.3', '8.0', '8.1',
+'9.0', '9.1', '10.0', '11.0', '12.0', '13', '13.0', '13.1', '14', '14.0', '15',
+'15.0', '16', '16.0']
+Read "http://docs.conan.io/2/knowledge/faq.html#error-invalid-setting"
+```
+
+you need to amend the list of compiler versions in
+`$(conan config home)/settings.yml`, by appending the required version number(s)
+to the `version` array specific for your compiler. For example:
+
+```yaml
+apple-clang:
+  version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0",
+            "9.1", "10.0", "11.0", "12.0", "13", "13.0", "13.1", "14",
+            "14.0", "15", "15.0", "16", "16.0", "17", "17.0"]
+```
+
+#### Multiple compilers
+
+If you have multiple compilers installed, make sure to select the one to use in
+your default Conan configuration **before** running `conan profile detect`, by
+setting the `CC` and `CXX` environment variables.
+
+For example, if you are running MacOS and have [homebrew
+LLVM@18](https://formulae.brew.sh/formula/llvm@18), and want to use it as a
+compiler in the new Conan profile:
+
+```bash
+export CC=$(brew --prefix llvm@18)/bin/clang
+export CXX=$(brew --prefix llvm@18)/bin/clang++
+conan profile detect
 ```

-Export our [Conan recipe for SOCI](./external/soci).
-It patches their CMake to correctly import its dependencies.
+You should also explicitly set the path to the compiler in the profile file,
+which helps to avoid errors when `CC` and/or `CXX` are set and disagree with the
+selected Conan profile. For example:

-```
-# Conan 2.x
-conan export --version 4.0.3 external/soci
+```text
+[conf]
+tools.build:compiler_executables={'c':'/usr/bin/gcc','cpp':'/usr/bin/g++'}
 ```

+#### Multiple profiles
+
+You can manage multiple Conan profiles in the directory
+`$(conan config home)/profiles`, for example renaming `default` to a different
+name and then creating a new `default` profile for a different compiler.
+
+#### Select language
+
+The default profile created by Conan will typically select different C++ dialect
+than C++20 used by this project. You should set `20` in the profile line
+starting with `compiler.cppstd=`. For example:
+
+```bash
+sed -i.bak -e 's|^compiler\.cppstd=.*$|compiler.cppstd=20|' $(conan config home)/profiles/default
+```
+
+#### Select standard library in Linux
+
+**Linux** developers will commonly have a default Conan [profile][] that
+compiles with GCC and links with libstdc++. If you are linking with libstdc++
+(see profile setting `compiler.libcxx`), then you will need to choose the
+`libstdc++11` ABI:
+
+```bash
+sed -i.bak -e 's|^compiler\.libcxx=.*$|compiler.libcxx=libstdc++11|' $(conan config home)/profiles/default
+```
+
+#### Select architecture and runtime in Windows
+
+**Windows** developers may need to use the x64 native build tools. An easy way
+to do that is to run the shortcut "x64 Native Tools Command Prompt" for the
+version of Visual Studio that you have installed.
+
+Windows developers must also build `rippled` and its dependencies for the x64
+architecture:
+
+```bash
+sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
+```
+
+**Windows** developers also must select static runtime:
+
+```bash
+sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
+```
+
+#### Workaround for CMake 4
+
+If your system CMake is version 4 rather than 3, you may have to configure Conan
+profile to use CMake version 3 for dependencies, by adding the following two
+lines to your profile:
+
+```text
+[tool_requires]
+!cmake/*: cmake/[>=3 <4]
+```
+
+This will force Conan to download and use a locally cached CMake 3 version, and
+is needed because some of the dependencies used by this project do not support
+CMake 4.
+
+#### Clang workaround for grpc
+
+If your compiler is clang, version 19 or later, or apple-clang, version 17 or
+later, you may encounter a compilation error while building the `grpc`
+dependency:
+
+```text
+In file included from .../lib/promise/try_seq.h:26:
+.../lib/promise/detail/basic_seq.h:499:38: error: a template argument list is expected after a name prefixed by the template keyword [-Wmissing-template-arg-list-after-template-kw]
+  499 |             Traits::template CallSeqFactory(f_, *cur_, std::move(arg)));
+      |                                      ^
+```
+
+The workaround for this error is to add two lines to profile:
+
+```text
+[conf]
+tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
+```
+
+#### Workaround for gcc 12
+
+If your compiler is gcc, version 12, and you have enabled `werr` option, you may
+encounter a compilation error such as:
+
+```text
+/usr/include/c++/12/bits/char_traits.h:435:56: error: 'void* __builtin_memcpy(void*, const void*, long unsigned int)' accessing 9223372036854775810 or more bytes at offsets [2, 9223372036854775807] and 1 may overlap up to 9223372036854775813 bytes at offset -3 [-Werror=restrict]
+  435 |         return static_cast<char_type*>(__builtin_memcpy(__s1, __s2, __n));
+      |                ~~~~~~~~~~~~~~~~^~~~~~~~~~~~~~~~~
+cc1plus: all warnings being treated as errors
+```
+
+The workaround for this error is to add two lines to your profile:
+
+```text
+[conf]
+tools.build:cxxflags=['-Wno-restrict']
+```
+
+#### Workaround for clang 16
+
+If your compiler is clang, version 16, you may encounter compilation error such
+as:
+
+```text
+In file included from .../boost/beast/websocket/stream.hpp:2857:
+.../boost/beast/websocket/impl/read.hpp:695:17: error: call to 'async_teardown' is ambiguous
+    async_teardown(impl.role, impl.stream(),
+    ^~~~~~~~~~~~~~
+```
+
+The workaround for this error is to add two lines to your profile:
+
+```text
+[conf]
+tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
+```
+
 ### Build and Test

````
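Taken together, the revised instructions reduce initial Conan setup to a short sequence. This is only a sketch using commands shown in the hunk above, and assumes Conan 2.x is on the PATH and that you are in the repository root:

```bash
# One-time Conan setup per the revised BUILD.md: import the provided profiles,
# put the patched-recipe remote first, then verify what Conan will use.
conan config install conan/profiles/ -tf $(conan config home)/profiles/
conan remote add --index 0 xrplf "https://conan.ripplex.io"
conan profile show
```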
````diff
@@ -245,7 +389,6 @@ It patches their CMake to correctly import its dependencies.
 cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON ..
 ```

-
 Multi-config generators:

 ```
````
````diff
@@ -257,13 +400,13 @@ It patches their CMake to correctly import its dependencies.
 5. Build `rippled`.

 For a single-configuration generator, it will build whatever configuration
-you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator,
-you must pass the option `--config` to select the build configuration.
+you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator, you
+must pass the option `--config` to select the build configuration.

 Single-config generators:

 ```
-cmake --build . -j $(nproc)
+cmake --build .
 ```

 Multi-config generators:
````
````diff
@@ -278,18 +421,22 @@ It patches their CMake to correctly import its dependencies.
 Single-config generators:

 ```
-./rippled --unittest
+./rippled --unittest --unittest-jobs N
 ```

 Multi-config generators:

 ```
-./Release/rippled --unittest
-./Debug/rippled --unittest
+./Release/rippled --unittest --unittest-jobs N
+./Debug/rippled --unittest --unittest-jobs N
 ```

-The location of `rippled` in your build directory depends on your CMake
-generator. Pass `--help` to see the rest of the command line options.
+Replace the `--unittest-jobs` parameter N with the desired unit tests
+concurrency. Recommended setting is half of the number of available CPU
+cores.
+
+The location of `rippled` binary in your build directory depends on your
+CMake generator. Pass `--help` to see the rest of the command line options.


 ## Coverage report
````
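A concrete way to apply the "half of the available CPU cores" recommendation above, assuming a shell where `nproc` is available; the single-config binary path is shown, adjust it (e.g. `./Release/rippled`) for multi-config generators:

```bash
# Run the unit tests with concurrency equal to half the CPU count (minimum 1).
JOBS=$(( $(nproc) / 2 ))
./rippled --unittest --unittest-jobs $(( JOBS > 0 ? JOBS : 1 ))
```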
````diff
@@ -347,7 +494,7 @@ cmake --build . --target coverage
 After the `coverage` target is completed, the generated coverage report will be
 stored inside the build directory, as either of:

-- file named `coverage.`_extension_ , with a suitable extension for the report format, or
+- file named `coverage.`_extension_, with a suitable extension for the report format, or
 - directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.


````
````diff
@@ -355,12 +502,14 @@ stored inside the build directory, as either of:

 | Option | Default Value | Description |
 | --- | ---| ---|
-| `assert` | OFF | Enable assertions.
+| `assert` | OFF | Enable assertions. |
 | `coverage` | OFF | Prepare the coverage report. |
 | `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
 | `tests` | OFF | Build tests. |
-| `unity` | ON | Configure a unity build. |
+| `unity` | OFF | Configure a unity build. |
 | `xrpld` | OFF | Build the xrpld (`rippled`) application, and not just the libxrpl library. |
+| `werr` | OFF | Treat compilation warnings as errors |
+| `wextra` | OFF | Enable additional compilation warnings |

 [Unity builds][5] may be faster for the first build
 (at the cost of much more memory) since they concatenate sources into fewer
````
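For illustration only, a hypothetical configure line combining several options from this table with the toolchain file used earlier in these instructions; the exact set of options depends on what you want to build:

```bash
# Hypothetical combination: build the xrpld application and its tests, and
# treat warnings as errors; unity builds stay at their (now OFF) default.
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
      -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON -Dwerr=ON ..
```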
````diff
@@ -375,12 +524,28 @@ and can be helpful for detecting `#include` omissions.
 After any updates or changes to dependencies, you may need to do the following:

 1. Remove your build directory.
-2. Remove the Conan cache: `conan remove "*" -c`
-3. Re-run [conan install](#build-and-test).
+2. Remove individual libraries from the Conan cache, e.g.

-### 'protobuf/port_def.inc' file not found
+```bash
+conan remove 'grpc/*'
+```

-If `cmake --build .` results in an error due to a missing a protobuf file, then you might have generated CMake files for a different `build_type` than the `CMAKE_BUILD_TYPE` you passed to conan.
+**or**

+Remove all libraries from Conan cache:
+
+```bash
+conan remove '*'
+```
+
+3. Re-run [conan export](#patched-recipes) if needed.
+4. Re-run [conan install](#build-and-test).
+
+### `protobuf/port_def.inc` file not found
+
+If `cmake --build .` results in an error due to a missing a protobuf file, then
+you might have generated CMake files for a different `build_type` than the
+`CMAKE_BUILD_TYPE` you passed to Conan.

 ```
 /rippled/.build/pb-xrpl.libpb/xrpl/proto/ripple.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
````
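The dependency-update recovery steps above, collected into one hedged sequence; the `build` directory name and the choice of `grpc` as the evicted package are assumptions for the example, and the final `conan install` is the one from the Build and Test section:

```bash
# After a dependency bump: clean the build tree, evict the affected package(s)
# from the Conan cache, then re-export patched recipes if you use them.
rm -rf build
conan remove 'grpc/*'        # a single library, or: conan remove '*' for everything
conan export --version 1.1.10 external/recipes/snappy
conan export --version 4.0.3 external/recipes/soci
# ...then re-run the `conan install` step from Build and Test.
```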
docs/build/environment.md (vendored, 84 changed lines)
````diff
@@ -10,37 +10,35 @@ platforms: Linux, macOS, or Windows.

 Package ecosystems vary across Linux distributions,
 so there is no one set of instructions that will work for every Linux user.
-These instructions are written for Ubuntu 22.04.
-They are largely copied from the [script][1] used to configure our Docker
-container for continuous integration.
-That script handles many more responsibilities.
-These instructions are just the bare minimum to build one configuration of
-rippled.
-You can check that codebase for other Linux distributions and versions.
-If you cannot find yours there,
-then we hope that these instructions can at least guide you in the right
-direction.
+The instructions below are written for Debian 12 (Bookworm).

 ```
-apt update
-apt install --yes curl git libssl-dev pipx python3.10-dev python3-pip make g++-11 libprotobuf-dev protobuf-compiler
+export GCC_RELEASE=12
+sudo apt update
+sudo apt install --yes gcc-${GCC_RELEASE} g++-${GCC_RELEASE} python3-pip \
+  python-is-python3 python3-venv python3-dev curl wget ca-certificates \
+  git build-essential cmake ninja-build libc6-dev
+sudo pip install --break-system-packages conan

-curl --location --remote-name \
-  "https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1.tar.gz"
-tar -xzf cmake-3.25.1.tar.gz
-rm cmake-3.25.1.tar.gz
-cd cmake-3.25.1
-./bootstrap --parallel=$(nproc)
-make --jobs $(nproc)
-make install
-cd ..
-pipx install 'conan<2'
-pipx ensurepath
+sudo update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-${GCC_RELEASE} 999
+sudo update-alternatives --install \
+  /usr/bin/gcc gcc /usr/bin/gcc-${GCC_RELEASE} 100 \
+  --slave /usr/bin/g++ g++ /usr/bin/g++-${GCC_RELEASE} \
+  --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-${GCC_RELEASE} \
+  --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-${GCC_RELEASE} \
+  --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-${GCC_RELEASE} \
+  --slave /usr/bin/gcov gcov /usr/bin/gcov-${GCC_RELEASE} \
+  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-${GCC_RELEASE} \
+  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-${GCC_RELEASE} \
+  --slave /usr/bin/lto-dump lto-dump /usr/bin/lto-dump-${GCC_RELEASE}
+sudo update-alternatives --auto cc
+sudo update-alternatives --auto gcc
 ```

-[1]: https://github.com/thejohnfreeman/rippled-docker/blob/master/ubuntu-22.04/install.sh
+If you use different Linux distribution, hope the instruction above can guide
+you in the right direction. We try to maintain compatibility with all recent
+compiler releases, so if you use a rolling distribution like e.g. Arch or CentOS
+then there is a chance that everything will "just work".

 ## macOS

````
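After running the Debian setup above, a quick sanity check that the toolchain Conan will see matches what BUILD.md expects; the versions in the comments are the documented minimums, not guaranteed output on your machine:

```bash
# Verify the alternatives now point at GCC 12 and that Conan 2.x is installed.
cc --version        # should report gcc-12 via the update-alternatives link
g++ --version
cmake --version     # BUILD.md asks for CMake 3.22 or higher
conan --version     # BUILD.md asks for Conan 2.17 or higher
```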
````diff
@@ -53,6 +51,34 @@ minimum required (see [BUILD.md][]).
 clang --version
 ```

+### Install Xcode Specific Version (Optional)
+
+If you develop other applications using XCode you might be consistently updating to the newest version of Apple Clang.
+This will likely cause issues building rippled. You may want to install a specific version of Xcode:
+
+1. **Download Xcode**
+
+   - Visit [Apple Developer Downloads](https://developer.apple.com/download/more/)
+   - Sign in with your Apple Developer account
+   - Search for an Xcode version that includes **Apple Clang (Expected Version)**
+   - Download the `.xip` file
+
+2. **Install and Configure Xcode**
+
+   ```bash
+   # Extract the .xip file and rename for version management
+   # Example: Xcode_16.2.app
+
+   # Move to Applications directory
+   sudo mv Xcode_16.2.app /Applications/
+
+   # Set as default toolchain (persistent)
+   sudo xcode-select -s /Applications/Xcode_16.2.app/Contents/Developer
+
+   # Set as environment variable (temporary)
+   export DEVELOPER_DIR=/Applications/Xcode_16.2.app/Contents/Developer
+   ```
+
 The command line developer tools should include Git too:

 ```
````
````diff
@@ -72,10 +98,10 @@ and use it to install Conan:
 brew update
 brew install xz
 brew install pyenv
-pyenv install 3.10-dev
-pyenv global 3.10-dev
+pyenv install 3.11
+pyenv global 3.11
 eval "$(pyenv init -)"
-pip install 'conan<2'
+pip install 'conan'
 ```

 Install CMake with Homebrew too:
````
external/README.md (vendored, 8 changed lines)
```diff
@@ -1,14 +1,10 @@
 # External Conan recipes

-The subdirectories in this directory contain either copies or Conan recipes
-of external libraries used by rippled.
-The Conan recipes include patches we have not yet pushed upstream.
+The subdirectories in this directory contain copies of external libraries used
+by rippled.

 | Folder | Upstream | Description |
 |:----------------|:---------------------------------------------|:------------|
 | `antithesis-sdk`| [Project](https://github.com/antithesishq/antithesis-sdk-cpp/) | [Antithesis](https://antithesis.com/docs/using_antithesis/sdk/cpp/overview.html) SDK for C++ |
 | `ed25519-donna` | [Project](https://github.com/floodyberry/ed25519-donna) | [Ed25519](http://ed25519.cr.yp.to/) digital signatures |
-| `rocksdb` | [Recipe](https://github.com/conan-io/conan-center-index/tree/master/recipes/rocksdb) | Fast key/value database. (Supports rotational disks better than NuDB.) |
 | `secp256k1` | [Project](https://github.com/bitcoin-core/secp256k1) | ECDSA digital signatures using the **secp256k1** curve |
-| `snappy` | [Recipe](https://github.com/conan-io/conan-center-index/tree/master/recipes/snappy) | "Snappy" lossless compression algorithm. |
-| `soci` | [Recipe](https://github.com/conan-io/conan-center-index/tree/master/recipes/soci) | Abstraction layer for database access. |
```
external/snappy/conandata.yml (vendored, 40 changed lines)
```diff
@@ -1,40 +0,0 @@
-sources:
-  "1.1.10":
-    url: "https://github.com/google/snappy/archive/1.1.10.tar.gz"
-    sha256: "49d831bffcc5f3d01482340fe5af59852ca2fe76c3e05df0e67203ebbe0f1d90"
-  "1.1.9":
-    url: "https://github.com/google/snappy/archive/1.1.9.tar.gz"
-    sha256: "75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7"
-  "1.1.8":
-    url: "https://github.com/google/snappy/archive/1.1.8.tar.gz"
-    sha256: "16b677f07832a612b0836178db7f374e414f94657c138e6993cbfc5dcc58651f"
-  "1.1.7":
-    url: "https://github.com/google/snappy/archive/1.1.7.tar.gz"
-    sha256: "3dfa02e873ff51a11ee02b9ca391807f0c8ea0529a4924afa645fbf97163f9d4"
-patches:
-  "1.1.10":
-    - patch_file: "patches/1.1.10-0001-fix-inlining-failure.patch"
-      patch_description: "disable inlining for compilation error"
-      patch_type: "portability"
-    - patch_file: "patches/1.1.9-0002-no-Werror.patch"
-      patch_description: "disable 'warning as error' options"
-      patch_type: "portability"
-    - patch_file: "patches/1.1.10-0003-fix-clobber-list-older-llvm.patch"
-      patch_description: "disable inline asm on apple-clang"
-      patch_type: "portability"
-    - patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
-      patch_description: "remove 'disable rtti'"
-      patch_type: "conan"
-  "1.1.9":
-    - patch_file: "patches/1.1.9-0001-fix-inlining-failure.patch"
-      patch_description: "disable inlining for compilation error"
-      patch_type: "portability"
-    - patch_file: "patches/1.1.9-0002-no-Werror.patch"
-      patch_description: "disable 'warning as error' options"
-      patch_type: "portability"
-    - patch_file: "patches/1.1.9-0003-fix-clobber-list-older-llvm.patch"
-      patch_description: "disable inline asm on apple-clang"
-      patch_type: "portability"
-    - patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
-      patch_description: "remove 'disable rtti'"
-      patch_type: "conan"
```
external/snappy/conanfile.py (vendored, 89 changed lines)
```diff
@@ -1,89 +0,0 @@
-from conan import ConanFile
-from conan.tools.build import check_min_cppstd
-from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
-from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
-from conan.tools.scm import Version
-import os
-
-required_conan_version = ">=1.54.0"
-
-
-class SnappyConan(ConanFile):
-    name = "snappy"
-    description = "A fast compressor/decompressor"
-    topics = ("google", "compressor", "decompressor")
-    url = "https://github.com/conan-io/conan-center-index"
-    homepage = "https://github.com/google/snappy"
-    license = "BSD-3-Clause"
-
-    package_type = "library"
-    settings = "os", "arch", "compiler", "build_type"
-    options = {
-        "shared": [True, False],
-        "fPIC": [True, False],
-    }
-    default_options = {
-        "shared": False,
-        "fPIC": True,
-    }
-
-    def export_sources(self):
-        export_conandata_patches(self)
-
-    def config_options(self):
-        if self.settings.os == 'Windows':
-            del self.options.fPIC
-
-    def configure(self):
-        if self.options.shared:
-            self.options.rm_safe("fPIC")
-
-    def layout(self):
-        cmake_layout(self, src_folder="src")
-
-    def validate(self):
-        if self.settings.compiler.get_safe("cppstd"):
-            check_min_cppstd(self, 11)
-
-    def source(self):
-        get(self, **self.conan_data["sources"][self.version], strip_root=True)
-
-    def generate(self):
-        tc = CMakeToolchain(self)
-        tc.variables["SNAPPY_BUILD_TESTS"] = False
-        if Version(self.version) >= "1.1.8":
-            tc.variables["SNAPPY_FUZZING_BUILD"] = False
-            tc.variables["SNAPPY_REQUIRE_AVX"] = False
-            tc.variables["SNAPPY_REQUIRE_AVX2"] = False
-            tc.variables["SNAPPY_INSTALL"] = True
-        if Version(self.version) >= "1.1.9":
-            tc.variables["SNAPPY_BUILD_BENCHMARKS"] = False
-        tc.generate()
-
-    def build(self):
-        apply_conandata_patches(self)
-        cmake = CMake(self)
-        cmake.configure()
-        cmake.build()
-
-    def package(self):
-        copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
-        cmake = CMake(self)
-        cmake.install()
-        rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
-
-    def package_info(self):
-        self.cpp_info.set_property("cmake_file_name", "Snappy")
-        self.cpp_info.set_property("cmake_target_name", "Snappy::snappy")
-        # TODO: back to global scope in conan v2 once cmake_find_package* generators removed
-        self.cpp_info.components["snappylib"].libs = ["snappy"]
-        if not self.options.shared:
-            if self.settings.os in ["Linux", "FreeBSD"]:
-                self.cpp_info.components["snappylib"].system_libs.append("m")
-
-        # TODO: to remove in conan v2 once cmake_find_package* generators removed
-        self.cpp_info.names["cmake_find_package"] = "Snappy"
-        self.cpp_info.names["cmake_find_package_multi"] = "Snappy"
-        self.cpp_info.components["snappylib"].names["cmake_find_package"] = "snappy"
-        self.cpp_info.components["snappylib"].names["cmake_find_package_multi"] = "snappy"
-        self.cpp_info.components["snappylib"].set_property("cmake_target_name", "Snappy::snappy")
```
Deleted patch files (shown without their file headers):

```diff
@@ -1,13 +0,0 @@
-diff --git a/snappy-stubs-internal.h b/snappy-stubs-internal.h
-index 1548ed7..3b4a9f3 100644
---- a/snappy-stubs-internal.h
-+++ b/snappy-stubs-internal.h
-@@ -100,7 +100,7 @@
-
- // Inlining hints.
- #if HAVE_ATTRIBUTE_ALWAYS_INLINE
--#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
-+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
- #else
- #define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
- #endif  // HAVE_ATTRIBUTE_ALWAYS_INLINE
@@ -1,13 +0,0 @@
-diff --git a/snappy.cc b/snappy.cc
-index d414718..e4efb59 100644
---- a/snappy.cc
-+++ b/snappy.cc
-@@ -1132,7 +1132,7 @@ inline size_t AdvanceToNextTagX86Optimized(const uint8_t** ip_p, size_t* tag) {
-   size_t literal_len = *tag >> 2;
-   size_t tag_type = *tag;
-   bool is_literal;
--#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__)
-+#if defined(__GCC_ASM_FLAG_OUTPUTS__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
-   // TODO clang misses the fact that the (c & 3) already correctly
-   // sets the zero flag.
-   asm("and $3, %k[tag_type]\n\t"
@@ -1,14 +0,0 @@
-Fixes the following error:
-error: inlining failed in call to ‘always_inline’ ‘size_t snappy::AdvanceToNextTag(const uint8_t**, size_t*)’: function body can be overwritten at link time
-
---- snappy-stubs-internal.h
-+++ snappy-stubs-internal.h
-@@ -100,7 +100,7 @@
-
- // Inlining hints.
- #ifdef HAVE_ATTRIBUTE_ALWAYS_INLINE
--#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
-+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
- #else
- #define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
- #endif
@@ -1,12 +0,0 @@
---- CMakeLists.txt
-+++ CMakeLists.txt
-@@ -69,7 +69,7 @@
-- # Use -Werror for clang only.
-+if(0)
- if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
-   if(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
-     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
-   endif(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
- endif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
--
-+endif()
@@ -1,12 +0,0 @@
-asm clobbers do not work for clang < 9 and apple-clang < 11 (found by SpaceIm)
---- snappy.cc
-+++ snappy.cc
-@@ -1026,7 +1026,7 @@
-   size_t literal_len = *tag >> 2;
-   size_t tag_type = *tag;
-   bool is_literal;
--#if defined(__GNUC__) && defined(__x86_64__)
-+#if defined(__GNUC__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
-   // TODO clang misses the fact that the (c & 3) already correctly
-   // sets the zero flag.
-   asm("and $3, %k[tag_type]\n\t"
@@ -1,20 +0,0 @@
---- a/CMakeLists.txt
-+++ b/CMakeLists.txt
-@@ -53,8 +53,6 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
-   add_definitions(-D_HAS_EXCEPTIONS=0)
-
-   # Disable RTTI.
--  string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
--  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
- else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
-   # Use -Wall for clang and gcc.
-   if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
-@@ -78,8 +76,6 @@ endif()
-   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
-
-   # Disable RTTI.
--  string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
--  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
- endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
-
- # BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make
```
12 external/soci/conandata.yml vendored
@@ -1,12 +0,0 @@
sources:
  "4.0.3":
    url: "https://github.com/SOCI/soci/archive/v4.0.3.tar.gz"
    sha256: "4b1ff9c8545c5d802fbe06ee6cd2886630e5c03bf740e269bb625b45cf934928"
patches:
  "4.0.3":
    - patch_file: "patches/0001-Remove-hardcoded-INSTALL_NAME_DIR-for-relocatable-li.patch"
      patch_description: "Generate relocatable libraries on MacOS"
      patch_type: "portability"
    - patch_file: "patches/0002-Fix-soci_backend.patch"
      patch_description: "Fix variable names for dependencies"
      patch_type: "conan"
212 external/soci/conanfile.py vendored
@@ -1,212 +0,0 @@
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
from conan.tools.microsoft import is_msvc
from conan.tools.scm import Version
from conan.errors import ConanInvalidConfiguration
import os

required_conan_version = ">=1.55.0"


class SociConan(ConanFile):
    name = "soci"
    homepage = "https://github.com/SOCI/soci"
    url = "https://github.com/conan-io/conan-center-index"
    description = "The C++ Database Access Library "
    topics = ("mysql", "odbc", "postgresql", "sqlite3")
    license = "BSL-1.0"

    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        "empty": [True, False],
        "with_sqlite3": [True, False],
        "with_db2": [True, False],
        "with_odbc": [True, False],
        "with_oracle": [True, False],
        "with_firebird": [True, False],
        "with_mysql": [True, False],
        "with_postgresql": [True, False],
        "with_boost": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
        "empty": False,
        "with_sqlite3": False,
        "with_db2": False,
        "with_odbc": False,
        "with_oracle": False,
        "with_firebird": False,
        "with_mysql": False,
        "with_postgresql": False,
        "with_boost": False,
    }

    def export_sources(self):
        export_conandata_patches(self)

    def layout(self):
        cmake_layout(self, src_folder="src")

    def config_options(self):
        if self.settings.os == "Windows":
            self.options.rm_safe("fPIC")

    def configure(self):
        if self.options.shared:
            self.options.rm_safe("fPIC")

    def requirements(self):
        if self.options.with_sqlite3:
            self.requires("sqlite3/3.47.0")
        if self.options.with_odbc and self.settings.os != "Windows":
            self.requires("odbc/2.3.11")
        if self.options.with_mysql:
            self.requires("libmysqlclient/8.1.0")
        if self.options.with_postgresql:
            self.requires("libpq/15.5")
        if self.options.with_boost:
            self.requires("boost/1.86.0")

    @property
    def _minimum_compilers_version(self):
        return {
            "Visual Studio": "14",
            "gcc": "4.8",
            "clang": "3.8",
            "apple-clang": "8.0"
        }

    def validate(self):
        if self.settings.compiler.get_safe("cppstd"):
            check_min_cppstd(self, 11)

        compiler = str(self.settings.compiler)
        compiler_version = Version(self.settings.compiler.version.value)
        if compiler not in self._minimum_compilers_version:
            self.output.warning("{} recipe lacks information about the {} compiler support.".format(self.name, self.settings.compiler))
        elif compiler_version < self._minimum_compilers_version[compiler]:
            raise ConanInvalidConfiguration("{} requires a {} version >= {}".format(self.name, compiler, compiler_version))

        prefix = "Dependencies for"
        message = "not configured in this conan package."
        if self.options.with_db2:
            # self.requires("db2/0.0.0") # TODO add support for db2
            raise ConanInvalidConfiguration("{} DB2 {} ".format(prefix, message))
        if self.options.with_oracle:
            # self.requires("oracle_db/0.0.0") # TODO add support for oracle
            raise ConanInvalidConfiguration("{} ORACLE {} ".format(prefix, message))
        if self.options.with_firebird:
            # self.requires("firebird/0.0.0") # TODO add support for firebird
            raise ConanInvalidConfiguration("{} firebird {} ".format(prefix, message))

    def source(self):
        get(self, **self.conan_data["sources"][self.version], strip_root=True)

    def generate(self):
        tc = CMakeToolchain(self)

        tc.variables["SOCI_SHARED"] = self.options.shared
        tc.variables["SOCI_STATIC"] = not self.options.shared
        tc.variables["SOCI_TESTS"] = False
        tc.variables["SOCI_CXX11"] = True
        tc.variables["SOCI_EMPTY"] = self.options.empty
        tc.variables["WITH_SQLITE3"] = self.options.with_sqlite3
        tc.variables["WITH_DB2"] = self.options.with_db2
        tc.variables["WITH_ODBC"] = self.options.with_odbc
        tc.variables["WITH_ORACLE"] = self.options.with_oracle
        tc.variables["WITH_FIREBIRD"] = self.options.with_firebird
        tc.variables["WITH_MYSQL"] = self.options.with_mysql
        tc.variables["WITH_POSTGRESQL"] = self.options.with_postgresql
        tc.variables["WITH_BOOST"] = self.options.with_boost
        tc.generate()

        deps = CMakeDeps(self)
        deps.generate()

    def build(self):
        apply_conandata_patches(self)
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def package(self):
        copy(self, "LICENSE_1_0.txt", dst=os.path.join(self.package_folder, "licenses"), src=self.source_folder)

        cmake = CMake(self)
        cmake.install()

        rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))

    def package_info(self):
        self.cpp_info.set_property("cmake_file_name", "SOCI")

        target_suffix = "" if self.options.shared else "_static"
        lib_prefix = "lib" if is_msvc(self) and not self.options.shared else ""
        version = Version(self.version)
        lib_suffix = "_{}_{}".format(version.major, version.minor) if self.settings.os == "Windows" else ""

        # soci_core
        self.cpp_info.components["soci_core"].set_property("cmake_target_name", "SOCI::soci_core{}".format(target_suffix))
        self.cpp_info.components["soci_core"].libs = ["{}soci_core{}".format(lib_prefix, lib_suffix)]
        if self.options.with_boost:
            self.cpp_info.components["soci_core"].requires.append("boost::headers")

        # soci_empty
        if self.options.empty:
            self.cpp_info.components["soci_empty"].set_property("cmake_target_name", "SOCI::soci_empty{}".format(target_suffix))
            self.cpp_info.components["soci_empty"].libs = ["{}soci_empty{}".format(lib_prefix, lib_suffix)]
            self.cpp_info.components["soci_empty"].requires = ["soci_core"]

        # soci_sqlite3
        if self.options.with_sqlite3:
            self.cpp_info.components["soci_sqlite3"].set_property("cmake_target_name", "SOCI::soci_sqlite3{}".format(target_suffix))
            self.cpp_info.components["soci_sqlite3"].libs = ["{}soci_sqlite3{}".format(lib_prefix, lib_suffix)]
            self.cpp_info.components["soci_sqlite3"].requires = ["soci_core", "sqlite3::sqlite3"]

        # soci_odbc
        if self.options.with_odbc:
            self.cpp_info.components["soci_odbc"].set_property("cmake_target_name", "SOCI::soci_odbc{}".format(target_suffix))
            self.cpp_info.components["soci_odbc"].libs = ["{}soci_odbc{}".format(lib_prefix, lib_suffix)]
            self.cpp_info.components["soci_odbc"].requires = ["soci_core"]
            if self.settings.os == "Windows":
                self.cpp_info.components["soci_odbc"].system_libs.append("odbc32")
            else:
                self.cpp_info.components["soci_odbc"].requires.append("odbc::odbc")

        # soci_mysql
        if self.options.with_mysql:
            self.cpp_info.components["soci_mysql"].set_property("cmake_target_name", "SOCI::soci_mysql{}".format(target_suffix))
            self.cpp_info.components["soci_mysql"].libs = ["{}soci_mysql{}".format(lib_prefix, lib_suffix)]
            self.cpp_info.components["soci_mysql"].requires = ["soci_core", "libmysqlclient::libmysqlclient"]

        # soci_postgresql
        if self.options.with_postgresql:
            self.cpp_info.components["soci_postgresql"].set_property("cmake_target_name", "SOCI::soci_postgresql{}".format(target_suffix))
            self.cpp_info.components["soci_postgresql"].libs = ["{}soci_postgresql{}".format(lib_prefix, lib_suffix)]
            self.cpp_info.components["soci_postgresql"].requires = ["soci_core", "libpq::libpq"]

        # TODO: to remove in conan v2 once cmake_find_package* generators removed
        self.cpp_info.names["cmake_find_package"] = "SOCI"
        self.cpp_info.names["cmake_find_package_multi"] = "SOCI"
        self.cpp_info.components["soci_core"].names["cmake_find_package"] = "soci_core{}".format(target_suffix)
        self.cpp_info.components["soci_core"].names["cmake_find_package_multi"] = "soci_core{}".format(target_suffix)
        if self.options.empty:
            self.cpp_info.components["soci_empty"].names["cmake_find_package"] = "soci_empty{}".format(target_suffix)
            self.cpp_info.components["soci_empty"].names["cmake_find_package_multi"] = "soci_empty{}".format(target_suffix)
        if self.options.with_sqlite3:
            self.cpp_info.components["soci_sqlite3"].names["cmake_find_package"] = "soci_sqlite3{}".format(target_suffix)
            self.cpp_info.components["soci_sqlite3"].names["cmake_find_package_multi"] = "soci_sqlite3{}".format(target_suffix)
        if self.options.with_odbc:
            self.cpp_info.components["soci_odbc"].names["cmake_find_package"] = "soci_odbc{}".format(target_suffix)
            self.cpp_info.components["soci_odbc"].names["cmake_find_package_multi"] = "soci_odbc{}".format(target_suffix)
        if self.options.with_mysql:
            self.cpp_info.components["soci_mysql"].names["cmake_find_package"] = "soci_mysql{}".format(target_suffix)
            self.cpp_info.components["soci_mysql"].names["cmake_find_package_multi"] = "soci_mysql{}".format(target_suffix)
        if self.options.with_postgresql:
            self.cpp_info.components["soci_postgresql"].names["cmake_find_package"] = "soci_postgresql{}".format(target_suffix)
            self.cpp_info.components["soci_postgresql"].names["cmake_find_package_multi"] = "soci_postgresql{}".format(target_suffix)
@@ -1,39 +0,0 @@
From d491bf7b5040d314ffd0c6310ba01f78ff44c85e Mon Sep 17 00:00:00 2001
From: Rasmus Thomsen <rasmus.thomsen@dampsoft.de>
Date: Fri, 14 Apr 2023 09:16:29 +0200
Subject: [PATCH] Remove hardcoded INSTALL_NAME_DIR for relocatable libraries
 on MacOS

---
 cmake/SociBackend.cmake | 2 +-
 src/core/CMakeLists.txt | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/cmake/SociBackend.cmake b/cmake/SociBackend.cmake
index 5d4ef0df..39fe1f77 100644
--- a/cmake/SociBackend.cmake
+++ b/cmake/SociBackend.cmake
@@ -171,7 +171,7 @@ macro(soci_backend NAME)
   set_target_properties(${THIS_BACKEND_TARGET}
     PROPERTIES
     SOVERSION ${${PROJECT_NAME}_SOVERSION}
-    INSTALL_NAME_DIR ${CMAKE_INSTALL_PREFIX}/lib)
+    )

   if(APPLE)
     set_target_properties(${THIS_BACKEND_TARGET}
diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt
index 3e7deeae..f9eae564 100644
--- a/src/core/CMakeLists.txt
+++ b/src/core/CMakeLists.txt
@@ -59,7 +59,6 @@ if (SOCI_SHARED)
     PROPERTIES
     VERSION ${SOCI_VERSION}
     SOVERSION ${SOCI_SOVERSION}
-    INSTALL_NAME_DIR ${CMAKE_INSTALL_PREFIX}/lib
     CLEAN_DIRECT_OUTPUT 1)
 endif()

--
2.25.1

@@ -1,24 +0,0 @@
diff --git a/cmake/SociBackend.cmake b/cmake/SociBackend.cmake
index 0a664667..3fa2ed95 100644
--- a/cmake/SociBackend.cmake
+++ b/cmake/SociBackend.cmake
@@ -31,14 +31,13 @@ macro(soci_backend_deps_found NAME DEPS SUCCESS)
     if(NOT DEPEND_FOUND)
       list(APPEND DEPS_NOT_FOUND ${dep})
     else()
-      string(TOUPPER "${dep}" DEPU)
-      if( ${DEPU}_INCLUDE_DIR )
-        list(APPEND DEPS_INCLUDE_DIRS ${${DEPU}_INCLUDE_DIR})
+      if( ${dep}_INCLUDE_DIR )
+        list(APPEND DEPS_INCLUDE_DIRS ${${dep}_INCLUDE_DIR})
       endif()
-      if( ${DEPU}_INCLUDE_DIRS )
-        list(APPEND DEPS_INCLUDE_DIRS ${${DEPU}_INCLUDE_DIRS})
+      if( ${dep}_INCLUDE_DIRS )
+        list(APPEND DEPS_INCLUDE_DIRS ${${dep}_INCLUDE_DIRS})
       endif()
-      list(APPEND DEPS_LIBRARIES ${${DEPU}_LIBRARIES})
+      list(APPEND DEPS_LIBRARIES ${${dep}_LIBRARIES})
     endif()
   endforeach()

@@ -21,7 +21,6 @@
 #define RIPPLE_BASICS_SHAMAP_HASH_H_INCLUDED
 
 #include <xrpl/basics/base_uint.h>
-#include <xrpl/basics/partitioned_unordered_map.h>
 
 #include <ostream>
 
@@ -90,9 +90,6 @@ public:
    int
    getCacheSize() const;

-   int
-   getTrackSize() const;
-
    float
    getHitRate();

@@ -170,9 +167,6 @@ public:
    bool
    retrieve(key_type const& key, T& data);

-   mutex_type&
-   peekMutex();
-
    std::vector<key_type>
    getKeys() const;

@@ -193,11 +187,14 @@ public:
 
 private:
    SharedPointerType
-   initialFetch(key_type const& key, std::lock_guard<mutex_type> const& l);
+   initialFetch(key_type const& key);

    void
    collect_metrics();

+   Mutex&
+   lockPartition(key_type const& key) const;
+
 private:
    struct Stats
    {
@@ -300,8 +297,8 @@ private:
        [[maybe_unused]] clock_type::time_point const& now,
        typename KeyValueCacheType::map_type& partition,
        SweptPointersVector& stuffToSweep,
        std::atomic<int>& allRemovals,
-       std::lock_guard<std::recursive_mutex> const&);
+       Mutex& partitionLock);

    [[nodiscard]] std::thread
    sweepHelper(
@@ -310,14 +307,12 @@ private:
        typename KeyOnlyCacheType::map_type& partition,
        SweptPointersVector&,
        std::atomic<int>& allRemovals,
-       std::lock_guard<std::recursive_mutex> const&);
+       Mutex& partitionLock);

    beast::Journal m_journal;
    clock_type& m_clock;
    Stats m_stats;

-   mutex_type mutable m_mutex;
-
    // Used for logging
    std::string m_name;

@@ -328,10 +323,11 @@ private:
    clock_type::duration const m_target_age;

    // Number of items cached
-   int m_cache_count;
+   std::atomic<int> m_cache_count;
    cache_type m_cache;  // Hold strong reference to recent objects
-   std::uint64_t m_hits;
-   std::uint64_t m_misses;
+   std::atomic<std::uint64_t> m_hits;
+   std::atomic<std::uint64_t> m_misses;
+   mutable std::vector<mutex_type> partitionLocks_;
 };
 
 } // namespace ripple
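The header hunks above swap the cache's single m_mutex for one mutex per map partition (partitionLocks_) and turn the counters into relaxed atomics. A minimal, self-contained sketch of the key-partitioned locking pattern follows; it is illustrative only, and the PartitionedCounter name and its members are placeholders rather than rippled code.

#include <cstddef>
#include <functional>
#include <mutex>
#include <string>
#include <unordered_map>
#include <vector>

// Each key hashes to one partition; only that partition's mutex is taken,
// so threads working on different partitions never contend on a global lock.
class PartitionedCounter
{
public:
    explicit PartitionedCounter(std::size_t partitions)
        : locks_(partitions), maps_(partitions)
    {
    }

    void
    bump(std::string const& key)
    {
        auto const p = partitionIndex(key);
        std::lock_guard<std::mutex> lock(locks_[p]);
        ++maps_[p][key];
    }

private:
    std::size_t
    partitionIndex(std::string const& key) const
    {
        return std::hash<std::string>{}(key) % locks_.size();
    }

    std::vector<std::mutex> locks_;
    std::vector<std::unordered_map<std::string, int>> maps_;
};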
@@ -22,6 +22,7 @@
 
 #include <xrpl/basics/IntrusivePointer.ipp>
 #include <xrpl/basics/TaggedCache.h>
+#include <xrpl/beast/core/CurrentThreadName.h>
 
 namespace ripple {
 
@@ -60,6 +61,7 @@ inline TaggedCache<
    , m_hits(0)
    , m_misses(0)
 {
+   partitionLocks_ = std::vector<mutex_type>(m_cache.partitions());
 }
 
 template <
@@ -105,8 +107,13 @@ TaggedCache<
    KeyEqual,
    Mutex>::size() const
 {
-   std::lock_guard lock(m_mutex);
-   return m_cache.size();
+   std::size_t totalSize = 0;
+   for (size_t i = 0; i < partitionLocks_.size(); ++i)
+   {
+       std::lock_guard<Mutex> lock(partitionLocks_[i]);
+       totalSize += m_cache.map()[i].size();
+   }
+   return totalSize;
 }
 
 template <
@@ -129,32 +136,7 @@ TaggedCache<
    KeyEqual,
    Mutex>::getCacheSize() const
 {
-   std::lock_guard lock(m_mutex);
-   return m_cache_count;
-}
-
-template <
-   class Key,
-   class T,
-   bool IsKeyCache,
-   class SharedWeakUnionPointer,
-   class SharedPointerType,
-   class Hash,
-   class KeyEqual,
-   class Mutex>
-inline int
-TaggedCache<
-   Key,
-   T,
-   IsKeyCache,
-   SharedWeakUnionPointer,
-   SharedPointerType,
-   Hash,
-   KeyEqual,
-   Mutex>::getTrackSize() const
-{
-   std::lock_guard lock(m_mutex);
-   return m_cache.size();
+   return m_cache_count.load(std::memory_order_relaxed);
 }
 
 template <
@@ -177,9 +159,10 @@ TaggedCache<
    KeyEqual,
    Mutex>::getHitRate()
 {
-   std::lock_guard lock(m_mutex);
-   auto const total = static_cast<float>(m_hits + m_misses);
-   return m_hits * (100.0f / std::max(1.0f, total));
+   auto const hits = m_hits.load(std::memory_order_relaxed);
+   auto const misses = m_misses.load(std::memory_order_relaxed);
+   float const total = float(hits + misses);
+   return hits * (100.0f / std::max(1.0f, total));
 }
 
 template <
@@ -202,9 +185,12 @@ TaggedCache<
    KeyEqual,
    Mutex>::clear()
 {
-   std::lock_guard lock(m_mutex);
+   for (auto& mutex : partitionLocks_)
+       mutex.lock();
    m_cache.clear();
-   m_cache_count = 0;
+   for (auto& mutex : partitionLocks_)
+       mutex.unlock();
+   m_cache_count.store(0, std::memory_order_relaxed);
 }
 
 template <
@@ -227,11 +213,9 @@ TaggedCache<
    KeyEqual,
    Mutex>::reset()
 {
-   std::lock_guard lock(m_mutex);
-   m_cache.clear();
-   m_cache_count = 0;
-   m_hits = 0;
-   m_misses = 0;
+   clear();
+   m_hits.store(0, std::memory_order_relaxed);
+   m_misses.store(0, std::memory_order_relaxed);
 }
 
 template <
@@ -255,7 +239,7 @@ TaggedCache<
    KeyEqual,
    Mutex>::touch_if_exists(KeyComparable const& key)
 {
-   std::lock_guard lock(m_mutex);
+   std::lock_guard<Mutex> lock(lockPartition(key));
    auto const iter(m_cache.find(key));
    if (iter == m_cache.end())
    {
@@ -297,8 +281,6 @@ TaggedCache<
 
    auto const start = std::chrono::steady_clock::now();
    {
-       std::lock_guard lock(m_mutex);
-
        if (m_target_size == 0 ||
            (static_cast<int>(m_cache.size()) <= m_target_size))
        {
@@ -330,12 +312,13 @@ TaggedCache<
                m_cache.map()[p],
                allStuffToSweep[p],
                allRemovals,
-               lock));
+               partitionLocks_[p]));
        }
        for (std::thread& worker : workers)
            worker.join();

-       m_cache_count -= allRemovals;
+       int removals = allRemovals.load(std::memory_order_relaxed);
+       m_cache_count.fetch_sub(removals, std::memory_order_relaxed);
    }
    // At this point allStuffToSweep will go out of scope outside the lock
    // and decrement the reference count on each strong pointer.
@@ -369,7 +352,8 @@ TaggedCache<
 {
    // Remove from cache, if !valid, remove from map too. Returns true if
    // removed from cache
-   std::lock_guard lock(m_mutex);
+   std::lock_guard<Mutex> lock(lockPartition(key));
+
    auto cit = m_cache.find(key);

@@ -382,7 +366,7 @@ TaggedCache<
 
    if (entry.isCached())
    {
-       --m_cache_count;
+       m_cache_count.fetch_sub(1, std::memory_order_relaxed);
        entry.ptr.convertToWeak();
        ret = true;
    }
@@ -420,17 +404,16 @@ TaggedCache<
 {
    // Return canonical value, store if needed, refresh in cache
    // Return values: true=we had the data already
-   std::lock_guard lock(m_mutex);
-
+   std::lock_guard<Mutex> lock(lockPartition(key));
    auto cit = m_cache.find(key);

    if (cit == m_cache.end())
    {
        m_cache.emplace(
            std::piecewise_construct,
            std::forward_as_tuple(key),
            std::forward_as_tuple(m_clock.now(), data));
-       ++m_cache_count;
+       m_cache_count.fetch_add(1, std::memory_order_relaxed);
        return false;
    }

@@ -479,12 +462,12 @@ TaggedCache<
            data = cachedData;
        }

-       ++m_cache_count;
+       m_cache_count.fetch_add(1, std::memory_order_relaxed);
        return true;
    }

    entry.ptr = data;
-   ++m_cache_count;
+   m_cache_count.fetch_add(1, std::memory_order_relaxed);

    return false;
 }
@@ -560,10 +543,11 @@ TaggedCache<
    KeyEqual,
    Mutex>::fetch(key_type const& key)
 {
-   std::lock_guard<mutex_type> l(m_mutex);
-   auto ret = initialFetch(key, l);
+   std::lock_guard<Mutex> lock(lockPartition(key));
+
+   auto ret = initialFetch(key);
    if (!ret)
-       ++m_misses;
+       m_misses.fetch_add(1, std::memory_order_relaxed);
    return ret;
 }
 
@@ -627,8 +611,8 @@ TaggedCache<
    Mutex>::insert(key_type const& key)
    -> std::enable_if_t<IsKeyCache, ReturnType>
 {
-   std::lock_guard lock(m_mutex);
    clock_type::time_point const now(m_clock.now());
+   std::lock_guard<Mutex> lock(lockPartition(key));
    auto [it, inserted] = m_cache.emplace(
        std::piecewise_construct,
        std::forward_as_tuple(key),
@@ -668,29 +652,6 @@ TaggedCache<
    return true;
 }
 
-template <
-   class Key,
-   class T,
-   bool IsKeyCache,
-   class SharedWeakUnionPointer,
-   class SharedPointerType,
-   class Hash,
-   class KeyEqual,
-   class Mutex>
-inline auto
-TaggedCache<
-   Key,
-   T,
-   IsKeyCache,
-   SharedWeakUnionPointer,
-   SharedPointerType,
-   Hash,
-   KeyEqual,
-   Mutex>::peekMutex() -> mutex_type&
-{
-   return m_mutex;
-}
-
 template <
    class Key,
    class T,
@@ -714,10 +675,13 @@ TaggedCache<
    std::vector<key_type> v;

    {
-       std::lock_guard lock(m_mutex);
        v.reserve(m_cache.size());
-       for (auto const& _ : m_cache)
-           v.push_back(_.first);
+       for (std::size_t i = 0; i < partitionLocks_.size(); ++i)
+       {
+           std::lock_guard<Mutex> lock(partitionLocks_[i]);
+           for (auto const& entry : m_cache.map()[i])
+               v.push_back(entry.first);
+       }
    }

    return v;
@@ -743,11 +707,12 @@ TaggedCache<
    KeyEqual,
    Mutex>::rate() const
 {
-   std::lock_guard lock(m_mutex);
-   auto const tot = m_hits + m_misses;
+   auto const hits = m_hits.load(std::memory_order_relaxed);
+   auto const misses = m_misses.load(std::memory_order_relaxed);
+   auto const tot = hits + misses;
    if (tot == 0)
-       return 0;
-   return double(m_hits) / tot;
+       return 0.0;
+   return double(hits) / tot;
 }
 
 template <
@@ -771,18 +736,16 @@ TaggedCache<
    KeyEqual,
    Mutex>::fetch(key_type const& digest, Handler const& h)
 {
-   {
-       std::lock_guard l(m_mutex);
-       if (auto ret = initialFetch(digest, l))
-           return ret;
-   }
+   std::lock_guard<Mutex> lock(lockPartition(digest));
+   if (auto ret = initialFetch(digest))
+       return ret;

    auto sle = h();
    if (!sle)
        return {};

-   std::lock_guard l(m_mutex);
-   ++m_misses;
+   m_misses.fetch_add(1, std::memory_order_relaxed);
    auto const [it, inserted] =
        m_cache.emplace(digest, Entry(m_clock.now(), std::move(sle)));
    if (!inserted)
@@ -809,9 +772,10 @@ TaggedCache<
    SharedPointerType,
    Hash,
    KeyEqual,
-   Mutex>::
-   initialFetch(key_type const& key, std::lock_guard<mutex_type> const& l)
+   Mutex>::initialFetch(key_type const& key)
 {
+   std::lock_guard<Mutex> lock(lockPartition(key));
+
    auto cit = m_cache.find(key);
    if (cit == m_cache.end())
        return {};
@@ -819,7 +783,7 @@ TaggedCache<
    Entry& entry = cit->second;
    if (entry.isCached())
    {
-       ++m_hits;
+       m_hits.fetch_add(1, std::memory_order_relaxed);
        entry.touch(m_clock.now());
        return entry.ptr.getStrong();
    }
@@ -827,12 +791,13 @@ TaggedCache<
    if (entry.isCached())
    {
        // independent of cache size, so not counted as a hit
-       ++m_cache_count;
+       m_cache_count.fetch_add(1, std::memory_order_relaxed);
        entry.touch(m_clock.now());
        return entry.ptr.getStrong();
    }

    m_cache.erase(cit);

    return {};
 }
 
@@ -861,10 +826,11 @@ TaggedCache<
 {
    beast::insight::Gauge::value_type hit_rate(0);
    {
-       std::lock_guard lock(m_mutex);
-       auto const total(m_hits + m_misses);
+       auto const hits = m_hits.load(std::memory_order_relaxed);
+       auto const misses = m_misses.load(std::memory_order_relaxed);
+       auto const total = hits + misses;
        if (total != 0)
-           hit_rate = (m_hits * 100) / total;
+           hit_rate = (hits * 100) / total;
    }
    m_stats.hit_rate.set(hit_rate);
 }
@@ -895,12 +861,16 @@ TaggedCache<
    typename KeyValueCacheType::map_type& partition,
    SweptPointersVector& stuffToSweep,
    std::atomic<int>& allRemovals,
-   std::lock_guard<std::recursive_mutex> const&)
+   Mutex& partitionLock)
 {
    return std::thread([&, this]() {
+       beast::setCurrentThreadName("sweep-KVCache");
+
        int cacheRemovals = 0;
        int mapRemovals = 0;

+       std::lock_guard<Mutex> lock(partitionLock);
+
        // Keep references to all the stuff we sweep
        // so that we can destroy them outside the lock.
        stuffToSweep.reserve(partition.size());
@@ -984,12 +954,16 @@ TaggedCache<
    typename KeyOnlyCacheType::map_type& partition,
    SweptPointersVector&,
    std::atomic<int>& allRemovals,
-   std::lock_guard<std::recursive_mutex> const&)
+   Mutex& partitionLock)
 {
    return std::thread([&, this]() {
+       beast::setCurrentThreadName("sweep-KCache");
+
        int cacheRemovals = 0;
        int mapRemovals = 0;

+       std::lock_guard<Mutex> lock(partitionLock);
+
        // Keep references to all the stuff we sweep
        // so that we can destroy them outside the lock.
        {
@@ -1024,6 +998,29 @@ TaggedCache<
    });
 }
 
+template <
+   class Key,
+   class T,
+   bool IsKeyCache,
+   class SharedWeakUnionPointer,
+   class SharedPointerType,
+   class Hash,
+   class KeyEqual,
+   class Mutex>
+inline Mutex&
+TaggedCache<
+   Key,
+   T,
+   IsKeyCache,
+   SharedWeakUnionPointer,
+   SharedPointerType,
+   Hash,
+   KeyEqual,
+   Mutex>::lockPartition(key_type const& key) const
+{
+   return partitionLocks_[m_cache.partition_index(key)];
+}
+
 } // namespace ripple
 
 #endif
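Throughout the implementation above, the hit, miss, and item counters move from plain integers guarded by m_mutex to std::atomic members updated with memory_order_relaxed; they only feed statistics, so they need atomicity but no ordering with the partition-guarded map. A small stand-alone sketch of that counter pattern, assuming nothing beyond the standard library:

#include <atomic>
#include <cstdint>

struct CacheStats
{
    std::atomic<std::uint64_t> hits{0};
    std::atomic<std::uint64_t> misses{0};

    void
    onHit()
    {
        hits.fetch_add(1, std::memory_order_relaxed);
    }

    void
    onMiss()
    {
        misses.fetch_add(1, std::memory_order_relaxed);
    }

    // The two loads may be slightly out of step with each other, which is
    // acceptable for a statistics-only hit rate.
    double
    rate() const
    {
        auto const h = hits.load(std::memory_order_relaxed);
        auto const m = misses.load(std::memory_order_relaxed);
        auto const total = h + m;
        return total == 0 ? 0.0 : double(h) / total;
    }
};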
@@ -277,6 +277,12 @@ public:
        return map_;
    }

+   partition_map_type const&
+   map() const
+   {
+       return map_;
+   }
+
    iterator
    begin()
    {
@@ -321,6 +327,12 @@ public:
        return cend();
    }

+   std::size_t
+   partition_index(key_type const& key) const
+   {
+       return partitioner(key);
+   }
+
 private:
    template <class T>
    void
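The new const map() accessor and partition_index() let a caller that shards its own mutexes locate the partition owning a key, which is how TaggedCache::lockPartition() uses them above. A hedged sketch of that lookup, assuming the rippled headers are on the include path; the lockFor helper is hypothetical:

#include <xrpl/basics/partitioned_unordered_map.h>

#include <mutex>
#include <vector>

// Return the mutex guarding the partition that owns `key`.
template <class PartitionedMap, class Key>
std::mutex&
lockFor(PartitionedMap const& map, std::vector<std::mutex>& locks, Key const& key)
{
    return locks[map.partition_index(key)];
}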
@@ -24,32 +24,111 @@
 
 #include <xxhash.h>
 
+#include <array>
 #include <cstddef>
-#include <new>
-#include <type_traits>
+#include <cstdint>
+#include <optional>
+#include <span>
 
 namespace beast {
 
 class xxhasher
 {
-private:
-    // requires 64-bit std::size_t
-    static_assert(sizeof(std::size_t) == 8, "");
+public:
+    using result_type = std::size_t;
 
-    XXH3_state_t* state_;
+private:
+    static_assert(sizeof(std::size_t) == 8, "requires 64-bit std::size_t");
+
+    // Have an internal buffer to avoid the streaming API
+    // A 64-byte buffer should to be big enough for us
+    static constexpr std::size_t INTERNAL_BUFFER_SIZE = 64;
+
+    alignas(64) std::array<std::uint8_t, INTERNAL_BUFFER_SIZE> buffer_;
+    std::span<std::uint8_t> readBuffer_;
+    std::span<std::uint8_t> writeBuffer_;
+
+    std::optional<XXH64_hash_t> seed_;
+    XXH3_state_t* state_ = nullptr;
+
+    void
+    resetBuffers()
+    {
+        writeBuffer_ = std::span{buffer_};
+        readBuffer_ = {};
+    }
+
+    void
+    updateHash(void const* data, std::size_t len)
+    {
+        if (writeBuffer_.size() < len)
+        {
+            flushToState(data, len);
+        }
+        else
+        {
+            std::memcpy(writeBuffer_.data(), data, len);
+            writeBuffer_ = writeBuffer_.subspan(len);
+            readBuffer_ = std::span{
+                std::begin(buffer_), buffer_.size() - writeBuffer_.size()};
+        }
+    }
 
     static XXH3_state_t*
     allocState()
     {
         auto ret = XXH3_createState();
         if (ret == nullptr)
-            throw std::bad_alloc();
+            throw std::bad_alloc();  // LCOV_EXCL_LINE
         return ret;
     }
 
-public:
-    using result_type = std::size_t;
+    void
+    flushToState(void const* data, std::size_t len)
+    {
+        if (!state_)
+        {
+            state_ = allocState();
+            if (seed_.has_value())
+            {
+                XXH3_64bits_reset_withSeed(state_, *seed_);
+            }
+            else
+            {
+                XXH3_64bits_reset(state_);
+            }
+        }
+        XXH3_64bits_update(state_, readBuffer_.data(), readBuffer_.size());
+        resetBuffers();
+        if (data && len)
+        {
+            XXH3_64bits_update(state_, data, len);
+        }
+    }
+
+    result_type
+    retrieveHash()
+    {
+        if (state_)
+        {
+            flushToState(nullptr, 0);
+            return XXH3_64bits_digest(state_);
+        }
+        else
+        {
+            if (seed_.has_value())
+            {
+                return XXH3_64bits_withSeed(
+                    readBuffer_.data(), readBuffer_.size(), *seed_);
+            }
+            else
+            {
+                return XXH3_64bits(readBuffer_.data(), readBuffer_.size());
+            }
+        }
+    }
+
+public:
     static constexpr auto const endian = boost::endian::order::native;
 
     xxhasher(xxhasher const&) = delete;
@@ -58,43 +137,43 @@ public:
 
     xxhasher()
     {
-        state_ = allocState();
-        XXH3_64bits_reset(state_);
+        resetBuffers();
     }
 
     ~xxhasher() noexcept
+    {
+        if (state_)
     {
         XXH3_freeState(state_);
     }
-
-    template <
-        class Seed,
-        std::enable_if_t<std::is_unsigned<Seed>::value>* = nullptr>
-    explicit xxhasher(Seed seed)
-    {
-        state_ = allocState();
-        XXH3_64bits_reset_withSeed(state_, seed);
     }
 
     template <
         class Seed,
        std::enable_if_t<std::is_unsigned<Seed>::value>* = nullptr>
-    xxhasher(Seed seed, Seed)
+    explicit xxhasher(Seed seed) : seed_(seed)
     {
-        state_ = allocState();
-        XXH3_64bits_reset_withSeed(state_, seed);
+        resetBuffers();
+    }
+
+    template <
+        class Seed,
+        std::enable_if_t<std::is_unsigned<Seed>::value>* = nullptr>
+    xxhasher(Seed seed, Seed) : seed_(seed)
+    {
+        resetBuffers();
     }
 
     void
     operator()(void const* key, std::size_t len) noexcept
     {
-        XXH3_64bits_update(state_, key, len);
+        updateHash(key, len);
     }
 
     explicit
-    operator std::size_t() noexcept
+    operator result_type() noexcept
     {
-        return XXH3_64bits_digest(state_);
+        return retrieveHash();
     }
 };
 
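The reworked hasher buffers small updates in a 64-byte array and only allocates an XXH3 streaming state when the buffer overflows; for inputs that fit the buffer it uses the one-shot XXH3 functions, which produce the same digest as the streaming path. The public interface is unchanged, so callers keep the pattern the new unit tests below exercise:

#include <xrpl/beast/hash/xxhasher.h>

#include <cstddef>
#include <string>

// Feed bytes through operator() and read the digest by converting to
// result_type, as in xxhasher_test.cpp.
std::size_t
hashOf(std::string const& s)
{
    beast::xxhasher h;
    h(s.data(), s.size());
    return static_cast<beast::xxhasher::result_type>(h);
}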
@@ -22,7 +22,6 @@
 
 #include <xrpl/basics/ByteUtilities.h>
 #include <xrpl/basics/base_uint.h>
-#include <xrpl/basics/partitioned_unordered_map.h>
 
 #include <cstdint>
 
@@ -32,6 +32,7 @@
 // If you add an amendment here, then do not forget to increment `numFeatures`
 // in include/xrpl/protocol/Feature.h.
 
+XRPL_FIX (PriceOracleOrder, Supported::no, VoteBehavior::DefaultNo)
 XRPL_FIX (MPTDeliveredAmount, Supported::no, VoteBehavior::DefaultNo)
 XRPL_FIX (AMMClawbackRounding, Supported::no, VoteBehavior::DefaultNo)
 XRPL_FEATURE(TokenEscrow, Supported::yes, VoteBehavior::DefaultNo)
@@ -678,6 +678,61 @@ private:
            oracle.set(
                UpdateArg{.series = {{"XRP", "USD", 742, 2}}, .fee = baseFee});
        }

+       for (bool const withFixOrder : {false, true})
+       {
+           // Should be same order as creation
+           Env env(
+               *this,
+               withFixOrder ? testable_amendments()
+                            : testable_amendments() - fixPriceOracleOrder);
+           auto const baseFee =
+               static_cast<int>(env.current()->fees().base.drops());
+
+           auto test = [&](Env& env, DataSeries const& series) {
+               env.fund(XRP(1'000), owner);
+               Oracle oracle(
+                   env, {.owner = owner, .series = series, .fee = baseFee});
+               BEAST_EXPECT(oracle.exists());
+               auto sle = env.le(keylet::oracle(owner, oracle.documentID()));
+               BEAST_EXPECT(
+                   sle->getFieldArray(sfPriceDataSeries).size() ==
+                   series.size());
+
+               auto const beforeQuoteAssetName1 =
+                   sle->getFieldArray(sfPriceDataSeries)[0]
+                       .getFieldCurrency(sfQuoteAsset)
+                       .getText();
+               auto const beforeQuoteAssetName2 =
+                   sle->getFieldArray(sfPriceDataSeries)[1]
+                       .getFieldCurrency(sfQuoteAsset)
+                       .getText();
+
+               oracle.set(UpdateArg{.series = series, .fee = baseFee});
+               sle = env.le(keylet::oracle(owner, oracle.documentID()));
+
+               auto const afterQuoteAssetName1 =
+                   sle->getFieldArray(sfPriceDataSeries)[0]
+                       .getFieldCurrency(sfQuoteAsset)
+                       .getText();
+               auto const afterQuoteAssetName2 =
+                   sle->getFieldArray(sfPriceDataSeries)[1]
+                       .getFieldCurrency(sfQuoteAsset)
+                       .getText();
+
+               if (env.current()->rules().enabled(fixPriceOracleOrder))
+               {
+                   BEAST_EXPECT(afterQuoteAssetName1 == beforeQuoteAssetName1);
+                   BEAST_EXPECT(afterQuoteAssetName2 == beforeQuoteAssetName2);
+               }
+               else
+               {
+                   BEAST_EXPECT(afterQuoteAssetName1 != beforeQuoteAssetName1);
+                   BEAST_EXPECT(afterQuoteAssetName2 != beforeQuoteAssetName2);
+               }
+           };
+           test(env, {{"XRP", "USD", 742, 2}, {"XRP", "EUR", 711, 2}});
+       }
    }

    void
@@ -58,10 +58,10 @@ public:
        // Insert an item, retrieve it, and age it so it gets purged.
        {
            BEAST_EXPECT(c.getCacheSize() == 0);
-           BEAST_EXPECT(c.getTrackSize() == 0);
+           BEAST_EXPECT(c.size() == 0);
            BEAST_EXPECT(!c.insert(1, "one"));
            BEAST_EXPECT(c.getCacheSize() == 1);
-           BEAST_EXPECT(c.getTrackSize() == 1);
+           BEAST_EXPECT(c.size() == 1);

            {
                std::string s;
@@ -72,7 +72,7 @@ public:
            ++clock;
            c.sweep();
            BEAST_EXPECT(c.getCacheSize() == 0);
-           BEAST_EXPECT(c.getTrackSize() == 0);
+           BEAST_EXPECT(c.size() == 0);
        }

        // Insert an item, maintain a strong pointer, age it, and
@@ -80,7 +80,7 @@ public:
        {
            BEAST_EXPECT(!c.insert(2, "two"));
            BEAST_EXPECT(c.getCacheSize() == 1);
-           BEAST_EXPECT(c.getTrackSize() == 1);
+           BEAST_EXPECT(c.size() == 1);

            {
                auto p = c.fetch(2);
@@ -88,14 +88,14 @@ public:
                ++clock;
                c.sweep();
                BEAST_EXPECT(c.getCacheSize() == 0);
-               BEAST_EXPECT(c.getTrackSize() == 1);
+               BEAST_EXPECT(c.size() == 1);
            }

            // Make sure its gone now that our reference is gone
            ++clock;
            c.sweep();
            BEAST_EXPECT(c.getCacheSize() == 0);
-           BEAST_EXPECT(c.getTrackSize() == 0);
+           BEAST_EXPECT(c.size() == 0);
        }

        // Insert the same key/value pair and make sure we get the same result
@@ -111,7 +111,7 @@ public:
            ++clock;
            c.sweep();
            BEAST_EXPECT(c.getCacheSize() == 0);
-           BEAST_EXPECT(c.getTrackSize() == 0);
+           BEAST_EXPECT(c.size() == 0);
        }

        // Put an object in but keep a strong pointer to it, advance the clock a
@@ -121,24 +121,24 @@ public:
            // Put an object in
            BEAST_EXPECT(!c.insert(4, "four"));
            BEAST_EXPECT(c.getCacheSize() == 1);
-           BEAST_EXPECT(c.getTrackSize() == 1);
+           BEAST_EXPECT(c.size() == 1);

            {
                // Keep a strong pointer to it
                auto const p1 = c.fetch(4);
                BEAST_EXPECT(p1 != nullptr);
                BEAST_EXPECT(c.getCacheSize() == 1);
-               BEAST_EXPECT(c.getTrackSize() == 1);
+               BEAST_EXPECT(c.size() == 1);
                // Advance the clock a lot
                ++clock;
                c.sweep();
                BEAST_EXPECT(c.getCacheSize() == 0);
-               BEAST_EXPECT(c.getTrackSize() == 1);
+               BEAST_EXPECT(c.size() == 1);
                // Canonicalize a new object with the same key
                auto p2 = std::make_shared<std::string>("four");
                BEAST_EXPECT(c.canonicalize_replace_client(4, p2));
                BEAST_EXPECT(c.getCacheSize() == 1);
-               BEAST_EXPECT(c.getTrackSize() == 1);
+               BEAST_EXPECT(c.size() == 1);
                // Make sure we get the original object
                BEAST_EXPECT(p1.get() == p2.get());
            }
@@ -146,7 +146,7 @@ public:
            ++clock;
            c.sweep();
            BEAST_EXPECT(c.getCacheSize() == 0);
-           BEAST_EXPECT(c.getTrackSize() == 0);
+           BEAST_EXPECT(c.size() == 0);
        }
    }
 };
201 src/test/beast/xxhasher_test.cpp Normal file
@@ -0,0 +1,201 @@
//------------------------------------------------------------------------------
/*
    This file is part of rippled: https://github.com/ripple/rippled
    Copyright (c) 2025 Ripple Labs Inc.

    Permission to use, copy, modify, and/or distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#include <xrpl/beast/hash/xxhasher.h>
#include <xrpl/beast/unit_test.h>

namespace beast {

class XXHasher_test : public unit_test::suite
{
public:
    void
    testWithoutSeed()
    {
        testcase("Without Seed");

        xxhasher hasher{};

        std::string objectToHash{"Hello, xxHash!"};
        hasher(objectToHash.data(), objectToHash.size());

        BEAST_EXPECT(
            static_cast<xxhasher::result_type>(hasher) ==
            16042857369214894119ULL);
    }

    void
    testWithSeed()
    {
        testcase("With Seed");

        xxhasher hasher{static_cast<std::uint32_t>(102)};

        std::string objectToHash{"Hello, xxHash!"};
        hasher(objectToHash.data(), objectToHash.size());

        BEAST_EXPECT(
            static_cast<xxhasher::result_type>(hasher) ==
            14440132435660934800ULL);
    }

    void
    testWithTwoSeeds()
    {
        testcase("With Two Seeds");
        xxhasher hasher{
            static_cast<std::uint32_t>(102), static_cast<std::uint32_t>(103)};

        std::string objectToHash{"Hello, xxHash!"};
        hasher(objectToHash.data(), objectToHash.size());

        BEAST_EXPECT(
            static_cast<xxhasher::result_type>(hasher) ==
            14440132435660934800ULL);
    }

    void
    testBigObjectWithMultiupleSmallUpdatesWithoutSeed()
    {
        testcase("Big Object With Multiuple Small Updates Without Seed");
        xxhasher hasher{};

        std::string objectToHash{"Hello, xxHash!"};
        for (int i = 0; i < 100; i++)
        {
            hasher(objectToHash.data(), objectToHash.size());
        }

        BEAST_EXPECT(
            static_cast<xxhasher::result_type>(hasher) ==
            15296278154063476002ULL);
    }

    void
    testBigObjectWithMultiupleSmallUpdatesWithSeed()
    {
        testcase("Big Object With Multiuple Small Updates With Seed");
        xxhasher hasher{static_cast<std::uint32_t>(103)};

        std::string objectToHash{"Hello, xxHash!"};
        for (int i = 0; i < 100; i++)
        {
            hasher(objectToHash.data(), objectToHash.size());
        }

        BEAST_EXPECT(
            static_cast<xxhasher::result_type>(hasher) ==
            17285302196561698791ULL);
    }

    void
    testBigObjectWithSmallAndBigUpdatesWithoutSeed()
    {
        testcase("Big Object With Small And Big Updates Without Seed");
        xxhasher hasher{};

        std::string objectToHash{"Hello, xxHash!"};
        std::string bigObject;
        for (int i = 0; i < 20; i++)
        {
            bigObject += "Hello, xxHash!";
        }
        hasher(objectToHash.data(), objectToHash.size());
        hasher(bigObject.data(), bigObject.size());
        hasher(objectToHash.data(), objectToHash.size());

        BEAST_EXPECT(
            static_cast<xxhasher::result_type>(hasher) ==
            1865045178324729219ULL);
    }

    void
    testBigObjectWithSmallAndBigUpdatesWithSeed()
    {
        testcase("Big Object With Small And Big Updates With Seed");
        xxhasher hasher{static_cast<std::uint32_t>(103)};

        std::string objectToHash{"Hello, xxHash!"};
        std::string bigObject;
        for (int i = 0; i < 20; i++)
        {
            bigObject += "Hello, xxHash!";
        }
        hasher(objectToHash.data(), objectToHash.size());
        hasher(bigObject.data(), bigObject.size());
        hasher(objectToHash.data(), objectToHash.size());

        BEAST_EXPECT(
            static_cast<xxhasher::result_type>(hasher) ==
            16189862915636005281ULL);
    }

    void
    testBigObjectWithOneUpdateWithoutSeed()
    {
        testcase("Big Object With One Update Without Seed");
        xxhasher hasher{};

        std::string objectToHash;
        for (int i = 0; i < 100; i++)
        {
            objectToHash += "Hello, xxHash!";
        }
        hasher(objectToHash.data(), objectToHash.size());

        BEAST_EXPECT(
            static_cast<xxhasher::result_type>(hasher) ==
            15296278154063476002ULL);
    }

    void
    testBigObjectWithOneUpdateWithSeed()
    {
        testcase("Big Object With One Update With Seed");
        xxhasher hasher{static_cast<std::uint32_t>(103)};

        std::string objectToHash;
        for (int i = 0; i < 100; i++)
        {
            objectToHash += "Hello, xxHash!";
        }
        hasher(objectToHash.data(), objectToHash.size());

        BEAST_EXPECT(
            static_cast<xxhasher::result_type>(hasher) ==
            17285302196561698791ULL);
    }

    void
    run() override
    {
        testWithoutSeed();
        testWithSeed();
        testWithTwoSeeds();
        testBigObjectWithMultiupleSmallUpdatesWithoutSeed();
        testBigObjectWithMultiupleSmallUpdatesWithSeed();
        testBigObjectWithSmallAndBigUpdatesWithoutSeed();
        testBigObjectWithSmallAndBigUpdatesWithSeed();
        testBigObjectWithOneUpdateWithoutSeed();
        testBigObjectWithOneUpdateWithSeed();
    }
};

BEAST_DEFINE_TESTSUITE(XXHasher, beast_core, beast);
} // namespace beast
@@ -63,8 +63,6 @@ LedgerHistory::insert(
        ledger->stateMap().getHash().isNonZero(),
        "ripple::LedgerHistory::insert : nonzero hash");

-   std::unique_lock sl(m_ledgers_by_hash.peekMutex());
-
    bool const alreadyHad = m_ledgers_by_hash.canonicalize_replace_cache(
        ledger->info().hash, ledger);
    if (validated)
@@ -76,7 +74,6 @@ LedgerHistory::insert(
 LedgerHash
 LedgerHistory::getLedgerHash(LedgerIndex index)
 {
-   std::unique_lock sl(m_ledgers_by_hash.peekMutex());
    if (auto it = mLedgersByIndex.find(index); it != mLedgersByIndex.end())
        return it->second;
    return {};
@@ -86,13 +83,11 @@ std::shared_ptr<Ledger const>
 LedgerHistory::getLedgerBySeq(LedgerIndex index)
 {
    {
-       std::unique_lock sl(m_ledgers_by_hash.peekMutex());
        auto it = mLedgersByIndex.find(index);

        if (it != mLedgersByIndex.end())
        {
            uint256 hash = it->second;
-           sl.unlock();
            return getLedgerByHash(hash);
        }
    }
@@ -108,7 +103,6 @@ LedgerHistory::getLedgerBySeq(LedgerIndex index)
 
    {
        // Add this ledger to the local tracking by index
-       std::unique_lock sl(m_ledgers_by_hash.peekMutex());

        XRPL_ASSERT(
            ret->isImmutable(),
@@ -458,8 +452,6 @@ LedgerHistory::builtLedger(
    XRPL_ASSERT(
        !hash.isZero(), "ripple::LedgerHistory::builtLedger : nonzero hash");

-   std::unique_lock sl(m_consensus_validated.peekMutex());
-
    auto entry = std::make_shared<cv_entry>();
    m_consensus_validated.canonicalize_replace_client(index, entry);

@@ -500,8 +492,6 @@ LedgerHistory::validatedLedger(
        !hash.isZero(),
        "ripple::LedgerHistory::validatedLedger : nonzero hash");

-   std::unique_lock sl(m_consensus_validated.peekMutex());
-
    auto entry = std::make_shared<cv_entry>();
    m_consensus_validated.canonicalize_replace_client(index, entry);

@@ -535,10 +525,9 @@ LedgerHistory::validatedLedger(
 bool
 LedgerHistory::fixIndex(LedgerIndex ledgerIndex, LedgerHash const& ledgerHash)
 {
-   std::unique_lock sl(m_ledgers_by_hash.peekMutex());
+   auto ledger = m_ledgers_by_hash.fetch(ledgerHash);
    auto it = mLedgersByIndex.find(ledgerIndex);
-   if ((it != mLedgersByIndex.end()) && (it->second != ledgerHash))
+   if (ledger && (it != mLedgersByIndex.end()) && (it->second != ledgerHash))
    {
        it->second = ledgerHash;
        return false;
@@ -175,126 +175,18 @@ MPTokenAuthorize::createMPToken(
     return tesSUCCESS;
 }
 
-TER
-MPTokenAuthorize::authorize(
-    ApplyView& view,
-    beast::Journal journal,
-    MPTAuthorizeArgs const& args)
-{
-    auto const sleAcct = view.peek(keylet::account(args.account));
-    if (!sleAcct)
-        return tecINTERNAL;
-
-    // If the account that submitted the tx is a holder
-    // Note: `account_` is holder's account
-    //       `holderID` is NOT used
-    if (!args.holderID)
-    {
-        // When a holder wants to unauthorize/delete a MPT, the ledger must
-        //      - delete mptokenKey from owner directory
-        //      - delete the MPToken
-        if (args.flags & tfMPTUnauthorize)
-        {
-            auto const mptokenKey =
-                keylet::mptoken(args.mptIssuanceID, args.account);
-            auto const sleMpt = view.peek(mptokenKey);
-            if (!sleMpt || (*sleMpt)[sfMPTAmount] != 0)
-                return tecINTERNAL;  // LCOV_EXCL_LINE
-
-            if (!view.dirRemove(
-                    keylet::ownerDir(args.account),
-                    (*sleMpt)[sfOwnerNode],
-                    sleMpt->key(),
-                    false))
-                return tecINTERNAL;  // LCOV_EXCL_LINE
-
-            adjustOwnerCount(view, sleAcct, -1, journal);
-
-            view.erase(sleMpt);
-            return tesSUCCESS;
-        }
-
-        // A potential holder wants to authorize/hold a mpt, the ledger must:
-        //      - add the new mptokenKey to the owner directory
-        //      - create the MPToken object for the holder
-
-        // The reserve that is required to create the MPToken. Note
-        // that although the reserve increases with every item
-        // an account owns, in the case of MPTokens we only
-        // *enforce* a reserve if the user owns more than two
-        // items. This is similar to the reserve requirements of trust lines.
-        std::uint32_t const uOwnerCount = sleAcct->getFieldU32(sfOwnerCount);
-        XRPAmount const reserveCreate(
-            (uOwnerCount < 2) ? XRPAmount(beast::zero)
-                              : view.fees().accountReserve(uOwnerCount + 1));
-
-        if (args.priorBalance < reserveCreate)
-            return tecINSUFFICIENT_RESERVE;
-
-        auto const mptokenKey =
-            keylet::mptoken(args.mptIssuanceID, args.account);
-        auto mptoken = std::make_shared<SLE>(mptokenKey);
-        if (auto ter = dirLink(view, args.account, mptoken))
-            return ter;  // LCOV_EXCL_LINE
-
-        (*mptoken)[sfAccount] = args.account;
-        (*mptoken)[sfMPTokenIssuanceID] = args.mptIssuanceID;
-        (*mptoken)[sfFlags] = 0;
-        view.insert(mptoken);
-
-        // Update owner count.
-        adjustOwnerCount(view, sleAcct, 1, journal);
-
-        return tesSUCCESS;
-    }
-
-    auto const sleMptIssuance =
-        view.read(keylet::mptIssuance(args.mptIssuanceID));
-    if (!sleMptIssuance)
-        return tecINTERNAL;
-
-    // If the account that submitted this tx is the issuer of the MPT
-    // Note: `account_` is issuer's account
-    //       `holderID` is holder's account
-    if (args.account != (*sleMptIssuance)[sfIssuer])
-        return tecINTERNAL;
-
-    auto const sleMpt =
-        view.peek(keylet::mptoken(args.mptIssuanceID, *args.holderID));
-    if (!sleMpt)
-        return tecINTERNAL;
-
-    std::uint32_t const flagsIn = sleMpt->getFieldU32(sfFlags);
-    std::uint32_t flagsOut = flagsIn;
-
-    // Issuer wants to unauthorize the holder, unset lsfMPTAuthorized on
-    // their MPToken
-    if (args.flags & tfMPTUnauthorize)
-        flagsOut &= ~lsfMPTAuthorized;
-    // Issuer wants to authorize a holder, set lsfMPTAuthorized on their
-    // MPToken
-    else
-        flagsOut |= lsfMPTAuthorized;
-
-    if (flagsIn != flagsOut)
-        sleMpt->setFieldU32(sfFlags, flagsOut);
-
-    view.update(sleMpt);
-    return tesSUCCESS;
-}
-
 TER
 MPTokenAuthorize::doApply()
 {
     auto const& tx = ctx_.tx;
-    return authorize(
+    return authorizeMPToken(
         ctx_.view(),
+        mPriorBalance,
+        tx[sfMPTokenIssuanceID],
+        account_,
         ctx_.journal,
-        {.priorBalance = mPriorBalance,
-         .mptIssuanceID = tx[sfMPTokenIssuanceID],
-         .account = account_,
-         .flags = tx.getFlags(),
-         .holderID = tx[~sfHolder]});
+        tx.getFlags(),
+        tx[~sfHolder]);
 }
 
 } // namespace ripple
@@ -48,12 +48,6 @@ public:
     static TER
     preclaim(PreclaimContext const& ctx);
 
-    static TER
-    authorize(
-        ApplyView& view,
-        beast::Journal journal,
-        MPTAuthorizeArgs const& args);
-
     static TER
     createMPToken(
         ApplyView& view,
@@ -209,6 +209,17 @@ SetOracle::doApply()
 {
     auto const oracleID = keylet::oracle(account_, ctx_.tx[sfOracleDocumentID]);
 
+    auto populatePriceData = [](STObject& priceData, STObject const& entry) {
+        setPriceDataInnerObjTemplate(priceData);
+        priceData.setFieldCurrency(
+            sfBaseAsset, entry.getFieldCurrency(sfBaseAsset));
+        priceData.setFieldCurrency(
+            sfQuoteAsset, entry.getFieldCurrency(sfQuoteAsset));
+        priceData.setFieldU64(sfAssetPrice, entry.getFieldU64(sfAssetPrice));
+        if (entry.isFieldPresent(sfScale))
+            priceData.setFieldU8(sfScale, entry.getFieldU8(sfScale));
+    };
+
     if (auto sle = ctx_.view().peek(oracleID))
     {
         // update
@@ -249,15 +260,7 @@ SetOracle::doApply()
         {
             // add a token pair with the price
            STObject priceData{sfPriceData};
-            setPriceDataInnerObjTemplate(priceData);
-            priceData.setFieldCurrency(
-                sfBaseAsset, entry.getFieldCurrency(sfBaseAsset));
-            priceData.setFieldCurrency(
-                sfQuoteAsset, entry.getFieldCurrency(sfQuoteAsset));
-            priceData.setFieldU64(
-                sfAssetPrice, entry.getFieldU64(sfAssetPrice));
-            if (entry.isFieldPresent(sfScale))
-                priceData.setFieldU8(sfScale, entry.getFieldU8(sfScale));
+            populatePriceData(priceData, entry);
             pairs.emplace(key, std::move(priceData));
         }
     }
@@ -285,7 +288,26 @@ SetOracle::doApply()
     sle->setFieldVL(sfProvider, ctx_.tx[sfProvider]);
     if (ctx_.tx.isFieldPresent(sfURI))
         sle->setFieldVL(sfURI, ctx_.tx[sfURI]);
-    auto const& series = ctx_.tx.getFieldArray(sfPriceDataSeries);
+    STArray series;
+    if (!ctx_.view().rules().enabled(fixPriceOracleOrder))
+    {
+        series = ctx_.tx.getFieldArray(sfPriceDataSeries);
+    }
+    else
+    {
+        std::map<std::pair<Currency, Currency>, STObject> pairs;
+        for (auto const& entry : ctx_.tx.getFieldArray(sfPriceDataSeries))
+        {
+            auto const key = tokenPairKey(entry);
+            STObject priceData{sfPriceData};
+            populatePriceData(priceData, entry);
+            pairs.emplace(key, std::move(priceData));
+        }
+        for (auto const& iter : pairs)
+            series.push_back(std::move(iter.second));
+    }
+
     sle->setFieldArray(sfPriceDataSeries, series);
     sle->setFieldVL(sfAssetClass, ctx_.tx[sfAssetClass]);
     sle->setFieldU32(sfLastUpdateTime, ctx_.tx[sfLastUpdateTime]);
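A side note on the fixPriceOracleOrder branch above: because the rebuilt series goes through a std::map keyed on the (BaseAsset, QuoteAsset) pair, the stored PriceData order is determined by the key ordering rather than by the order of entries in the transaction. A standalone sketch of that idea, using std::string as a stand-in for Currency (an illustration-only substitution):

#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

int main()
{
    // Entries as they might appear in transaction order.
    std::vector<std::pair<std::string, std::string>> const txOrder = {
        {"XRP", "USD"}, {"BTC", "EUR"}, {"ETH", "USD"}};

    // A std::map keyed on the (base, quote) pair iterates in key order,
    // so the rebuilt series comes out the same regardless of input order.
    std::map<std::pair<std::string, std::string>, int> pairs;
    int price = 1;
    for (auto const& key : txOrder)
        pairs.emplace(key, price++);

    for (auto const& [key, value] : pairs)
        std::cout << key.first << '/' << key.second << " -> " << value << '\n';
}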
@@ -210,12 +210,12 @@ VaultDeposit::doApply()
     auto sleMpt = view().read(keylet::mptoken(mptIssuanceID, account_));
     if (!sleMpt)
     {
-        if (auto const err = MPTokenAuthorize::authorize(
+        if (auto const err = authorizeMPToken(
                 view(),
-                ctx_.journal,
-                {.priorBalance = mPriorBalance,
-                 .mptIssuanceID = mptIssuanceID->value(),
-                 .account = account_});
+                mPriorBalance,
+                mptIssuanceID->value(),
+                account_,
+                ctx_.journal);
             !isTesSuccess(err))
             return err;
     }
@@ -223,15 +223,15 @@ VaultDeposit::doApply()
     // If the vault is private, set the authorized flag for the vault owner
     if (vault->isFlag(tfVaultPrivate))
     {
-        if (auto const err = MPTokenAuthorize::authorize(
+        if (auto const err = authorizeMPToken(
                 view(),
+                mPriorBalance,              // priorBalance
+                mptIssuanceID->value(),     // mptIssuanceID
+                sleIssuance->at(sfIssuer),  // account
                 ctx_.journal,
-                {
-                    .priorBalance = mPriorBalance,
-                    .mptIssuanceID = mptIssuanceID->value(),
-                    .account = sleIssuance->at(sfIssuer),
-                    .holderID = account_,
-                });
+                {},       // flags
+                account_  // holderID
+            );
             !isTesSuccess(err))
             return err;
     }
@@ -600,6 +600,16 @@ addEmptyHolding(
         asset.value());
 }
 
+[[nodiscard]] TER
+authorizeMPToken(
+    ApplyView& view,
+    XRPAmount const& priorBalance,
+    MPTID const& mptIssuanceID,
+    AccountID const& account,
+    beast::Journal journal,
+    std::uint32_t flags = 0,
+    std::optional<AccountID> holderID = std::nullopt);
+
 // VFALCO NOTE Both STAmount parameters should just
 //             be "Amount", a unit-less number.
 //
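A caller-side sketch of the declaration above, modeled on the call sites updated elsewhere in this diff. The surrounding names view(), mPriorBalance, issuanceID, account_ and ctx_.journal are assumed from a typical Transactor context and are not defined here; the trailing flags and holderID parameters keep their defaults for a plain holder-side authorization.

// Sketch only: holder-side authorization using the new free function.
if (auto const err = authorizeMPToken(
        view(),         // ApplyView&
        mPriorBalance,  // XRPAmount used for the reserve check
        issuanceID,     // MPTID of the issuance being authorized
        account_,       // holder creating its MPToken
        ctx_.journal);  // flags and holderID keep their defaults
    !isTesSuccess(err))
    return err;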
@@ -18,7 +18,6 @@
 //==============================================================================
 
 #include <xrpld/app/misc/CredentialHelpers.h>
-#include <xrpld/app/tx/detail/MPTokenAuthorize.h>
 #include <xrpld/ledger/ReadView.h>
 #include <xrpld/ledger/View.h>
 
@@ -1215,12 +1214,115 @@ addEmptyHolding(
     if (view.peek(keylet::mptoken(mptID, accountID)))
         return tecDUPLICATE;
 
-    return MPTokenAuthorize::authorize(
-        view,
-        journal,
-        {.priorBalance = priorBalance,
-         .mptIssuanceID = mptID,
-         .account = accountID});
+    return authorizeMPToken(view, priorBalance, mptID, accountID, journal);
+}
+
+[[nodiscard]] TER
+authorizeMPToken(
+    ApplyView& view,
+    XRPAmount const& priorBalance,
+    MPTID const& mptIssuanceID,
+    AccountID const& account,
+    beast::Journal journal,
+    std::uint32_t flags,
+    std::optional<AccountID> holderID)
+{
+    auto const sleAcct = view.peek(keylet::account(account));
+    if (!sleAcct)
+        return tecINTERNAL;
+
+    // If the account that submitted the tx is a holder
+    // Note: `account_` is holder's account
+    //       `holderID` is NOT used
+    if (!holderID)
+    {
+        // When a holder wants to unauthorize/delete a MPT, the ledger must
+        //      - delete mptokenKey from owner directory
+        //      - delete the MPToken
+        if (flags & tfMPTUnauthorize)
+        {
+            auto const mptokenKey = keylet::mptoken(mptIssuanceID, account);
+            auto const sleMpt = view.peek(mptokenKey);
+            if (!sleMpt || (*sleMpt)[sfMPTAmount] != 0)
+                return tecINTERNAL;  // LCOV_EXCL_LINE
+
+            if (!view.dirRemove(
+                    keylet::ownerDir(account),
+                    (*sleMpt)[sfOwnerNode],
+                    sleMpt->key(),
+                    false))
+                return tecINTERNAL;  // LCOV_EXCL_LINE
+
+            adjustOwnerCount(view, sleAcct, -1, journal);
+
+            view.erase(sleMpt);
+            return tesSUCCESS;
+        }
+
+        // A potential holder wants to authorize/hold a mpt, the ledger must:
+        //      - add the new mptokenKey to the owner directory
+        //      - create the MPToken object for the holder
+
+        // The reserve that is required to create the MPToken. Note
+        // that although the reserve increases with every item
+        // an account owns, in the case of MPTokens we only
+        // *enforce* a reserve if the user owns more than two
+        // items. This is similar to the reserve requirements of trust lines.
+        std::uint32_t const uOwnerCount = sleAcct->getFieldU32(sfOwnerCount);
+        XRPAmount const reserveCreate(
+            (uOwnerCount < 2) ? XRPAmount(beast::zero)
+                              : view.fees().accountReserve(uOwnerCount + 1));
+
+        if (priorBalance < reserveCreate)
+            return tecINSUFFICIENT_RESERVE;
+
+        auto const mptokenKey = keylet::mptoken(mptIssuanceID, account);
+        auto mptoken = std::make_shared<SLE>(mptokenKey);
+        if (auto ter = dirLink(view, account, mptoken))
+            return ter;  // LCOV_EXCL_LINE
+
+        (*mptoken)[sfAccount] = account;
+        (*mptoken)[sfMPTokenIssuanceID] = mptIssuanceID;
+        (*mptoken)[sfFlags] = 0;
+        view.insert(mptoken);
+
+        // Update owner count.
+        adjustOwnerCount(view, sleAcct, 1, journal);
+
+        return tesSUCCESS;
+    }
+
+    auto const sleMptIssuance = view.read(keylet::mptIssuance(mptIssuanceID));
+    if (!sleMptIssuance)
+        return tecINTERNAL;
+
+    // If the account that submitted this tx is the issuer of the MPT
+    // Note: `account_` is issuer's account
+    //       `holderID` is holder's account
+    if (account != (*sleMptIssuance)[sfIssuer])
+        return tecINTERNAL;
+
+    auto const sleMpt = view.peek(keylet::mptoken(mptIssuanceID, *holderID));
+    if (!sleMpt)
+        return tecINTERNAL;
+
+    std::uint32_t const flagsIn = sleMpt->getFieldU32(sfFlags);
+    std::uint32_t flagsOut = flagsIn;
+
+    // Issuer wants to unauthorize the holder, unset lsfMPTAuthorized on
+    // their MPToken
+    if (flags & tfMPTUnauthorize)
+        flagsOut &= ~lsfMPTAuthorized;
+    // Issuer wants to authorize a holder, set lsfMPTAuthorized on their
+    // MPToken
+    else
+        flagsOut |= lsfMPTAuthorized;
+
+    if (flagsIn != flagsOut)
+        sleMpt->setFieldU32(sfFlags, flagsOut);
+
+    view.update(sleMpt);
+    return tesSUCCESS;
 }
 
 TER
@@ -1418,13 +1520,14 @@ removeEmptyHolding(
     if (mptoken->at(sfMPTAmount) != 0)
         return tecHAS_OBLIGATIONS;
 
-    return MPTokenAuthorize::authorize(
+    return authorizeMPToken(
         view,
+        {},  // priorBalance
+        mptID,
+        accountID,
         journal,
-        {.priorBalance = {},
-         .mptIssuanceID = mptID,
-         .account = accountID,
-         .flags = tfMPTUnauthorize});
+        tfMPTUnauthorize  // flags
+    );
 }
 
 TER
@@ -2497,15 +2600,12 @@ enforceMPTokenAuthorization(
         XRPL_ASSERT(
             maybeDomainID.has_value() && sleToken == nullptr,
             "ripple::enforceMPTokenAuthorization : new MPToken for domain");
-        if (auto const err = MPTokenAuthorize::authorize(
+        if (auto const err = authorizeMPToken(
                 view,
-                j,
-                {
-                    .priorBalance = priorBalance,
-                    .mptIssuanceID = mptIssuanceID,
-                    .account = account,
-                    .flags = 0,
-                });
+                priorBalance,   // priorBalance
+                mptIssuanceID,  // mptIssuanceID
+                account,        // account
+                j);
             !isTesSuccess(err))
             return err;
 
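One detail worth calling out from the authorizeMPToken hunk above: the reserve is only enforced once the holder already owns at least two ledger items, and is then computed against uOwnerCount + 1. A standalone arithmetic sketch of that rule follows; the base and increment values are placeholders for illustration, not the network's actual reserve settings.

#include <cstdint>
#include <iostream>

int main()
{
    std::int64_t const baseReserveDrops = 10'000'000;      // placeholder value
    std::int64_t const incrementReserveDrops = 2'000'000;  // placeholder value

    auto accountReserve = [&](std::uint32_t ownerCount) {
        return baseReserveDrops + ownerCount * incrementReserveDrops;
    };

    for (std::uint32_t uOwnerCount : {0, 1, 2, 5})
    {
        // Mirrors: (uOwnerCount < 2) ? 0 : accountReserve(uOwnerCount + 1)
        std::int64_t const reserveCreate =
            uOwnerCount < 2 ? 0 : accountReserve(uOwnerCount + 1);
        std::cout << "ownerCount=" << uOwnerCount
                  << " -> reserve to create MPToken: " << reserveCreate
                  << " drops\n";
    }
}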
@@ -114,7 +114,7 @@ getCountsJson(Application& app, int minObjectCount)
     ret[jss::treenode_cache_size] =
         app.getNodeFamily().getTreeNodeCache()->getCacheSize();
     ret[jss::treenode_track_size] =
-        app.getNodeFamily().getTreeNodeCache()->getTrackSize();
+        static_cast<int>(app.getNodeFamily().getTreeNodeCache()->size());
 
     std::string uptime;
     auto s = UptimeClock::now();