Compare commits

..

7 Commits

Author SHA1 Message Date
Valentin Balaschenko
4697fed95b base branch annotation - this is temp
Signed-off-by: Valentin Balaschenko <13349202+vlntb@users.noreply.github.com>
2025-07-31 13:41:17 +01:00
Shawn Xie
baf4b8381f fix DeliveredAmount and delivered_amount in transaction metadata for direct MPT transfer (#5569)
The Payment transaction metadata is missing the `DeliveredAmount` field that displays the actual amount delivered to the destination excluding transfer fees. This amendment fixes this problem.
2025-07-29 17:02:33 +00:00
Ayaz Salikhov
9b45b6888b ci: Build all conan dependencies from source for now (#5623) 2025-07-29 15:29:38 +00:00
Bronek Kozicki
7179ce9c58 Build options cleanup (#5581)
As we no longer support old compiler versions, we are bringing back some warnings by removing no longer relevant `-Wno-...` options.
2025-07-25 15:48:22 -04:00
Bart
921aef9934 Updates Conan dependencies: Boost 1.86 (#5264) 2025-07-25 11:54:02 -04:00
Bronek Kozicki
e7a7bb83c1 VaultWithdraw destination account bugfix (#5572)
#5224 added (among other things) a `VaultWithdraw` transaction that allows setting the recipient of the withdrawn funds in the `Destination` transaction field. This technically turns this transaction into a payment, and in some respect the implementation does follow payment rules, e.g. enforcement of `lsfRequireDestTag` or `lsfDepositAuth`, or that MPT transfer has destination `MPToken`. However for IOUs, it missed verification that the destination account has a trust line to the asset issuer. Since the default behavior of `accountSendIOU` is to create this trust line (if missing), this is what `VaultWithdraw` currently does. This is incorrect, since the `Destination` might not be interested in holding the asset in question; this basically enables spammy transfers. This change, therefore, removes automatic creation of a trust line to the `Destination` account in `VaultWithdraw`.
2025-07-25 13:53:25 +00:00
Bart
5c2a3a2779 refactor: Update rocksdb (#5568)
This change updates RocksDB to its latest version. RocksDB is backward-compatible, so even though this is a major version bump, databases created with previous versions will continue to function.

The external RocksDB folder is removed, as the latest version available via Conan Center no longer needs custom patches.
2025-07-24 14:53:14 -04:00
49 changed files with 467 additions and 1481 deletions

View File

@@ -10,9 +10,7 @@ runs:
shell: bash
run: |
conan export --version 1.1.10 external/snappy
conan export --version 9.7.3 external/rocksdb
conan export --version 4.0.3 external/soci
conan export --version 6.30.1 external/protobuf
- name: add Ripple Conan remote
if: env.CONAN_URL != ''
shell: bash
@@ -23,16 +21,14 @@ runs:
fi
conan remote add --index 0 ripple "${CONAN_URL}"
echo "Added conan remote ripple at ${CONAN_URL}"
- name: try to authenticate to Ripple Conan remote
if: env.CONAN_URL != '' && env.CONAN_LOGIN_USERNAME_RIPPLE != '' && env.CONAN_PASSWORD_RIPPLE != ''
if: env.CONAN_LOGIN_USERNAME_RIPPLE != '' && env.CONAN_PASSWORD_RIPPLE != ''
id: remote
shell: bash
run: |
echo "Authenticating to ripple remote..."
conan remote auth ripple --force
conan remote list-users
- name: list missing binaries
id: binaries
shell: bash
@@ -47,7 +43,7 @@ runs:
cd ${build_dir}
conan install \
--output-folder . \
--build missing \
--build '*' \
--options:host "&:tests=True" \
--options:host "&:xrpld=True" \
--settings:all build_type=${{ inputs.configuration }} \

View File

@@ -18,7 +18,7 @@ concurrency:
# This part of Conan configuration is specific to this workflow only; we do not want
# to pollute conan/profiles directory with settings which might not work for others
env:
#CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
CONAN_GLOBAL_CONF: |
@@ -94,9 +94,7 @@ jobs:
shell: bash
run: |
conan export --version 1.1.10 external/snappy
conan export --version 9.7.3 external/rocksdb
conan export --version 4.0.3 external/soci
conan export --version 6.30.1 external/protobuf
- name: add Ripple Conan remote
if: env.CONAN_URL != ''
shell: bash

View File

@@ -19,7 +19,7 @@ concurrency:
# This part of Conan configuration is specific to this workflow only; we do not want
# to pollute conan/profiles directory with settings which might not work for others
env:
#CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
CONAN_GLOBAL_CONF: |
@@ -99,7 +99,6 @@ jobs:
run: tar -czf conan.tar.gz -C ${CONAN_HOME} .
- name: build dependencies
uses: ./.github/actions/dependencies
with:
configuration: ${{ matrix.configuration }}
- name: upload archive
@@ -368,10 +367,12 @@ jobs:
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
name: linux-clang-Debug
- name: extract cache
run: |
mkdir -p ${CONAN_HOME}
tar -xzf conan.tar.gz -C ${CONAN_HOME}
- name: check environment
run: |
echo ${PATH} | tr ':' '\n'
@@ -379,17 +380,21 @@ jobs:
cmake --version
env | sort
ls ${CONAN_HOME}
- name: checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: dependencies
uses: ./.github/actions/dependencies
with:
configuration: Debug
- name: prepare environment
run: |
mkdir -p ${build_dir}
echo "SOURCE_DIR=$(pwd)" >> $GITHUB_ENV
echo "BUILD_DIR=$(pwd)/${build_dir}" >> $GITHUB_ENV
- name: build with instrumentation
run: |
cd ${BUILD_DIR}
@@ -403,10 +408,12 @@ jobs:
-DSECP256K1_BUILD_EXHAUSTIVE_TESTS=OFF \
-DCMAKE_TOOLCHAIN_FILE=${BUILD_DIR}/build/generators/conan_toolchain.cmake
cmake --build . --parallel $(nproc)
- name: verify instrumentation enabled
run: |
cd ${BUILD_DIR}
./rippled --version | grep libvoidstar
- name: run unit tests
run: |
cd ${BUILD_DIR}

View File

@@ -21,7 +21,7 @@ concurrency:
# This part of Conan configuration is specific to this workflow only; we do not want
# to pollute conan/profiles directory with settings which might not work for others
env:
#CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/dev
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
CONAN_GLOBAL_CONF: |
@@ -89,9 +89,7 @@ jobs:
shell: bash
run: |
conan export --version 1.1.10 external/snappy
conan export --version 9.7.3 external/rocksdb
conan export --version 4.0.3 external/soci
conan export --version 6.30.1 external/protobuf
- name: add Ripple Conan remote
if: env.CONAN_URL != ''
shell: bash

View File

@@ -171,14 +171,6 @@ which allows you to statically link it with GCC, if you want.
conan export --version 1.1.10 external/snappy
```
Export our [Conan recipe for RocksDB](./external/rocksdb).
It does not override paths to dependencies when building with Visual Studio.
```
# Conan 2.x
conan export --version 9.7.3 external/rocksdb
```
Export our [Conan recipe for SOCI](./external/soci).
It patches their CMake to correctly import its dependencies.
@@ -187,14 +179,6 @@ It patches their CMake to correctly import its dependencies.
conan export --version 4.0.3 external/soci
```
Export our [Conan recipe for Protobuf](./external/protobuf).
It fixes compilation errors with clang-16 and Visual Studio.
```
# Conan 2.x
conan export --version 6.30.1 external/protobuf
```
### Build and Test
1. Create a build directory and move into it.
@@ -386,18 +370,13 @@ and can be helpful for detecting `#include` omissions.
## Troubleshooting
### Conan
After any updates or changes to dependencies, you may need to do the following:
1. Remove your build directory.
2. Remove the Conan cache:
```
rm -rf ~/.conan/data
```
4. Re-run [conan install](#build-and-test).
2. Remove the Conan cache: `conan remove "*" -c`
3. Re-run [conan install](#build-and-test).
### 'protobuf/port_def.inc' file not found
@@ -415,54 +394,6 @@ For example, if you want to build Debug:
1. For conan install, pass `--settings build_type=Debug`
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
### no std::result_of
If your compiler version is recent enough to have removed `std::result_of` as
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
definition to your build.
```
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
```
### call to 'async_teardown' is ambiguous
If you are compiling with an early version of Clang 16, then you might hit
a [regression][6] when compiling C++20 that manifests as an [error in a Boost
header][7]. You can workaround it by adding this preprocessor definition:
```
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
```
### recompile with -fPIC
If you get a linker error suggesting that you recompile Boost with
position-independent code, such as:
```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
requires unsupported dynamic reloc 11; recompile with -fPIC
```
Conan most likely downloaded a bad binary distribution of the dependency.
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
for Linux. The solution is to build the dependency locally by passing
`--build boost` when calling `conan install`.
```
conan install --build boost ...
```
## Add a Dependency
If you want to experiment with a new package, follow these steps:

View File

@@ -90,28 +90,15 @@ if (MSVC)
-errorreport:none
-machine:X64)
else ()
# HACK : because these need to come first, before any warning demotion
string (APPEND CMAKE_CXX_FLAGS " -Wall -Wdeprecated")
if (wextra)
string (APPEND CMAKE_CXX_FLAGS " -Wextra -Wno-unused-parameter")
endif ()
# not MSVC
target_compile_options (common
INTERFACE
-Wall
-Wdeprecated
$<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
$<$<BOOL:${werr}>:-Werror>
$<$<COMPILE_LANGUAGE:CXX>:
-frtti
-Wnon-virtual-dtor
>
-Wno-sign-compare
-Wno-char-subscripts
-Wno-format
-Wno-unused-local-typedefs
-fstack-protector
$<$<BOOL:${is_gcc}>:
-Wno-unused-but-set-variable
-Wno-deprecated
>
-Wno-sign-compare
-Wno-unused-but-set-variable
$<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
# tweak gcc optimization for debug
$<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>

View File

@@ -26,9 +26,6 @@ tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% if compiler == "apple-clang" and compiler_version >= 17 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "clang" and compiler_version == 16 %}
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
{% endif %}
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags=['-Wno-restrict']
{% endif %}

View File

@@ -24,7 +24,7 @@ class Xrpl(ConanFile):
}
requires = [
'grpc/1.72.0',
'grpc/1.50.1',
'libarchive/3.8.1',
'nudb/2.0.9',
'openssl/1.1.1w',
@@ -37,7 +37,7 @@ class Xrpl(ConanFile):
]
tool_requires = [
'protobuf/6.30.1',
'protobuf/3.21.12',
]
default_options = {
@@ -104,15 +104,15 @@ class Xrpl(ConanFile):
def requirements(self):
# Conan 2 requires transitive headers to be specified
transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
self.requires('boost/1.83.0', force=True, **transitive_headers_opt)
self.requires('boost/1.86.0', force=True, **transitive_headers_opt)
self.requires('date/3.0.4', **transitive_headers_opt)
self.requires('lz4/1.10.0', force=True)
self.requires('protobuf/6.30.1', force=True)
self.requires('protobuf/3.21.12', force=True)
self.requires('sqlite3/3.49.1', force=True)
if self.options.jemalloc:
self.requires('jemalloc/5.3.0')
if self.options.rocksdb:
self.requires('rocksdb/9.7.3')
self.requires('rocksdb/10.0.1')
self.requires('xxhash/0.8.3', **transitive_headers_opt)
exports_sources = (

View File

@@ -17,6 +17,9 @@ add_library(ed25519 STATIC
)
add_library(ed25519::ed25519 ALIAS ed25519)
target_link_libraries(ed25519 PUBLIC OpenSSL::SSL)
if(NOT MSVC)
target_compile_options(ed25519 PRIVATE -Wno-implicit-fallthrough)
endif()
include(GNUInstallDirs)

View File

@@ -1,54 +0,0 @@
sources:
"6.30.1":
url: "https://github.com/protocolbuffers/protobuf/archive/refs/tags/v6.30.1.tar.gz"
sha256: "c97cc064278ef2b8c4da66c1f85613642ecbd5a0c4217c0defdf7ad1b3de9fa5"
patches:
"6.30.1":
- patch_file: "patches/protobuf-6.30.1-change-empty-string.patch"
patch_description: "Change how we decide which empty string implementation to use"
patch_type: "backport"
patch_source: "https://github.com/protocolbuffers/protobuf/issues/20645"
- patch_file: "patches/protobuf-6.30.1-disable-fixed-string-MSVC.patch"
patch_description: "Disable the optimization for fixed_address_empty_string for MSVC"
patch_type: "backport"
patch_source: "https://github.com/protocolbuffers/protobuf/issues/21957"
absl_deps:
# reference: https://github.com/protocolbuffers/protobuf/blob/main/cmake/abseil-cpp.cmake
"6.30.1":
- absl_absl_check
- absl_absl_log
- absl_algorithm
- absl_base
- absl_bind_front
- absl_bits
- absl_btree
- absl_cleanup
- absl_cord
- absl_core_headers
- absl_debugging
- absl_die_if_null
- absl_dynamic_annotations
- absl_flags
- absl_flat_hash_map
- absl_flat_hash_set
- absl_function_ref
- absl_hash
- absl_layout
- absl_log_initialize
- absl_log_globals
- absl_log_severity
- absl_memory
- absl_node_hash_map
- absl_node_hash_set
- absl_optional
- absl_random_distributions
- absl_random_random
- absl_span
- absl_status
- absl_statusor
- absl_strings
- absl_synchronization
- absl_time
- absl_type_traits
- absl_utility
- absl_variant

View File

@@ -1,330 +0,0 @@
from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.apple import is_apple_os
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import copy, rename, get, apply_conandata_patches, export_conandata_patches, replace_in_file, rmdir, rm, save
from conan.tools.microsoft import check_min_vs, msvc_runtime_flag, is_msvc, is_msvc_static_runtime
from conan.tools.scm import Version
import os
required_conan_version = ">=1.53"
class ProtobufConan(ConanFile):
name = "protobuf"
description = "Protocol Buffers - Google's data interchange format"
topics = ("protocol-buffers", "protocol-compiler", "serialization", "rpc", "protocol-compiler")
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/protocolbuffers/protobuf"
license = "BSD-3-Clause"
package_type = "library"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
"with_zlib": [True, False],
"with_rtti": [True, False],
"lite": [True, False],
"upb": [True, False],
"debug_suffix": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
"with_zlib": True,
"with_rtti": True,
"lite": False,
"upb": False,
"debug_suffix": True,
}
short_paths = True
@property
def _is_clang_cl(self):
return self.settings.compiler == "clang" and self.settings.os == "Windows"
@property
def _is_clang_x86(self):
return self.settings.compiler == "clang" and self.settings.arch == "x86"
@property
def _protobuf_release(self):
current_ver = Version(self.version)
return Version(f"{current_ver.minor}.{current_ver.patch}")
def export_sources(self):
export_conandata_patches(self)
copy(self, "protobuf-conan-protoc-target.cmake", self.recipe_folder, os.path.join(self.export_sources_folder, "src"))
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
if self._protobuf_release < "27.0":
self.options.rm_safe("upb")
def layout(self):
cmake_layout(self, src_folder="src")
def requirements(self):
if self.options.with_zlib:
self.requires("zlib/[>=1.2.11 <2]")
if self._protobuf_release >= "22.0":
self.requires("abseil/[>=20230802.1 <=20250127.0]", transitive_headers=True)
@property
def _compilers_minimum_version(self):
return {
"gcc": "6",
"clang": "5",
"apple-clang": "10",
"Visual Studio": "15",
"msvc": "191",
}
def validate(self):
if self.options.shared and is_msvc_static_runtime(self):
raise ConanInvalidConfiguration("Protobuf can't be built with shared + MT(d) runtimes")
if is_msvc(self) and self._protobuf_release >= "22" and self.options.shared and \
not self.dependencies["abseil"].options.shared:
raise ConanInvalidConfiguration("When building protobuf as a shared library on Windows, "
"abseil needs to be a shared library too")
if self._protobuf_release >= "30.1":
check_min_cppstd(self, 17)
elif self._protobuf_release >= "22.0":
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, 14)
else:
minimum_version = self._compilers_minimum_version.get(str(self.settings.compiler), None)
compiler_version = Version(self.settings.compiler.version)
if minimum_version and compiler_version < minimum_version:
raise ConanInvalidConfiguration(
f"{self.ref} requires C++14, which your compiler does not support.",
)
check_min_vs(self, "190")
if self.settings.compiler == "clang":
if Version(self.settings.compiler.version) < "4":
raise ConanInvalidConfiguration(f"{self.ref} doesn't support clang < 4")
if "abseil" in self.dependencies.host:
abseil_cppstd = self.dependencies.host['abseil'].info.settings.compiler.cppstd
if abseil_cppstd != self.settings.compiler.cppstd:
raise ConanInvalidConfiguration(f"Protobuf and abseil must be built with the same compiler.cppstd setting")
def source(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True)
def build_requirements(self):
if self._protobuf_release >= "30.1":
self.tool_requires("cmake/[>=3.16 <4]")
@property
def _cmake_install_base_path(self):
return os.path.join("lib", "cmake", "protobuf")
def generate(self):
tc = CMakeToolchain(self)
if self._protobuf_release >= "30.1":
tc.cache_variables["protobuf_LOCAL_DEPENDENCIES_ONLY"] = True
tc.cache_variables["CMAKE_INSTALL_CMAKEDIR"] = self._cmake_install_base_path.replace("\\", "/")
tc.cache_variables["protobuf_WITH_ZLIB"] = self.options.with_zlib
tc.cache_variables["protobuf_BUILD_TESTS"] = False
tc.cache_variables["protobuf_BUILD_PROTOC_BINARIES"] = self.settings.os != "tvOS"
if not self.options.debug_suffix:
tc.cache_variables["protobuf_DEBUG_POSTFIX"] = ""
tc.cache_variables["protobuf_BUILD_LIBPROTOC"] = self.settings.os != "tvOS"
tc.cache_variables["protobuf_DISABLE_RTTI"] = not self.options.with_rtti
tc.cache_variables["protobuf_BUILD_LIBUPB"] = self.options.get_safe("upb")
if self._protobuf_release >= "22.0":
tc.cache_variables["protobuf_ABSL_PROVIDER"] = "package"
if not self.settings.compiler.get_safe("cppstd") and self._protobuf_release >= "22.0":
tc.variables["CMAKE_CXX_STANDARD"] = 14
if is_msvc(self) or self._is_clang_cl:
runtime = self.settings.get_safe("compiler.runtime")
if runtime:
tc.cache_variables["protobuf_MSVC_STATIC_RUNTIME"] = runtime == "static"
if is_apple_os(self) and self.options.shared:
# Workaround against SIP on macOS for consumers while invoking protoc when protobuf lib is shared
tc.variables["CMAKE_INSTALL_RPATH"] = "@loader_path/../lib"
if self.settings.os == "Linux":
# Use RPATH instead of RUNPATH to help with specific case
# in the grpc recipe when grpc_cpp_plugin is run with protoc
# in the same build. RPATH ensures that the rpath in the binary
# is respected for transitive dependencies too
project_include = os.path.join(self.generators_folder, "protobuf_project_include.cmake")
save(self, project_include, "add_link_options(-Wl,--disable-new-dtags)")
tc.variables["CMAKE_PROJECT_INCLUDE"] = project_include
# Note: conan2 only could be:
# tc.extra_exelinkflags.append("-Wl,--disable-new-dtags")
# tc.extra_sharedlinkflags.append("-Wl,--disable-new-dtags")
tc.generate()
deps = CMakeDeps(self)
deps.generate()
def _patch_sources(self):
apply_conandata_patches(self)
if self._protobuf_release < "22.0":
# In older versions of protobuf, this file defines the `protobuf_generate` function
protobuf_config_cmake = os.path.join(self.source_folder, "cmake", "protobuf-config.cmake.in")
replace_in_file(self, protobuf_config_cmake, "@_protobuf_FIND_ZLIB@", "")
replace_in_file(self, protobuf_config_cmake,
"include(\"${CMAKE_CURRENT_LIST_DIR}/protobuf-targets.cmake\")",
""
)
# Disable a potential warning in protobuf-module.cmake.in
# TODO: remove this patch? Is it really useful?
protobuf_module_cmake = os.path.join(self.source_folder, "cmake", "protobuf-module.cmake.in")
replace_in_file(self,
protobuf_module_cmake,
"if(DEFINED Protobuf_SRC_ROOT_FOLDER)",
"if(0)\nif(DEFINED Protobuf_SRC_ROOT_FOLDER)",
)
replace_in_file(self,
protobuf_module_cmake,
"# Define upper case versions of output variables",
"endif()",
)
def build(self):
self._patch_sources()
cmake = CMake(self)
cmake_root = "cmake" if Version(self.version) < "3.21" else None
cmake.configure(build_script_folder=cmake_root)
cmake.build()
def package(self):
copy(self, "LICENSE", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
cmake = CMake(self)
cmake.install()
rmdir(self, os.path.join(self.package_folder, "lib", "pkgconfig"))
rmdir(self, os.path.join(self.package_folder, "lib", "cmake", "utf8_range"))
if self._protobuf_release < "22.0":
rename(self, os.path.join(self.package_folder, self._cmake_install_base_path, "protobuf-config.cmake"),
os.path.join(self.package_folder, self._cmake_install_base_path, "protobuf-generate.cmake"))
cmake_config_folder = os.path.join(self.package_folder, self._cmake_install_base_path)
rm(self, "protobuf-config*.cmake", folder=cmake_config_folder)
rm(self, "protobuf-targets*.cmake", folder=cmake_config_folder)
copy(self, "protobuf-conan-protoc-target.cmake", src=self.source_folder, dst=cmake_config_folder)
if not self.options.lite:
rm(self, "libprotobuf-lite*", os.path.join(self.package_folder, "lib"))
rm(self, "libprotobuf-lite*", os.path.join(self.package_folder, "bin"))
def package_info(self):
self.cpp_info.set_property("cmake_find_mode", "both")
self.cpp_info.set_property("cmake_module_file_name", "Protobuf")
self.cpp_info.set_property("cmake_file_name", "protobuf")
self.cpp_info.set_property("pkg_config_name", "protobuf_full_package") # unofficial, but required to avoid side effects (libprotobuf component "steals" the default global pkg_config name)
build_modules = [
os.path.join(self._cmake_install_base_path, "protobuf-generate.cmake"),
os.path.join(self._cmake_install_base_path, "protobuf-module.cmake"),
os.path.join(self._cmake_install_base_path, "protobuf-options.cmake"),
os.path.join(self._cmake_install_base_path, "protobuf-conan-protoc-target.cmake"),
]
self.cpp_info.set_property("cmake_build_modules", build_modules)
lib_prefix = "lib" if (is_msvc(self) or self._is_clang_cl) else ""
lib_suffix = "d" if self.settings.build_type == "Debug" and self.options.debug_suffix else ""
if self._protobuf_release >= "22.0":
absl_deps = [f"abseil::{c}" for c in self.conan_data["absl_deps"][self.version]]
if self._protobuf_release >= "22.0" and (not self.options.shared or self.options.get_safe("upb")):
# utf8 libraries
# it's a private dependency and unconditionally built as a static library, should only
# be exposed when protobuf itself is static (or if upb is being built)
self.cpp_info.components["utf8_range"].set_property("cmake_target_name", "utf8_range::utf8_range")
self.cpp_info.components["utf8_validity"].set_property("cmake_target_name", "utf8_range::utf8_validity")
# https://github.com/protocolbuffers/protobuf/blob/0d815c5b74281f081c1ee4b431a4d5bbb1615c97/third_party/utf8_range/CMakeLists.txt#L24
if self._protobuf_release >= "30.1" and self.settings.os == "Windows":
self.cpp_info.components["utf8_range"].libs = ["libutf8_range"]
self.cpp_info.components["utf8_validity"].libs = ["libutf8_validity"]
else:
self.cpp_info.components["utf8_range"].libs = ["utf8_range"]
self.cpp_info.components["utf8_validity"].libs = ["utf8_validity"]
self.cpp_info.components["utf8_validity"].requires = ["abseil::absl_strings"]
if self.options.get_safe("upb"):
# upb libraries: note that these are unconditionally static
self.cpp_info.components["upb"].set_property("cmake_target_name", "protobuf::libupb")
self.cpp_info.components["upb"].libs = [lib_prefix + "upb" + lib_suffix]
self.cpp_info.components["upb"].requires = ["utf8_range"]
# libprotobuf
self.cpp_info.components["libprotobuf"].set_property("cmake_target_name", "protobuf::libprotobuf")
self.cpp_info.components["libprotobuf"].set_property("pkg_config_name", "protobuf")
self.cpp_info.components["libprotobuf"].builddirs.append(self._cmake_install_base_path)
self.cpp_info.components["libprotobuf"].libs = [lib_prefix + "protobuf" + lib_suffix]
if self.options.with_zlib:
self.cpp_info.components["libprotobuf"].requires = ["zlib::zlib"]
if self._protobuf_release >= "22.0":
self.cpp_info.components["libprotobuf"].requires.extend(absl_deps)
if not self.options.shared:
self.cpp_info.components["libprotobuf"].requires.extend(["utf8_validity"])
if self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.components["libprotobuf"].system_libs.extend(["m", "pthread"])
if self._is_clang_x86 or "arm" in str(self.settings.arch):
self.cpp_info.components["libprotobuf"].system_libs.append("atomic")
if self.settings.os == "Android":
self.cpp_info.components["libprotobuf"].system_libs.append("log")
if self.settings.os == "Windows":
if self.options.shared:
self.cpp_info.components["libprotobuf"].defines = ["PROTOBUF_USE_DLLS"]
# libprotoc
if self.settings.os != "tvOS":
self.cpp_info.components["libprotoc"].set_property("cmake_target_name", "protobuf::libprotoc")
self.cpp_info.components["libprotoc"].libs = [lib_prefix + "protoc" + lib_suffix]
self.cpp_info.components["libprotoc"].requires = ["libprotobuf"]
if self._protobuf_release >= "22.0":
self.cpp_info.components["libprotoc"].requires.extend(absl_deps)
# libprotobuf-lite
if self.options.lite:
self.cpp_info.components["libprotobuf-lite"].set_property("cmake_target_name", "protobuf::libprotobuf-lite")
self.cpp_info.components["libprotobuf-lite"].set_property("pkg_config_name", "protobuf-lite")
self.cpp_info.components["libprotobuf-lite"].builddirs.append(self._cmake_install_base_path)
self.cpp_info.components["libprotobuf-lite"].libs = [lib_prefix + "protobuf-lite" + lib_suffix]
if self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.components["libprotobuf-lite"].system_libs.extend(["m", "pthread"])
if self._is_clang_x86 or "arm" in str(self.settings.arch):
self.cpp_info.components["libprotobuf-lite"].system_libs.append("atomic")
if self.settings.os == "Windows":
if self.options.shared:
self.cpp_info.components["libprotobuf-lite"].defines = ["PROTOBUF_USE_DLLS"]
if self.settings.os == "Android":
self.cpp_info.components["libprotobuf-lite"].system_libs.append("log")
if self._protobuf_release >= "22.0":
self.cpp_info.components["libprotobuf-lite"].requires.extend(absl_deps)
if not self.options.shared:
self.cpp_info.components["libprotobuf-lite"].requires.extend(["utf8_validity"])
# TODO: to remove in conan v2 once cmake_find_package* & pkg_config generators removed
self.cpp_info.filenames["cmake_find_package"] = "Protobuf"
self.cpp_info.filenames["cmake_find_package_multi"] = "protobuf"
self.cpp_info.names["pkg_config"] ="protobuf_full_package"
for generator in ["cmake_find_package", "cmake_find_package_multi"]:
self.cpp_info.components["libprotobuf"].build_modules[generator] = build_modules
if self.options.lite:
for generator in ["cmake_find_package", "cmake_find_package_multi"]:
self.cpp_info.components["libprotobuf-lite"].build_modules[generator] = build_modules
self.env_info.PATH.append(os.path.join(self.package_folder, "bin"))

View File

@@ -1,70 +0,0 @@
diff --git a/src/google/protobuf/port.cc b/src/google/protobuf/port.cc
index af668e9e2..d60c2b89f 100644
--- a/src/google/protobuf/port.cc
+++ b/src/google/protobuf/port.cc
@@ -97,14 +97,9 @@ void RealDebugCounter::Register(absl::string_view name) {
}
}
-#if defined(__cpp_lib_constexpr_string) && __cpp_lib_constexpr_string >= 201907L
-PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT const GlobalEmptyString
- fixed_address_empty_string{};
-#else
PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT
PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 GlobalEmptyString
fixed_address_empty_string{};
-#endif
} // namespace internal
} // namespace protobuf
diff --git a/src/google/protobuf/port.h b/src/google/protobuf/port.h
index 5f9e909a0..386ecc02a 100644
--- a/src/google/protobuf/port.h
+++ b/src/google/protobuf/port.h
@@ -494,20 +494,27 @@ class NoopDebugCounter {
// Default empty string object. Don't use this directly. Instead, call
// GetEmptyString() to get the reference. This empty string is aligned with a
// minimum alignment of 8 bytes to match the requirement of ArenaStringPtr.
-#if defined(__cpp_lib_constexpr_string) && __cpp_lib_constexpr_string >= 201907L
+
// Take advantage of C++20 constexpr support in std::string.
-class alignas(8) GlobalEmptyString {
+class alignas(8) GlobalEmptyStringConstexpr {
public:
const std::string& get() const { return value_; }
// Nothing to init, or destroy.
std::string* Init() const { return nullptr; }
+ template <typename T = std::string, bool = (T(), true)>
+ static constexpr std::true_type HasConstexprDefaultConstructor(int) {
+ return {};
+ }
+ static constexpr std::false_type HasConstexprDefaultConstructor(char) {
+ return {};
+ }
+
private:
std::string value_;
};
-PROTOBUF_EXPORT extern const GlobalEmptyString fixed_address_empty_string;
-#else
-class alignas(8) GlobalEmptyString {
+
+class alignas(8) GlobalEmptyStringDynamicInit {
public:
const std::string& get() const {
return *reinterpret_cast<const std::string*>(internal::Launder(buffer_));
@@ -519,8 +526,12 @@ class alignas(8) GlobalEmptyString {
private:
alignas(std::string) char buffer_[sizeof(std::string)];
};
+
+using GlobalEmptyString = std::conditional_t<
+ GlobalEmptyStringConstexpr::HasConstexprDefaultConstructor(0),
+ const GlobalEmptyStringConstexpr, GlobalEmptyStringDynamicInit>;
+
PROTOBUF_EXPORT extern GlobalEmptyString fixed_address_empty_string;
-#endif
} // namespace internal
} // namespace protobuf

View File

@@ -1,21 +0,0 @@
diff --git a/src/google/protobuf/port.h b/src/google/protobuf/port.h
index 386ecc02a..32d260c42 100644
--- a/src/google/protobuf/port.h
+++ b/src/google/protobuf/port.h
@@ -502,10 +502,16 @@ class alignas(8) GlobalEmptyStringConstexpr {
// Nothing to init, or destroy.
std::string* Init() const { return nullptr; }
+ // Disable the optimization for MSVC.
+ // There are some builds where the default constructed string can't be used as
+ // `constinit` even though the constructor is `constexpr` and can be used
+ // during constant evaluation.
+#if !defined(_MSC_VER)
template <typename T = std::string, bool = (T(), true)>
static constexpr std::true_type HasConstexprDefaultConstructor(int) {
return {};
}
+#endif
static constexpr std::false_type HasConstexprDefaultConstructor(char) {
return {};
}

View File

@@ -1,25 +0,0 @@
if(NOT TARGET protobuf::protoc)
    # Locate the protoc executable and expose it as the imported target
    # protobuf::protoc so downstream protobuf_generate() calls can use it.

    ## Workaround for legacy "cmake" generator in case of cross-build:
    ## restrict the search to the environment PATH, where the build-host
    ## protoc (not the target-arch one) is expected to live.
    if(CMAKE_CROSSCOMPILING)
        find_program(PROTOC_PROGRAM NAMES protoc PATHS ENV PATH NO_DEFAULT_PATH)
    endif()
    ## And here this will work fine with "CMakeToolchain" (for native & cross-build)
    ## and legacy "cmake" generator in case of native build
    if(NOT PROTOC_PROGRAM)
        find_program(PROTOC_PROGRAM NAMES protoc)
    endif()
    ## Last resort: we search in package folder directly
    if(NOT PROTOC_PROGRAM)
        set(PROTOC_PROGRAM "${CMAKE_CURRENT_LIST_DIR}/../../../bin/protoc${CMAKE_EXECUTABLE_SUFFIX}")
    endif()
    get_filename_component(PROTOC_PROGRAM "${PROTOC_PROGRAM}" ABSOLUTE)
    # Give opportunity to users to provide an external protoc executable
    # (this is a feature of official FindProtobuf.cmake)
    set(Protobuf_PROTOC_EXECUTABLE ${PROTOC_PROGRAM} CACHE FILEPATH "The protoc compiler")
    # Create executable imported target protobuf::protoc
    add_executable(protobuf::protoc IMPORTED)
    set_property(TARGET protobuf::protoc PROPERTY IMPORTED_LOCATION ${Protobuf_PROTOC_EXECUTABLE})
endif()

View File

@@ -1,28 +0,0 @@
# Test-package build: compiles a tiny protobuf consumer and verifies that
# find_package(protobuf) provides the protoc target and codegen helper.
cmake_minimum_required(VERSION 3.15)
project(test_package LANGUAGES CXX)

find_package(protobuf CONFIG REQUIRED)

add_executable(${PROJECT_NAME} test_package.cpp)

# The conanfile sets this cache variable; protobuf >= 3.22 requires C++14.
if(CONAN_TEST_USE_CXXSTD_14)
    target_compile_features(${PROJECT_NAME} PRIVATE cxx_std_14)
else()
    target_compile_features(${PROJECT_NAME} PRIVATE cxx_std_11)
endif()

# Link against the lite or full runtime, matching the tested `lite` option.
if (protobuf_LITE)
    target_link_libraries(${PROJECT_NAME} PRIVATE protobuf::libprotobuf-lite)
    target_compile_definitions(${PROJECT_NAME} PRIVATE CONANTEST_PROTOBUF_LITE=1)
else()
    target_link_libraries(${PROJECT_NAME} PRIVATE protobuf::libprotobuf)
endif()

# Sanity checks: the package config must define both the protoc imported
# target and the protobuf_generate() function.
if(NOT TARGET protobuf::protoc)
    message(FATAL_ERROR "protoc executable should have been defined as part of find_package(protobuf)")
endif()
if(NOT COMMAND protobuf_generate)
    message(FATAL_ERROR "protobuf_generate should have been defined as part of find_package(protobuf)")
endif()

View File

@@ -1,64 +0,0 @@
// See README.txt for information and build instructions.
//
// Note: START and END tags are used in comments to define sections used in
// tutorials. They are not part of the syntax for Protocol Buffers.
//
// To get an in-depth walkthrough of this file and the related examples, see:
// https://developers.google.com/protocol-buffers/docs/tutorials

// [START declaration]
syntax = "proto3";
package tutorial;

import "google/protobuf/timestamp.proto";
// [END declaration]

// [START java_declaration]
option java_package = "com.example.tutorial";
option java_outer_classname = "AddressBookProtos";
// [END java_declaration]

// [START csharp_declaration]
option csharp_namespace = "Google.Protobuf.Examples.AddressBook";
// [END csharp_declaration]

// [START messages]
// A single contact entry with optional phone numbers.
message Person {
  string name = 1;
  int32 id = 2;  // Unique ID number for this person.
  string email = 3;

  enum PhoneType {
    MOBILE = 0;
    HOME = 1;
    WORK = 2;
  }

  message PhoneNumber {
    string number = 1;
    PhoneType type = 2;
  }

  repeated PhoneNumber phones = 4;

  google.protobuf.Timestamp last_updated = 5;
}

// Our address book file is just one of these.
message AddressBook {
  repeated Person people = 1;
}
// [END messages]

// Arbitrary message with a GID type, to cause
// the .pb.h file to define `GID_MAX`, which
// may conflict with a macro defined by system
// headers on macOS.
message FooBar {
  enum GID {
    FOO = 0;
  }
  enum UID {
    BAR = 0;
  }
}

View File

@@ -1,40 +0,0 @@
from conan import ConanFile
from conan.tools.build import can_run
from conan.tools.cmake import cmake_layout, CMake, CMakeToolchain
from conan.tools.scm import Version
import os


class TestPackageConan(ConanFile):
    """Conan test package for protobuf.

    Builds a small consumer executable, runs it, then invokes the packaged
    protoc compiler to verify code generation works end-to-end.
    """

    settings = "os", "arch", "compiler", "build_type"
    generators = "CMakeDeps", "VirtualBuildEnv", "VirtualRunEnv"
    test_type = "explicit"

    def layout(self):
        cmake_layout(self)

    def requirements(self):
        # note `run=True` so that the runenv can find protoc
        self.requires(self.tested_reference_str, run=True)

    def generate(self):
        tc = CMakeToolchain(self)
        # Forward the tested package's `lite` option so the CMake side links
        # against the matching runtime (libprotobuf vs libprotobuf-lite).
        tc.cache_variables["protobuf_LITE"] = self.dependencies[self.tested_reference_str].options.lite
        protobuf_version = Version(self.dependencies[self.tested_reference_str].ref.version)
        # protobuf >= 3.22 requires C++14 or later.
        tc.cache_variables["CONAN_TEST_USE_CXXSTD_14"] = protobuf_version >= "3.22"
        tc.generate()

    def build(self):
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def test(self):
        if can_run(self):
            bin_path = os.path.join(self.cpp.build.bindirs[0], "test_package")
            self.run(bin_path, env="conanrun")
            # Invoke protoc in the same way CMake would
            self.run(f"protoc --proto_path={self.source_folder} --cpp_out={self.build_folder} {self.source_folder}/addressbook.proto", env="conanrun")
            # protoc must have emitted the generated C++ sources.
            assert os.path.exists(os.path.join(self.build_folder, "addressbook.pb.cc"))
            assert os.path.exists(os.path.join(self.build_folder, "addressbook.pb.h"))

View File

@@ -1,23 +0,0 @@
#include <cstdlib>
#include <iostream>
#if defined(CONANTEST_PROTOBUF_LITE)
#include <google/protobuf/message_lite.h>
#else
#include <google/protobuf/timestamp.pb.h>
#include <google/protobuf/util/time_util.h>
#endif

// Smoke test: exercise whichever protobuf runtime (full or lite) the
// test package was linked against.
int main()
{
#if defined(CONANTEST_PROTOBUF_LITE)
    // The lite runtime lacks the well-known types; just touch the library.
    google::protobuf::ShutdownProtobufLibrary();
#else
    namespace pb = google::protobuf;
    pb::Timestamp ts;
    pb::util::TimeUtil::FromString("1972-01-01T10:00:20.021Z", &ts);
    std::cout << "1972-01-01T10:00:20.021Z in nanoseconds: " << ts.nanos() << "\n";
#endif
    return EXIT_SUCCESS;
}

View File

@@ -1,12 +0,0 @@
# Download location and checksum for each packaged rocksdb release.
sources:
  "9.7.3":
    url: "https://github.com/facebook/rocksdb/archive/refs/tags/v9.7.3.tar.gz"
    sha256: "acfabb989cbfb5b5c4d23214819b059638193ec33dad2d88373c46448d16d38b"
# Patches applied on top of the upstream tarball at build time.
patches:
  "9.7.3":
    - patch_file: "patches/9.x.x-0001-exclude-thirdparty.patch"
      patch_description: "Do not include thirdparty.inc"
      patch_type: "portability"
    - patch_file: "patches/9.7.3-0001-memory-leak.patch"
      patch_description: "Fix a leak of obsolete blob files left open until DB::Close()"
      patch_type: "portability"

View File

@@ -1,235 +0,0 @@
import os
import glob
import shutil
from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, collect_libs, copy, export_conandata_patches, get, rm, rmdir
from conan.tools.microsoft import check_min_vs, is_msvc, is_msvc_static_runtime
from conan.tools.scm import Version

required_conan_version = ">=1.53.0"


class RocksDBConan(ConanFile):
    """Conan recipe for Facebook's RocksDB embeddable key-value store."""

    name = "rocksdb"
    description = "A library that provides an embeddable, persistent key-value store for fast storage"
    license = ("GPL-2.0-only", "Apache-2.0")
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://github.com/facebook/rocksdb"
    topics = ("database", "leveldb", "facebook", "key-value")
    package_type = "library"
    settings = "os", "arch", "compiler", "build_type"
    # Optional compression / allocator / threading integrations; all
    # disabled by default (see default_options).
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        "lite": [True, False],
        "with_gflags": [True, False],
        "with_snappy": [True, False],
        "with_lz4": [True, False],
        "with_zlib": [True, False],
        "with_zstd": [True, False],
        "with_tbb": [True, False],
        "with_jemalloc": [True, False],
        "enable_sse": [False, "sse42", "avx2"],
        "use_rtti": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
        "lite": False,
        "with_snappy": False,
        "with_lz4": False,
        "with_zlib": False,
        "with_zstd": False,
        "with_gflags": False,
        "with_tbb": False,
        "with_jemalloc": False,
        "enable_sse": False,
        "use_rtti": False,
    }

    @property
    def _min_cppstd(self):
        # RocksDB moved from C++11 to C++17 with release 8.8.1.
        return "11" if Version(self.version) < "8.8.1" else "17"

    @property
    def _compilers_minimum_version(self):
        # Minimum compiler versions required for the C++17 code base; the
        # older C++11 releases need no extra constraint.
        return {} if self._min_cppstd == "11" else {
            "apple-clang": "10",
            "clang": "7",
            "gcc": "7",
            "msvc": "191",
            "Visual Studio": "15",
        }

    def export_sources(self):
        export_conandata_patches(self)

    def config_options(self):
        if self.settings.os == "Windows":
            del self.options.fPIC
        if self.settings.arch != "x86_64":
            # onetbb is only pulled in on x86_64.
            del self.options.with_tbb
        if self.settings.build_type == "Debug":
            self.options.use_rtti = True  # Rtti are used in asserts for debug mode...

    def configure(self):
        if self.options.shared:
            self.options.rm_safe("fPIC")

    def layout(self):
        cmake_layout(self, src_folder="src")

    def requirements(self):
        # Each dependency is optional and gated on its corresponding option.
        if self.options.with_gflags:
            self.requires("gflags/2.2.2")
        if self.options.with_snappy:
            self.requires("snappy/1.1.10")
        if self.options.with_lz4:
            self.requires("lz4/1.10.0")
        if self.options.with_zlib:
            self.requires("zlib/[>=1.2.11 <2]")
        if self.options.with_zstd:
            self.requires("zstd/1.5.6")
        if self.options.get_safe("with_tbb"):
            self.requires("onetbb/2021.12.0")
        if self.options.with_jemalloc:
            self.requires("jemalloc/5.3.0")

    def validate(self):
        if self.settings.compiler.get_safe("cppstd"):
            check_min_cppstd(self, self._min_cppstd)
        minimum_version = self._compilers_minimum_version.get(str(self.settings.compiler), False)
        if minimum_version and Version(self.settings.compiler.version) < minimum_version:
            raise ConanInvalidConfiguration(
                f"{self.ref} requires C++{self._min_cppstd}, which your compiler does not support."
            )
        # RocksDB only supports 64-bit targets.
        if self.settings.arch not in ["x86_64", "ppc64le", "ppc64", "mips64", "armv8"]:
            raise ConanInvalidConfiguration("Rocksdb requires 64 bits")
        check_min_vs(self, "191")
        if self.version == "6.20.3" and \
           self.settings.os == "Linux" and \
           self.settings.compiler == "gcc" and \
           Version(self.settings.compiler.version) < "5":
            raise ConanInvalidConfiguration("Rocksdb 6.20.3 is not compilable with gcc <5.")  # See https://github.com/facebook/rocksdb/issues/3522

    def source(self):
        get(self, **self.conan_data["sources"][self.version], strip_root=True)

    def generate(self):
        # Translate recipe options into RocksDB's CMake cache variables.
        tc = CMakeToolchain(self)
        tc.variables["FAIL_ON_WARNINGS"] = False
        tc.variables["WITH_TESTS"] = False
        tc.variables["WITH_TOOLS"] = False
        tc.variables["WITH_CORE_TOOLS"] = False
        tc.variables["WITH_BENCHMARK_TOOLS"] = False
        tc.variables["WITH_FOLLY_DISTRIBUTED_MUTEX"] = False
        if is_msvc(self):
            tc.variables["WITH_MD_LIBRARY"] = not is_msvc_static_runtime(self)
        tc.variables["ROCKSDB_INSTALL_ON_WINDOWS"] = self.settings.os == "Windows"
        tc.variables["ROCKSDB_LITE"] = self.options.lite
        tc.variables["WITH_GFLAGS"] = self.options.with_gflags
        tc.variables["WITH_SNAPPY"] = self.options.with_snappy
        tc.variables["WITH_LZ4"] = self.options.with_lz4
        tc.variables["WITH_ZLIB"] = self.options.with_zlib
        tc.variables["WITH_ZSTD"] = self.options.with_zstd
        tc.variables["WITH_TBB"] = self.options.get_safe("with_tbb", False)
        tc.variables["WITH_JEMALLOC"] = self.options.with_jemalloc
        tc.variables["ROCKSDB_BUILD_SHARED"] = self.options.shared
        tc.variables["ROCKSDB_LIBRARY_EXPORTS"] = self.settings.os == "Windows" and self.options.shared
        tc.variables["ROCKSDB_DLL"] = self.settings.os == "Windows" and self.options.shared
        tc.variables["USE_RTTI"] = self.options.use_rtti
        # PORTABLE / FORCE_SSE42 jointly select the target ISA level.
        if not bool(self.options.enable_sse):
            tc.variables["PORTABLE"] = True
            tc.variables["FORCE_SSE42"] = False
        elif self.options.enable_sse == "sse42":
            tc.variables["PORTABLE"] = True
            tc.variables["FORCE_SSE42"] = True
        elif self.options.enable_sse == "avx2":
            tc.variables["PORTABLE"] = False
            tc.variables["FORCE_SSE42"] = False
        # not available yet in CCI
        tc.variables["WITH_NUMA"] = False
        tc.generate()
        deps = CMakeDeps(self)
        # Match the CMake package/target names RocksDB's build expects.
        if self.options.with_jemalloc:
            deps.set_property("jemalloc", "cmake_file_name", "JeMalloc")
            deps.set_property("jemalloc", "cmake_target_name", "JeMalloc::JeMalloc")
        if self.options.with_zstd:
            deps.set_property("zstd", "cmake_target_name", "zstd::zstd")
        deps.generate()

    def build(self):
        apply_conandata_patches(self)
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def _remove_static_libraries(self):
        # Drop static archives from a shared build, but keep MinGW import
        # libraries (*.dll.a), which are needed to link against the DLL.
        rm(self, "rocksdb.lib", os.path.join(self.package_folder, "lib"))
        for lib in glob.glob(os.path.join(self.package_folder, "lib", "*.a")):
            if not lib.endswith(".dll.a"):
                os.remove(lib)

    def _remove_cpp_headers(self):
        # Keep only the C API header (c.h) so shared builds expose a stable ABI.
        for path in glob.glob(os.path.join(self.package_folder, "include", "rocksdb", "*")):
            if path != os.path.join(self.package_folder, "include", "rocksdb", "c.h"):
                if os.path.isfile(path):
                    os.remove(path)
                else:
                    shutil.rmtree(path)

    def package(self):
        copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
        copy(self, "LICENSE*", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
        cmake = CMake(self)
        cmake.install()
        if self.options.shared:
            self._remove_static_libraries()
            self._remove_cpp_headers()  # Force stable ABI for shared libraries
        rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
        rmdir(self, os.path.join(self.package_folder, "lib", "pkgconfig"))

    def package_info(self):
        cmake_target = "rocksdb-shared" if self.options.shared else "rocksdb"
        self.cpp_info.set_property("cmake_file_name", "RocksDB")
        self.cpp_info.set_property("cmake_target_name", f"RocksDB::{cmake_target}")
        # TODO: back to global scope in conan v2 once cmake_find_package* generators removed
        self.cpp_info.components["librocksdb"].libs = collect_libs(self)
        if self.settings.os == "Windows":
            self.cpp_info.components["librocksdb"].system_libs = ["shlwapi", "rpcrt4"]
            if self.options.shared:
                self.cpp_info.components["librocksdb"].defines = ["ROCKSDB_DLL"]
        elif self.settings.os in ["Linux", "FreeBSD"]:
            self.cpp_info.components["librocksdb"].system_libs = ["pthread", "m"]
        if self.options.lite:
            self.cpp_info.components["librocksdb"].defines.append("ROCKSDB_LITE")
        # TODO: to remove in conan v2 once cmake_find_package* generators removed
        self.cpp_info.names["cmake_find_package"] = "RocksDB"
        self.cpp_info.names["cmake_find_package_multi"] = "RocksDB"
        self.cpp_info.components["librocksdb"].names["cmake_find_package"] = cmake_target
        self.cpp_info.components["librocksdb"].names["cmake_find_package_multi"] = cmake_target
        self.cpp_info.components["librocksdb"].set_property("cmake_target_name", f"RocksDB::{cmake_target}")
        # Propagate requirements for each enabled optional dependency.
        if self.options.with_gflags:
            self.cpp_info.components["librocksdb"].requires.append("gflags::gflags")
        if self.options.with_snappy:
            self.cpp_info.components["librocksdb"].requires.append("snappy::snappy")
        if self.options.with_lz4:
            self.cpp_info.components["librocksdb"].requires.append("lz4::lz4")
        if self.options.with_zlib:
            self.cpp_info.components["librocksdb"].requires.append("zlib::zlib")
        if self.options.with_zstd:
            self.cpp_info.components["librocksdb"].requires.append("zstd::zstd")
        if self.options.get_safe("with_tbb"):
            self.cpp_info.components["librocksdb"].requires.append("onetbb::onetbb")
        if self.options.with_jemalloc:
            self.cpp_info.components["librocksdb"].requires.append("jemalloc::jemalloc")

View File

@@ -1,319 +0,0 @@
diff --git a/HISTORY.md b/HISTORY.md
index 36d472229..05ad1a202 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,6 +1,10 @@
# Rocksdb Change Log
> NOTE: Entries for next release do not go here. Follow instructions in `unreleased_history/README.txt`
+## 9.7.4 (10/31/2024)
+### Bug Fixes
+* Fix a leak of obsolete blob files left open until DB::Close(). This bug was introduced in version 9.4.0.
+
## 9.7.3 (10/16/2024)
### Behavior Changes
* OPTIONS file to be loaded by remote worker is now preserved so that it does not get purged by the primary host. A similar technique as how we are preserving new SST files from getting purged is used for this. min_options_file_numbers_ is tracked like pending_outputs_ is tracked.
diff --git a/db/blob/blob_file_cache.cc b/db/blob/blob_file_cache.cc
index 5f340aadf..1b9faa238 100644
--- a/db/blob/blob_file_cache.cc
+++ b/db/blob/blob_file_cache.cc
@@ -42,6 +42,7 @@ Status BlobFileCache::GetBlobFileReader(
assert(blob_file_reader);
assert(blob_file_reader->IsEmpty());
+ // NOTE: sharing same Cache with table_cache
const Slice key = GetSliceForKey(&blob_file_number);
assert(cache_);
@@ -98,4 +99,13 @@ Status BlobFileCache::GetBlobFileReader(
return Status::OK();
}
+void BlobFileCache::Evict(uint64_t blob_file_number) {
+ // NOTE: sharing same Cache with table_cache
+ const Slice key = GetSliceForKey(&blob_file_number);
+
+ assert(cache_);
+
+ cache_.get()->Erase(key);
+}
+
} // namespace ROCKSDB_NAMESPACE
diff --git a/db/blob/blob_file_cache.h b/db/blob/blob_file_cache.h
index 740e67ada..6858d012b 100644
--- a/db/blob/blob_file_cache.h
+++ b/db/blob/blob_file_cache.h
@@ -36,6 +36,15 @@ class BlobFileCache {
uint64_t blob_file_number,
CacheHandleGuard<BlobFileReader>* blob_file_reader);
+ // Called when a blob file is obsolete to ensure it is removed from the cache
+ // to avoid effectively leaking the open file and assicated memory
+ void Evict(uint64_t blob_file_number);
+
+ // Used to identify cache entries for blob files (not normally useful)
+ static const Cache::CacheItemHelper* GetHelper() {
+ return CacheInterface::GetBasicHelper();
+ }
+
private:
using CacheInterface =
BasicTypedCacheInterface<BlobFileReader, CacheEntryRole::kMisc>;
diff --git a/db/column_family.h b/db/column_family.h
index e4b7adde8..86637736a 100644
--- a/db/column_family.h
+++ b/db/column_family.h
@@ -401,6 +401,7 @@ class ColumnFamilyData {
SequenceNumber earliest_seq);
TableCache* table_cache() const { return table_cache_.get(); }
+ BlobFileCache* blob_file_cache() const { return blob_file_cache_.get(); }
BlobSource* blob_source() const { return blob_source_.get(); }
// See documentation in compaction_picker.h
diff --git a/db/db_impl/db_impl.cc b/db/db_impl/db_impl.cc
index 261593423..06573ac2e 100644
--- a/db/db_impl/db_impl.cc
+++ b/db/db_impl/db_impl.cc
@@ -659,8 +659,9 @@ Status DBImpl::CloseHelper() {
// We need to release them before the block cache is destroyed. The block
// cache may be destroyed inside versions_.reset(), when column family data
// list is destroyed, so leaving handles in table cache after
- // versions_.reset() may cause issues.
- // Here we clean all unreferenced handles in table cache.
+ // versions_.reset() may cause issues. Here we clean all unreferenced handles
+ // in table cache, and (for certain builds/conditions) assert that no obsolete
+ // files are hanging around unreferenced (leak) in the table/blob file cache.
// Now we assume all user queries have finished, so only version set itself
// can possibly hold the blocks from block cache. After releasing unreferenced
// handles here, only handles held by version set left and inside
@@ -668,6 +669,9 @@ Status DBImpl::CloseHelper() {
// time a handle is released, we erase it from the cache too. By doing that,
// we can guarantee that after versions_.reset(), table cache is empty
// so the cache can be safely destroyed.
+#ifndef NDEBUG
+ TEST_VerifyNoObsoleteFilesCached(/*db_mutex_already_held=*/true);
+#endif // !NDEBUG
table_cache_->EraseUnRefEntries();
for (auto& txn_entry : recovered_transactions_) {
@@ -3227,6 +3231,8 @@ Status DBImpl::MultiGetImpl(
s = Status::Aborted();
break;
}
+ // This could be a long-running operation
+ ROCKSDB_THREAD_YIELD_HOOK();
}
// Post processing (decrement reference counts and record statistics)
diff --git a/db/db_impl/db_impl.h b/db/db_impl/db_impl.h
index 5e4fa310b..ccc0abfa7 100644
--- a/db/db_impl/db_impl.h
+++ b/db/db_impl/db_impl.h
@@ -1241,9 +1241,14 @@ class DBImpl : public DB {
static Status TEST_ValidateOptions(const DBOptions& db_options) {
return ValidateOptions(db_options);
}
-
#endif // NDEBUG
+ // In certain configurations, verify that the table/blob file cache only
+ // contains entries for live files, to check for effective leaks of open
+ // files. This can only be called when purging of obsolete files has
+ // "settled," such as during parts of DB Close().
+ void TEST_VerifyNoObsoleteFilesCached(bool db_mutex_already_held) const;
+
// persist stats to column family "_persistent_stats"
void PersistStats();
diff --git a/db/db_impl/db_impl_debug.cc b/db/db_impl/db_impl_debug.cc
index 790a50d7a..67f5b4aaf 100644
--- a/db/db_impl/db_impl_debug.cc
+++ b/db/db_impl/db_impl_debug.cc
@@ -9,6 +9,7 @@
#ifndef NDEBUG
+#include "db/blob/blob_file_cache.h"
#include "db/column_family.h"
#include "db/db_impl/db_impl.h"
#include "db/error_handler.h"
@@ -328,5 +329,49 @@ size_t DBImpl::TEST_EstimateInMemoryStatsHistorySize() const {
InstrumentedMutexLock l(&const_cast<DBImpl*>(this)->stats_history_mutex_);
return EstimateInMemoryStatsHistorySize();
}
+
+void DBImpl::TEST_VerifyNoObsoleteFilesCached(
+ bool db_mutex_already_held) const {
+ // This check is somewhat expensive and obscure to make a part of every
+ // unit test in every build variety. Thus, we only enable it for ASAN builds.
+ if (!kMustFreeHeapAllocations) {
+ return;
+ }
+
+ std::optional<InstrumentedMutexLock> l;
+ if (db_mutex_already_held) {
+ mutex_.AssertHeld();
+ } else {
+ l.emplace(&mutex_);
+ }
+
+ std::vector<uint64_t> live_files;
+ for (auto cfd : *versions_->GetColumnFamilySet()) {
+ if (cfd->IsDropped()) {
+ continue;
+ }
+ // Sneakily add both SST and blob files to the same list
+ cfd->current()->AddLiveFiles(&live_files, &live_files);
+ }
+ std::sort(live_files.begin(), live_files.end());
+
+ auto fn = [&live_files](const Slice& key, Cache::ObjectPtr, size_t,
+ const Cache::CacheItemHelper* helper) {
+ if (helper != BlobFileCache::GetHelper()) {
+ // Skip non-blob files for now
+ // FIXME: diagnose and fix the leaks of obsolete SST files revealed in
+ // unit tests.
+ return;
+ }
+ // See TableCache and BlobFileCache
+ assert(key.size() == sizeof(uint64_t));
+ uint64_t file_number;
+ GetUnaligned(reinterpret_cast<const uint64_t*>(key.data()), &file_number);
+ // Assert file is in sorted live_files
+ assert(
+ std::binary_search(live_files.begin(), live_files.end(), file_number));
+ };
+ table_cache_->ApplyToAllEntries(fn, {});
+}
} // namespace ROCKSDB_NAMESPACE
#endif // NDEBUG
diff --git a/db/db_iter.cc b/db/db_iter.cc
index e02586377..bf4749eb9 100644
--- a/db/db_iter.cc
+++ b/db/db_iter.cc
@@ -540,6 +540,8 @@ bool DBIter::FindNextUserEntryInternal(bool skipping_saved_key,
} else {
iter_.Next();
}
+ // This could be a long-running operation due to tombstones, etc.
+ ROCKSDB_THREAD_YIELD_HOOK();
} while (iter_.Valid());
valid_ = false;
diff --git a/db/table_cache.cc b/db/table_cache.cc
index 71fc29c32..8a5be75e8 100644
--- a/db/table_cache.cc
+++ b/db/table_cache.cc
@@ -164,6 +164,7 @@ Status TableCache::GetTableReader(
}
Cache::Handle* TableCache::Lookup(Cache* cache, uint64_t file_number) {
+ // NOTE: sharing same Cache with BlobFileCache
Slice key = GetSliceForFileNumber(&file_number);
return cache->Lookup(key);
}
@@ -179,6 +180,7 @@ Status TableCache::FindTable(
size_t max_file_size_for_l0_meta_pin, Temperature file_temperature) {
PERF_TIMER_GUARD_WITH_CLOCK(find_table_nanos, ioptions_.clock);
uint64_t number = file_meta.fd.GetNumber();
+ // NOTE: sharing same Cache with BlobFileCache
Slice key = GetSliceForFileNumber(&number);
*handle = cache_.Lookup(key);
TEST_SYNC_POINT_CALLBACK("TableCache::FindTable:0",
diff --git a/db/version_builder.cc b/db/version_builder.cc
index ed8ab8214..c98f53f42 100644
--- a/db/version_builder.cc
+++ b/db/version_builder.cc
@@ -24,6 +24,7 @@
#include <vector>
#include "cache/cache_reservation_manager.h"
+#include "db/blob/blob_file_cache.h"
#include "db/blob/blob_file_meta.h"
#include "db/dbformat.h"
#include "db/internal_stats.h"
@@ -744,12 +745,9 @@ class VersionBuilder::Rep {
return Status::Corruption("VersionBuilder", oss.str());
}
- // Note: we use C++11 for now but in C++14, this could be done in a more
- // elegant way using generalized lambda capture.
- VersionSet* const vs = version_set_;
- const ImmutableCFOptions* const ioptions = ioptions_;
-
- auto deleter = [vs, ioptions](SharedBlobFileMetaData* shared_meta) {
+ auto deleter = [vs = version_set_, ioptions = ioptions_,
+ bc = cfd_ ? cfd_->blob_file_cache()
+ : nullptr](SharedBlobFileMetaData* shared_meta) {
if (vs) {
assert(ioptions);
assert(!ioptions->cf_paths.empty());
@@ -758,6 +756,9 @@ class VersionBuilder::Rep {
vs->AddObsoleteBlobFile(shared_meta->GetBlobFileNumber(),
ioptions->cf_paths.front().path);
}
+ if (bc) {
+ bc->Evict(shared_meta->GetBlobFileNumber());
+ }
delete shared_meta;
};
@@ -766,7 +767,7 @@ class VersionBuilder::Rep {
blob_file_number, blob_file_addition.GetTotalBlobCount(),
blob_file_addition.GetTotalBlobBytes(),
blob_file_addition.GetChecksumMethod(),
- blob_file_addition.GetChecksumValue(), deleter);
+ blob_file_addition.GetChecksumValue(), std::move(deleter));
mutable_blob_file_metas_.emplace(
blob_file_number, MutableBlobFileMetaData(std::move(shared_meta)));
diff --git a/db/version_set.h b/db/version_set.h
index 9336782b1..024f869e7 100644
--- a/db/version_set.h
+++ b/db/version_set.h
@@ -1514,7 +1514,6 @@ class VersionSet {
void GetLiveFilesMetaData(std::vector<LiveFileMetaData>* metadata);
void AddObsoleteBlobFile(uint64_t blob_file_number, std::string path) {
- // TODO: Erase file from BlobFileCache?
obsolete_blob_files_.emplace_back(blob_file_number, std::move(path));
}
diff --git a/include/rocksdb/version.h b/include/rocksdb/version.h
index 2a19796b8..0afa2cab1 100644
--- a/include/rocksdb/version.h
+++ b/include/rocksdb/version.h
@@ -13,7 +13,7 @@
// minor or major version number planned for release.
#define ROCKSDB_MAJOR 9
#define ROCKSDB_MINOR 7
-#define ROCKSDB_PATCH 3
+#define ROCKSDB_PATCH 4
// Do not use these. We made the mistake of declaring macros starting with
// double underscore. Now we have to live with our choice. We'll deprecate these
diff --git a/port/port.h b/port/port.h
index 13aa56d47..141716e5b 100644
--- a/port/port.h
+++ b/port/port.h
@@ -19,3 +19,19 @@
#elif defined(OS_WIN)
#include "port/win/port_win.h"
#endif
+
+#ifdef OS_LINUX
+// A temporary hook into long-running RocksDB threads to support modifying their
+// priority etc. This should become a public API hook once the requirements
+// are better understood.
+extern "C" void RocksDbThreadYield() __attribute__((__weak__));
+#define ROCKSDB_THREAD_YIELD_HOOK() \
+ { \
+ if (RocksDbThreadYield) { \
+ RocksDbThreadYield(); \
+ } \
+ }
+#else
+#define ROCKSDB_THREAD_YIELD_HOOK() \
+ {}
+#endif

View File

@@ -1,30 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 93b884d..b715cb6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -106,14 +106,9 @@ endif()
include(CMakeDependentOption)
if(MSVC)
- option(WITH_GFLAGS "build with GFlags" OFF)
option(WITH_XPRESS "build with windows built in compression" OFF)
- option(ROCKSDB_SKIP_THIRDPARTY "skip thirdparty.inc" OFF)
-
- if(NOT ROCKSDB_SKIP_THIRDPARTY)
- include(${CMAKE_CURRENT_SOURCE_DIR}/thirdparty.inc)
- endif()
-else()
+endif()
+if(TRUE)
if(CMAKE_SYSTEM_NAME MATCHES "FreeBSD" AND NOT CMAKE_SYSTEM_NAME MATCHES "kFreeBSD")
# FreeBSD has jemalloc as default malloc
# but it does not have all the jemalloc files in include/...
@@ -126,7 +121,7 @@ else()
endif()
endif()
- if(MINGW)
+ if(MSVC OR MINGW)
option(WITH_GFLAGS "build with GFlags" OFF)
else()
option(WITH_GFLAGS "build with GFlags" ON)

View File

@@ -70,7 +70,7 @@ class SociConan(ConanFile):
if self.options.with_postgresql:
self.requires("libpq/15.5")
if self.options.with_boost:
self.requires("boost/1.83.0")
self.requires("boost/1.86.0")
@property
def _minimum_compilers_version(self):
@@ -154,7 +154,7 @@ class SociConan(ConanFile):
self.cpp_info.components["soci_core"].set_property("cmake_target_name", "SOCI::soci_core{}".format(target_suffix))
self.cpp_info.components["soci_core"].libs = ["{}soci_core{}".format(lib_prefix, lib_suffix)]
if self.options.with_boost:
self.cpp_info.components["soci_core"].requires.append("boost::boost")
self.cpp_info.components["soci_core"].requires.append("boost::headers")
# soci_empty
if self.options.empty:

View File

@@ -26,6 +26,7 @@
#include <boost/beast/core/string.hpp>
#include <boost/filesystem.hpp>
#include <fstream>
#include <map>
#include <memory>
#include <mutex>

View File

@@ -22,6 +22,7 @@
#include <xrpl/basics/IntrusivePointer.ipp>
#include <xrpl/basics/TaggedCache.h>
#include <xrpl/beast/core/CurrentThreadName.h>
namespace ripple {
@@ -898,6 +899,8 @@ TaggedCache<
std::lock_guard<std::recursive_mutex> const&)
{
return std::thread([&, this]() {
beast::setCurrentThreadName("sweep-KeyValueCache");
int cacheRemovals = 0;
int mapRemovals = 0;
@@ -987,6 +990,8 @@ TaggedCache<
std::lock_guard<std::recursive_mutex> const&)
{
return std::thread([&, this]() {
beast::setCurrentThreadName("sweep-KeyOnlyCache");
int cacheRemovals = 0;
int mapRemovals = 0;

View File

@@ -3257,7 +3257,6 @@ operator==(aged_unordered_container<
{
if (size() != other.size())
return false;
using EqRng = std::pair<const_iterator, const_iterator>;
for (auto iter(cbegin()), last(cend()); iter != last;)
{
auto const& k(extract(*iter));

View File

@@ -32,6 +32,7 @@
// If you add an amendment here, then do not forget to increment `numFeatures`
// in include/xrpl/protocol/Feature.h.
XRPL_FIX (MPTDeliveredAmount, Supported::no, VoteBehavior::DefaultNo)
XRPL_FIX (AMMClawbackRounding, Supported::no, VoteBehavior::DefaultNo)
XRPL_FEATURE(TokenEscrow, Supported::yes, VoteBehavior::DefaultNo)
XRPL_FIX (EnforceNFTokenTrustlineV2, Supported::yes, VoteBehavior::DefaultNo)

View File

@@ -509,6 +509,7 @@ TRANSACTION(ttVAULT_WITHDRAW, 69, VaultWithdraw, Delegation::delegatable, ({
{sfVaultID, soeREQUIRED},
{sfAmount, soeREQUIRED, soeMPTSupported},
{sfDestination, soeOPTIONAL},
{sfDestinationTag, soeOPTIONAL},
}))
/** This transaction claws back tokens from a vault. */

View File

@@ -28,6 +28,7 @@
#include <cerrno>
#include <cstddef>
#include <fstream>
#include <ios>
#include <iterator>
#include <optional>
@@ -55,7 +56,7 @@ getFileContents(
return {};
}
ifstream fileStream(fullPath, std::ios::in);
std::ifstream fileStream(fullPath.string(), std::ios::in);
if (!fileStream)
{
@@ -85,7 +86,8 @@ writeFileContents(
using namespace boost::filesystem;
using namespace boost::system::errc;
ofstream fileStream(destPath, std::ios::out | std::ios::trunc);
std::ofstream fileStream(
destPath.string(), std::ios::out | std::ios::trunc);
if (!fileStream)
{

View File

@@ -107,8 +107,9 @@ sliceToHex(Slice const& slice)
}
for (int i = 0; i < slice.size(); ++i)
{
s += "0123456789ABCDEF"[((slice[i] & 0xf0) >> 4)];
s += "0123456789ABCDEF"[((slice[i] & 0x0f) >> 0)];
constexpr char hex[] = "0123456789ABCDEF";
s += hex[((slice[i] & 0xf0) >> 4)];
s += hex[((slice[i] & 0x0f) >> 0)];
}
return s;
}

View File

@@ -671,12 +671,12 @@ isMemoOkay(STObject const& st, std::string& reason)
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"abcdefghijklmnopqrstuvwxyz");
for (char c : symbols)
for (unsigned char c : symbols)
a[c] = 1;
return a;
}();
for (auto c : *optData)
for (unsigned char c : *optData)
{
if (!allowedSymbols[c])
{

View File

@@ -544,7 +544,7 @@ b58_to_b256_be(std::string_view input, std::span<std::uint8_t> out)
XRPL_ASSERT(
num_b_58_10_coeffs <= b_58_10_coeff.size(),
"ripple::b58_fast::detail::b58_to_b256_be : maximum coeff");
for (auto c : input.substr(0, partial_coeff_len))
for (unsigned char c : input.substr(0, partial_coeff_len))
{
auto cur_val = ::ripple::alphabetReverse[c];
if (cur_val < 0)
@@ -558,7 +558,7 @@ b58_to_b256_be(std::string_view input, std::span<std::uint8_t> out)
{
for (int j = 0; j < num_full_coeffs; ++j)
{
auto c = input[partial_coeff_len + j * 10 + i];
unsigned char c = input[partial_coeff_len + j * 10 + i];
auto cur_val = ::ripple::alphabetReverse[c];
if (cur_val < 0)
{

View File

@@ -229,7 +229,6 @@ class RCLValidations_test : public beast::unit_test::suite
// support for a ledger hash which is already in the trie.
using Seq = RCLValidatedLedger::Seq;
using ID = RCLValidatedLedger::ID;
// Max known ancestors for each ledger
Seq const maxAncestors = 256;

View File

@@ -234,6 +234,28 @@ class Vault_test : public beast::unit_test::suite
env(tx, ter{tecNO_PERMISSION});
}
{
testcase(prefix + " fail to withdraw to zero destination");
auto tx = vault.withdraw(
{.depositor = depositor,
.id = keylet.key,
.amount = asset(1000)});
tx[sfDestination] = "0";
env(tx, ter(temMALFORMED));
}
{
testcase(
prefix +
" fail to withdraw with tag but without destination");
auto tx = vault.withdraw(
{.depositor = depositor,
.id = keylet.key,
.amount = asset(1000)});
tx[sfDestinationTag] = "0";
env(tx, ter(temMALFORMED));
}
if (!asset.raw().native())
{
testcase(
@@ -1335,6 +1357,7 @@ class Vault_test : public beast::unit_test::suite
struct CaseArgs
{
bool enableClawback = true;
bool requireAuth = true;
};
auto testCase = [this](
@@ -1356,16 +1379,20 @@ class Vault_test : public beast::unit_test::suite
Vault vault{env};
MPTTester mptt{env, issuer, mptInitNoFund};
auto const none = LedgerSpecificFlags(0);
mptt.create(
{.flags = tfMPTCanTransfer | tfMPTCanLock |
(args.enableClawback ? lsfMPTCanClawback
: LedgerSpecificFlags(0)) |
tfMPTRequireAuth});
(args.enableClawback ? tfMPTCanClawback : none) |
(args.requireAuth ? tfMPTRequireAuth : none)});
PrettyAsset asset = mptt.issuanceID();
mptt.authorize({.account = owner});
mptt.authorize({.account = issuer, .holder = owner});
mptt.authorize({.account = depositor});
mptt.authorize({.account = issuer, .holder = depositor});
if (args.requireAuth)
{
mptt.authorize({.account = issuer, .holder = owner});
mptt.authorize({.account = issuer, .holder = depositor});
}
env(pay(issuer, depositor, asset(1000)));
env.close();
@@ -1514,6 +1541,100 @@ class Vault_test : public beast::unit_test::suite
}
});
testCase(
[this](
Env& env,
Account const& issuer,
Account const& owner,
Account const& depositor,
PrettyAsset const& asset,
Vault& vault,
MPTTester& mptt) {
testcase(
"MPT 3rd party without MPToken cannot be withdrawal "
"destination");
auto [tx, keylet] =
vault.create({.owner = owner, .asset = asset});
env(tx);
env.close();
tx = vault.deposit(
{.depositor = depositor,
.id = keylet.key,
.amount = asset(100)});
env(tx);
env.close();
{
// Set destination to 3rd party without MPToken
Account charlie{"charlie"};
env.fund(XRP(1000), charlie);
env.close();
tx = vault.withdraw(
{.depositor = depositor,
.id = keylet.key,
.amount = asset(100)});
tx[sfDestination] = charlie.human();
env(tx, ter(tecNO_AUTH));
}
},
{.requireAuth = false});
testCase(
[this](
Env& env,
Account const& issuer,
Account const& owner,
Account const& depositor,
PrettyAsset const& asset,
Vault& vault,
MPTTester& mptt) {
testcase("MPT depositor without MPToken cannot withdraw");
auto [tx, keylet] =
vault.create({.owner = owner, .asset = asset});
env(tx);
env.close();
tx = vault.deposit(
{.depositor = depositor,
.id = keylet.key,
.amount = asset(1000)});
env(tx);
env.close();
{
// Remove depositor's MPToken and withdraw will fail
mptt.authorize(
{.account = depositor, .flags = tfMPTUnauthorize});
env.close();
auto const mptoken =
env.le(keylet::mptoken(mptt.issuanceID(), depositor));
BEAST_EXPECT(mptoken == nullptr);
tx = vault.withdraw(
{.depositor = depositor,
.id = keylet.key,
.amount = asset(100)});
env(tx, ter(tecNO_AUTH));
}
{
// Restore depositor's MPToken and withdraw will succeed
mptt.authorize({.account = depositor});
env.close();
tx = vault.withdraw(
{.depositor = depositor,
.id = keylet.key,
.amount = asset(100)});
env(tx);
}
},
{.requireAuth = false});
testCase([this](
Env& env,
Account const& issuer,
@@ -1803,6 +1924,7 @@ class Vault_test : public beast::unit_test::suite
PrettyAsset const asset = issuer["IOU"];
env.trust(asset(1000), owner);
env.trust(asset(1000), charlie);
env(pay(issuer, owner, asset(200)));
env(rate(issuer, 1.25));
env.close();
@@ -2118,6 +2240,79 @@ class Vault_test : public beast::unit_test::suite
env.close();
});
testCase([&, this](
Env& env,
Account const& owner,
Account const& issuer,
Account const& charlie,
auto,
Vault& vault,
PrettyAsset const& asset,
auto&&...) {
testcase("IOU no trust line to 3rd party");
auto [tx, keylet] = vault.create({.owner = owner, .asset = asset});
env(tx);
env.close();
env(vault.deposit(
{.depositor = owner, .id = keylet.key, .amount = asset(100)}));
env.close();
Account const erin{"erin"};
env.fund(XRP(1000), erin);
env.close();
// Withdraw to 3rd party without trust line
auto const tx1 = [&](ripple::Keylet keylet) {
auto tx = vault.withdraw(
{.depositor = owner,
.id = keylet.key,
.amount = asset(10)});
tx[sfDestination] = erin.human();
return tx;
}(keylet);
env(tx1, ter{tecNO_LINE});
});
testCase([&, this](
Env& env,
Account const& owner,
Account const& issuer,
Account const& charlie,
auto,
Vault& vault,
PrettyAsset const& asset,
auto&&...) {
testcase("IOU no trust line to depositor");
auto [tx, keylet] = vault.create({.owner = owner, .asset = asset});
env(tx);
env.close();
// reset limit, so deposit of all funds will delete the trust line
env.trust(asset(0), owner);
env.close();
env(vault.deposit(
{.depositor = owner, .id = keylet.key, .amount = asset(200)}));
env.close();
auto trustline =
env.le(keylet::line(owner, asset.raw().get<Issue>()));
BEAST_EXPECT(trustline == nullptr);
// Withdraw without trust line, will succeed
auto const tx1 = [&](ripple::Keylet keylet) {
auto tx = vault.withdraw(
{.depositor = owner,
.id = keylet.key,
.amount = asset(10)});
return tx;
}(keylet);
env(tx1);
});
testCase([&, this](
Env& env,
Account const& owner,

View File

@@ -703,10 +703,6 @@ aged_associative_container_test_base::checkContentsRefRef(
Values const& v)
{
using Cont = typename std::remove_reference<C>::type;
using Traits = TestTraits<
Cont::is_unordered::value,
Cont::is_multi::value,
Cont::is_map::value>;
using size_type = typename Cont::size_type;
BEAST_EXPECT(c.size() == v.size());
@@ -761,10 +757,6 @@ typename std::enable_if<!IsUnordered>::type
aged_associative_container_test_base::testConstructEmpty()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
using Key = typename Traits::Key;
using T = typename Traits::T;
using Clock = typename Traits::Clock;
using Comp = typename Traits::Comp;
using Alloc = typename Traits::Alloc;
using MyComp = typename Traits::MyComp;
@@ -802,10 +794,6 @@ typename std::enable_if<IsUnordered>::type
aged_associative_container_test_base::testConstructEmpty()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
using Key = typename Traits::Key;
using T = typename Traits::T;
using Clock = typename Traits::Clock;
using Hash = typename Traits::Hash;
using Equal = typename Traits::Equal;
using Alloc = typename Traits::Alloc;
@@ -870,10 +858,6 @@ typename std::enable_if<!IsUnordered>::type
aged_associative_container_test_base::testConstructRange()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
using Key = typename Traits::Key;
using T = typename Traits::T;
using Clock = typename Traits::Clock;
using Comp = typename Traits::Comp;
using Alloc = typename Traits::Alloc;
using MyComp = typename Traits::MyComp;
@@ -925,10 +909,6 @@ typename std::enable_if<IsUnordered>::type
aged_associative_container_test_base::testConstructRange()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
using Key = typename Traits::Key;
using T = typename Traits::T;
using Clock = typename Traits::Clock;
using Hash = typename Traits::Hash;
using Equal = typename Traits::Equal;
using Alloc = typename Traits::Alloc;
@@ -996,14 +976,6 @@ typename std::enable_if<!IsUnordered>::type
aged_associative_container_test_base::testConstructInitList()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
using Key = typename Traits::Key;
using T = typename Traits::T;
using Clock = typename Traits::Clock;
using Comp = typename Traits::Comp;
using Alloc = typename Traits::Alloc;
using MyComp = typename Traits::MyComp;
using MyAlloc = typename Traits::MyAlloc;
typename Traits::ManualClock clock;
// testcase (Traits::name() + " init-list");
@@ -1020,16 +992,6 @@ typename std::enable_if<IsUnordered>::type
aged_associative_container_test_base::testConstructInitList()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
using Key = typename Traits::Key;
using T = typename Traits::T;
using Clock = typename Traits::Clock;
using Hash = typename Traits::Hash;
using Equal = typename Traits::Equal;
using Alloc = typename Traits::Alloc;
using MyHash = typename Traits::MyHash;
using MyEqual = typename Traits::MyEqual;
using MyAlloc = typename Traits::MyAlloc;
typename Traits::ManualClock clock;
// testcase (Traits::name() + " init-list");
@@ -1050,7 +1012,6 @@ void
aged_associative_container_test_base::testCopyMove()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
using Alloc = typename Traits::Alloc;
typename Traits::ManualClock clock;
auto const v(Traits::values());
@@ -1121,8 +1082,6 @@ void
aged_associative_container_test_base::testIterator()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
using Alloc = typename Traits::Alloc;
typename Traits::ManualClock clock;
auto const v(Traits::values());
@@ -1179,8 +1138,6 @@ typename std::enable_if<!IsUnordered>::type
aged_associative_container_test_base::testReverseIterator()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
using Alloc = typename Traits::Alloc;
typename Traits::ManualClock clock;
auto const v(Traits::values());
@@ -1190,7 +1147,6 @@ aged_associative_container_test_base::testReverseIterator()
typename Traits::template Cont<> c{clock};
using iterator = decltype(c.begin());
using const_iterator = decltype(c.cbegin());
using reverse_iterator = decltype(c.rbegin());
using const_reverse_iterator = decltype(c.crbegin());
@@ -1394,7 +1350,6 @@ void
aged_associative_container_test_base::testChronological()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
typename Traits::ManualClock clock;
auto const v(Traits::values());
@@ -1760,7 +1715,6 @@ typename std::enable_if<!IsUnordered>::type
aged_associative_container_test_base::testCompare()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
using Value = typename Traits::Value;
typename Traits::ManualClock clock;
auto const v(Traits::values());
@@ -1832,8 +1786,6 @@ template <bool IsUnordered, bool IsMulti, bool IsMap>
void
aged_associative_container_test_base::testMaybeUnorderedMultiMap()
{
using Traits = TestTraits<IsUnordered, IsMulti, IsMap>;
testConstructEmpty<IsUnordered, IsMulti, IsMap>();
testConstructRange<IsUnordered, IsMulti, IsMap>();
testConstructInitList<IsUnordered, IsMulti, IsMap>();

View File

@@ -313,7 +313,6 @@ class LedgerTrie_test : public beast::unit_test::suite
testSupport()
{
using namespace csf;
using Seq = Ledger::Seq;
LedgerTrie<Ledger> t;
LedgerHistoryHelper h;
@@ -596,7 +595,6 @@ class LedgerTrie_test : public beast::unit_test::suite
testRootRelated()
{
using namespace csf;
using Seq = Ledger::Seq;
// Since the root is a special node that breaks the no-single child
// invariant, do some tests that exercise it.

View File

@@ -805,7 +805,6 @@ class Validations_test : public beast::unit_test::suite
Ledger ledgerACD = h["acd"];
using Seq = Ledger::Seq;
using ID = Ledger::ID;
auto pref = [](Ledger ledger) {
return std::make_pair(ledger.seq(), ledger.id());

View File

@@ -33,6 +33,7 @@
#include <boost/asio.hpp>
#include <boost/asio/ip/tcp.hpp>
#include <boost/asio/ssl/stream.hpp>
#include <boost/beast/core/flat_buffer.hpp>
#include <boost/beast/http.hpp>
#include <boost/beast/ssl.hpp>
#include <boost/beast/version.hpp>
@@ -220,9 +221,8 @@ public:
getList_ = [blob = blob, sig, manifest, version](int interval) {
// Build the contents of a version 1 format UNL file
std::stringstream l;
l << "{\"blob\":\"" << blob << "\""
<< ",\"signature\":\"" << sig << "\""
<< ",\"manifest\":\"" << manifest << "\""
l << "{\"blob\":\"" << blob << "\"" << ",\"signature\":\"" << sig
<< "\"" << ",\"manifest\":\"" << manifest << "\""
<< ",\"refresh_interval\": " << interval
<< ",\"version\":" << version << '}';
return l.str();
@@ -257,15 +257,14 @@ public:
std::stringstream l;
for (auto const& info : blobInfo)
{
l << "{\"blob\":\"" << info.blob << "\""
<< ",\"signature\":\"" << info.signature << "\"},";
l << "{\"blob\":\"" << info.blob << "\"" << ",\"signature\":\""
<< info.signature << "\"},";
}
std::string blobs = l.str();
blobs.pop_back();
l.str(std::string());
l << "{\"blobs_v2\": [ " << blobs << "],\"manifest\":\"" << manifest
<< "\""
<< ",\"refresh_interval\": " << interval
<< "\"" << ",\"refresh_interval\": " << interval
<< ",\"version\":" << (version + 1) << '}';
return l.str();
};

View File

@@ -21,6 +21,7 @@
#include <test/jtx/WSClient.h>
#include <xrpl/beast/unit_test.h>
#include <xrpl/beast/unit_test/suite.h>
#include <xrpl/protocol/jss.h>
namespace ripple {
@@ -329,12 +330,95 @@ class DeliveredAmount_test : public beast::unit_test::suite
}
}
void
testMPTDeliveredAmountRPC(FeatureBitset features)
{
    // Verify Payment transaction metadata for direct MPT transfers:
    // with fixMPTDeliveredAmount enabled, DeliveredAmount /
    // delivered_amount must report the amount actually received by the
    // destination (net of the issuer's transfer fee); without the
    // amendment, DeliveredAmount is absent and delivered_amount reads
    // "unavailable".
    testcase("MPT DeliveredAmount");
    using namespace jtx;

    Account const alice("alice");
    Account const carol("carol");
    Account const bob("bob");
    Env env{*this, features};

    MPTTester mptAlice(
        env, alice, {.holders = {bob, carol}, .close = false});

    // transferFee = 25000 is a 25% transfer fee charged on
    // holder-to-holder payments.
    mptAlice.create(
        {.transferFee = 25000,
         .ownerCount = 1,
         .holderCount = 0,
         .flags = tfMPTCanTransfer});
    auto const MPT = mptAlice["MPT"];
    mptAlice.authorize({.account = bob});
    mptAlice.authorize({.account = carol});

    // issuer to holder (no transfer fee applies to issuer payments)
    mptAlice.pay(alice, bob, 10000);

    // holder to holder: bob sends 1000, fee consumes 25%, carol gets 800
    env(pay(bob, carol, mptAlice.mpt(1000)), txflags(tfPartialPayment));
    env.close();

    // Get the hash for the most recent transaction.
    std::string txHash{
        env.tx()->getJson(JsonOptions::none)[jss::hash].asString()};

    Json::Value meta = env.rpc("tx", txHash)[jss::result][jss::meta];
    if (features[fixMPTDeliveredAmount])
    {
        BEAST_EXPECT(
            meta[sfDeliveredAmount.jsonName] ==
            STAmount{MPT(800)}.getJson(JsonOptions::none));
        BEAST_EXPECT(
            meta[jss::delivered_amount] ==
            STAmount{MPT(800)}.getJson(JsonOptions::none));
    }
    else
    {
        BEAST_EXPECT(!meta.isMember(sfDeliveredAmount.jsonName));
        // BUGFIX: the original used `=` (assignment) inside BEAST_EXPECT,
        // which always evaluates truthy and verified nothing; compare
        // with `==` instead.
        BEAST_EXPECT(
            meta[jss::delivered_amount] == Json::Value("unavailable"));
    }

    // Partial payment capped by sendmax: 1200 sent, 25% fee => 960
    // delivered to carol.
    env(pay(bob, carol, MPT(1000)),
        sendmax(MPT(1200)),
        txflags(tfPartialPayment));
    env.close();

    txHash = env.tx()->getJson(JsonOptions::none)[jss::hash].asString();
    meta = env.rpc("tx", txHash)[jss::result][jss::meta];
    if (features[fixMPTDeliveredAmount])
    {
        BEAST_EXPECT(
            meta[sfDeliveredAmount.jsonName] ==
            STAmount{MPT(960)}.getJson(JsonOptions::none));
        BEAST_EXPECT(
            meta[jss::delivered_amount] ==
            STAmount{MPT(960)}.getJson(JsonOptions::none));
    }
    else
    {
        BEAST_EXPECT(!meta.isMember(sfDeliveredAmount.jsonName));
        // BUGFIX: `=` -> `==` (see above).
        BEAST_EXPECT(
            meta[jss::delivered_amount] == Json::Value("unavailable"));
    }
}
public:
// Test suite entry point: runs every DeliveredAmount scenario.
void
run() override
{
using namespace test::jtx;
// Baseline feature set: all testable amendments enabled.
FeatureBitset const all{testable_amendments()};
testTxDeliveredAmountRPC();
testAccountDeliveredAmountSubscribe();
// Exercise MPT DeliveredAmount reporting both without and with the
// fixMPTDeliveredAmount amendment, so both metadata shapes are covered.
testMPTDeliveredAmountRPC(all - fixMPTDeliveredAmount);
testMPTDeliveredAmountRPC(all);
}
};

View File

@@ -681,7 +681,7 @@ class ServerStatus_test : public beast::unit_test::suite,
resp["Upgrade"] == "websocket");
BEAST_EXPECT(
resp.find("Connection") != resp.end() &&
resp["Connection"] == "upgrade");
resp["Connection"] == "Upgrade");
}
void

View File

@@ -26,6 +26,8 @@ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#include <boost/filesystem.hpp>
#include <fstream>
namespace ripple {
namespace test {
namespace detail {

View File

@@ -79,7 +79,7 @@
#include <chrono>
#include <condition_variable>
#include <cstring>
#include <iostream>
#include <fstream>
#include <limits>
#include <mutex>
#include <optional>

View File

@@ -39,7 +39,7 @@
#include <google/protobuf/stubs/common.h>
#include <cstdlib>
#include <iostream>
#include <fstream>
#include <stdexcept>
#include <utility>

View File

@@ -315,14 +315,14 @@ escrowCreatePreclaimHelper<MPTIssue>(
// authorized
auto const& mptIssue = amount.get<MPTIssue>();
if (auto const ter =
requireAuth(ctx.view, mptIssue, account, MPTAuthType::WeakAuth);
requireAuth(ctx.view, mptIssue, account, AuthType::WeakAuth);
ter != tesSUCCESS)
return ter;
// If the issuer has requireAuth set, check if the destination is
// authorized
if (auto const ter =
requireAuth(ctx.view, mptIssue, dest, MPTAuthType::WeakAuth);
requireAuth(ctx.view, mptIssue, dest, AuthType::WeakAuth);
ter != tesSUCCESS)
return ter;
@@ -746,7 +746,7 @@ escrowFinishPreclaimHelper<MPTIssue>(
// authorized
auto const& mptIssue = amount.get<MPTIssue>();
if (auto const ter =
requireAuth(ctx.view, mptIssue, dest, MPTAuthType::WeakAuth);
requireAuth(ctx.view, mptIssue, dest, AuthType::WeakAuth);
ter != tesSUCCESS)
return ter;
@@ -1259,7 +1259,7 @@ escrowCancelPreclaimHelper<MPTIssue>(
// authorized
auto const& mptIssue = amount.get<MPTIssue>();
if (auto const ter =
requireAuth(ctx.view, mptIssue, account, MPTAuthType::WeakAuth);
requireAuth(ctx.view, mptIssue, account, AuthType::WeakAuth);
ter != tesSUCCESS)
return ter;

View File

@@ -580,7 +580,16 @@ Payment::doApply()
auto res = accountSend(
pv, account_, dstAccountID, amountDeliver, ctx_.journal);
if (res == tesSUCCESS)
{
pv.apply(ctx_.rawView());
// If the actual amount delivered is different from the original
// amount due to partial payment or transfer fee, we need to update
// DeliveredAmount using the actual delivered amount
if (view().rules().enabled(fixMPTDeliveredAmount) &&
amountDeliver != dstAmount)
ctx_.deliver(amountDeliver);
}
else if (res == tecINSUFFICIENT_FUNDS || res == tecPATH_DRY)
res = tecPATH_PARTIAL;

View File

@@ -52,9 +52,19 @@ VaultWithdraw::preflight(PreflightContext const& ctx)
return temBAD_AMOUNT;
if (auto const destination = ctx.tx[~sfDestination];
destination && *destination == beast::zero)
destination.has_value())
{
JLOG(ctx.j.debug()) << "VaultWithdraw: zero/empty destination account.";
if (*destination == beast::zero)
{
JLOG(ctx.j.debug())
<< "VaultWithdraw: zero/empty destination account.";
return temMALFORMED;
}
}
else if (ctx.tx.isFieldPresent(sfDestinationTag))
{
JLOG(ctx.j.debug()) << "VaultWithdraw: sfDestinationTag is set but "
"sfDestination is not";
return temMALFORMED;
}
@@ -123,33 +133,39 @@ VaultWithdraw::preclaim(PreclaimContext const& ctx)
// Withdrawal to a 3rd party destination account is essentially a transfer,
// via shares in the vault. Enforce all the usual asset transfer checks.
AuthType authType = AuthType::Legacy;
if (account != dstAcct)
{
auto const sleDst = ctx.view.read(keylet::account(dstAcct));
if (sleDst == nullptr)
return tecNO_DST;
if (sleDst->getFlags() & lsfRequireDestTag)
if (sleDst->isFlag(lsfRequireDestTag) &&
!ctx.tx.isFieldPresent(sfDestinationTag))
return tecDST_TAG_NEEDED; // Cannot send without a tag
if (sleDst->getFlags() & lsfDepositAuth)
if (sleDst->isFlag(lsfDepositAuth))
{
if (!ctx.view.exists(keylet::depositPreauth(dstAcct, account)))
return tecNO_PERMISSION;
}
// The destination account must have consented to receive the asset by
// creating a RippleState or MPToken
authType = AuthType::StrongAuth;
}
// Destination MPToken must exist (if asset is an MPT)
if (auto const ter = requireAuth(ctx.view, vaultAsset, dstAcct);
// Destination MPToken (for an MPT) or trust line (for an IOU) must exist
// if not sending to Account.
if (auto const ter = requireAuth(ctx.view, vaultAsset, dstAcct, authType);
!isTesSuccess(ter))
return ter;
// Cannot withdraw from a Vault an Asset frozen for the destination account
if (isFrozen(ctx.view, dstAcct, vaultAsset))
return vaultAsset.holds<Issue>() ? tecFROZEN : tecLOCKED;
if (auto const ret = checkFrozen(ctx.view, dstAcct, vaultAsset))
return ret;
if (isFrozen(ctx.view, account, vaultShare))
return tecLOCKED;
if (auto const ret = checkFrozen(ctx.view, account, vaultShare))
return ret;
return tesSUCCESS;
}

View File

@@ -175,6 +175,29 @@ isFrozen(
asset.value());
}
/** Translate an IOU freeze into a transaction result.
 *
 * @return tecFROZEN if `issue` is frozen for `account` in `view`,
 *         tesSUCCESS otherwise.
 */
[[nodiscard]] inline TER
checkFrozen(ReadView const& view, AccountID const& account, Issue const& issue)
{
    // Separate return statements let TER's converting constructor accept
    // each result-code enum directly, removing the C-style (TER) casts the
    // ternary form required.
    if (isFrozen(view, account, issue))
        return tecFROZEN;
    return tesSUCCESS;
}

/** Translate an MPT lock into a transaction result.
 *
 * @return tecLOCKED if `mptIssue` is locked for `account` in `view`,
 *         tesSUCCESS otherwise.
 */
[[nodiscard]] inline TER
checkFrozen(
    ReadView const& view,
    AccountID const& account,
    MPTIssue const& mptIssue)
{
    if (isFrozen(view, account, mptIssue))
        return tecLOCKED;
    return tesSUCCESS;
}

/** Dispatch to the Issue (tecFROZEN) or MPTIssue (tecLOCKED) overload for
 *  a generic Asset.
 */
[[nodiscard]] inline TER
checkFrozen(ReadView const& view, AccountID const& account, Asset const& asset)
{
    return std::visit(
        [&](auto const& issue) { return checkFrozen(view, account, issue); },
        asset.value());
}
[[nodiscard]] bool
isAnyFrozen(
ReadView const& view,
@@ -725,19 +748,40 @@ transferXRP(
STAmount const& amount,
beast::Journal j);
/* Check if MPToken exists:
* - StrongAuth - before checking lsfMPTRequireAuth is set
* - WeakAuth - after checking if lsfMPTRequireAuth is set
/* Check if MPToken (for MPT) or trust line (for IOU) exists:
* - StrongAuth - before checking if authorization is required
* - WeakAuth
* for MPT - after checking lsfMPTRequireAuth flag
* for IOU - do not check if trust line exists
* - Legacy
* for MPT - before checking lsfMPTRequireAuth flag i.e. same as StrongAuth
* for IOU - do not check if trust line exists i.e. same as WeakAuth
*/
enum class MPTAuthType : bool { StrongAuth = true, WeakAuth = false };
enum class AuthType { StrongAuth, WeakAuth, Legacy };
/** Check if the account lacks required authorization.
*
* Return tecNO_AUTH or tecNO_LINE if it does
* and tesSUCCESS otherwise.
* Return tecNO_AUTH or tecNO_LINE if it does
* and tesSUCCESS otherwise.
*
* If StrongAuth then return tecNO_LINE if the RippleState doesn't exist. Return
* tecNO_AUTH if lsfRequireAuth is set on the issuer's AccountRoot, and the
* RippleState does exist, and the RippleState is not authorized.
*
* If WeakAuth then return tecNO_AUTH if lsfRequireAuth is set, and the
* RippleState exists, and is not authorized. Return tecNO_LINE if
* lsfRequireAuth is set and the RippleState doesn't exist. Consequently, if
* WeakAuth and lsfRequireAuth is *not* set, this function will return
* tesSUCCESS even if RippleState does *not* exist.
*
* The default "Legacy" auth type is equivalent to WeakAuth.
*/
[[nodiscard]] TER
requireAuth(ReadView const& view, Issue const& issue, AccountID const& account);
requireAuth(
ReadView const& view,
Issue const& issue,
AccountID const& account,
AuthType authType = AuthType::Legacy);
/** Check if the account lacks required authorization.
*
@@ -751,32 +795,33 @@ requireAuth(ReadView const& view, Issue const& issue, AccountID const& account);
* purely defensive, as we currently do not allow such vaults to be created.
*
* If StrongAuth then return tecNO_AUTH if MPToken doesn't exist or
* lsfMPTRequireAuth is set and MPToken is not authorized. If WeakAuth then
* return tecNO_AUTH if lsfMPTRequireAuth is set and MPToken doesn't exist or is
* not authorized (explicitly or via credentials, if DomainID is set in
* MPTokenIssuance). Consequently, if WeakAuth and lsfMPTRequireAuth is *not*
* set, this function will return true even if MPToken does *not* exist.
* lsfMPTRequireAuth is set and MPToken is not authorized.
*
* If WeakAuth then return tecNO_AUTH if lsfMPTRequireAuth is set and MPToken
* doesn't exist or is not authorized (explicitly or via credentials, if
* DomainID is set in MPTokenIssuance). Consequently, if WeakAuth and
* lsfMPTRequireAuth is *not* set, this function will return true even if
* MPToken does *not* exist.
*
* The default "Legacy" auth type is equivalent to StrongAuth.
*/
[[nodiscard]] TER
requireAuth(
ReadView const& view,
MPTIssue const& mptIssue,
AccountID const& account,
MPTAuthType authType = MPTAuthType::StrongAuth,
AuthType authType = AuthType::Legacy,
int depth = 0);
[[nodiscard]] TER inline requireAuth(
ReadView const& view,
Asset const& asset,
AccountID const& account,
MPTAuthType authType = MPTAuthType::StrongAuth)
AuthType authType = AuthType::Legacy)
{
return std::visit(
[&]<ValidIssueType TIss>(TIss const& issue_) {
if constexpr (std::is_same_v<TIss, Issue>)
return requireAuth(view, issue_, account);
else
return requireAuth(view, issue_, account, authType);
return requireAuth(view, issue_, account, authType);
},
asset.value());
}

View File

@@ -505,8 +505,8 @@ accountHolds(
if (zeroIfUnauthorized == ahZERO_IF_UNAUTHORIZED &&
view.rules().enabled(featureSingleAssetVault))
{
if (auto const err = requireAuth(
view, mptIssue, account, MPTAuthType::StrongAuth);
if (auto const err =
requireAuth(view, mptIssue, account, AuthType::StrongAuth);
!isTesSuccess(err))
amount.clear(mptIssue);
}
@@ -2298,15 +2298,27 @@ transferXRP(
}
TER
requireAuth(ReadView const& view, Issue const& issue, AccountID const& account)
requireAuth(
ReadView const& view,
Issue const& issue,
AccountID const& account,
AuthType authType)
{
if (isXRP(issue) || issue.account == account)
return tesSUCCESS;
auto const trustLine =
view.read(keylet::line(account, issue.account, issue.currency));
// If account has no line, and this is a strong check, fail
if (!trustLine && authType == AuthType::StrongAuth)
return tecNO_LINE;
// If this is a weak or legacy check, or if the account has a line, fail if
// auth is required and not set on the line
if (auto const issuerAccount = view.read(keylet::account(issue.account));
issuerAccount && (*issuerAccount)[sfFlags] & lsfRequireAuth)
{
if (auto const trustLine =
view.read(keylet::line(account, issue.account, issue.currency)))
if (trustLine)
return ((*trustLine)[sfFlags] &
((account > issue.account) ? lsfLowAuth : lsfHighAuth))
? tesSUCCESS
@@ -2322,7 +2334,7 @@ requireAuth(
ReadView const& view,
MPTIssue const& mptIssue,
AccountID const& account,
MPTAuthType authType,
AuthType authType,
int depth)
{
auto const mptID = keylet::mptIssuance(mptIssue.getMptID());
@@ -2357,7 +2369,7 @@ requireAuth(
if (auto const err = std::visit(
[&]<ValidIssueType TIss>(TIss const& issue) {
if constexpr (std::is_same_v<TIss, Issue>)
return requireAuth(view, issue, account);
return requireAuth(view, issue, account, authType);
else
return requireAuth(
view, issue, account, authType, depth + 1);
@@ -2372,7 +2384,8 @@ requireAuth(
auto const sleToken = view.read(mptokenID);
// if account has no MPToken, fail
if (!sleToken && authType == MPTAuthType::StrongAuth)
if (!sleToken &&
(authType == AuthType::StrongAuth || authType == AuthType::Legacy))
return tecNO_AUTH;
// Note, this check is not amendment-gated because DomainID will be always

View File

@@ -44,7 +44,6 @@ doLogLevel(RPC::JsonContext& context)
Logs::toString(Logs::fromSeverity(context.app.logs().threshold()));
std::vector<std::pair<std::string, std::string>> logTable(
context.app.logs().partition_severities());
using stringPair = std::map<std::string, std::string>::value_type;
for (auto const& [k, v] : logTable)
lev[k] = v;