Mirror of https://github.com/XRPLF/clio.git (synced 2025-11-05 04:15:51 +00:00)

Compare commits: 109 commits
```
764601e7fc 28bd58f1d1 b3da9adb03 670eaaa51d 7dee6413ae 27b37ade7b 76ec9ccb0b 5cb3908e4f
cdb5882688 8cbbf6689d 34ed4a1eb0 683e2ba54b e2f078e0af 27ff532473 d5195e3e52 0595076c6e
e835ad8b57 0a7ce8c5be ca2a3ccee9 a4f677be2c 243e174f1e a369381594 2d52966806 02ccbf850f
b87b32db86 d02da5d28a 00aff5cfe0 59282f764d 0cdfad3731 0b0794d9bf aa910ba889 dbfabd4102
ce41f5ba07 24c77752cb 81f4386770 a60d01d272 59d27db5ab e75aa1add1 68f832a832 5e43c3b099
9af36e5235 90103431ea ed27c41641 3daa735b31 5d2c2a8bfc f2384a47f1 c774a40a59 477d8e1f5f
7fd51d8a60 b2a1b34ae6 5860a90b94 f2f342f7c2 cc1da5afa9 de055934e1 0c201ed952 9b83eb4033
fadd60e68a eef85b520a 8b99013ff8 af0a8af287 de43be887e b03cd63efd 76cf2fc57e f7f619fd1b
4a0267fbac b51ed8dd98 cb01c9f596 7c55529c90 424af5dfe0 bebc683ba2 1e0a2f5162 593d7298b1
79e6f07863 1cb09a01f6 9c92a2b51b 99580a2602 ade6289de2 3916635037 f195a30a76 cc610a0964
c5012eb854 01ae4c4312 88d27a7265 e01d7d12cf 1e38ad5ec0 bd9e39ee85 46514c8fe9 39d1ceace4
89eb962d85 4a5fee7548 fcd891148b 99adb31184 2c1a90a20d 2385bf547b 1d011cf8d9 6896a2545a
91484c64e4 bdf7382d44 8a3e71e91f e61ee30180 d3df6d10e4 60df3a1914 f454076fb6 66b3f40268
b31b7633c9 a36aa3618f 7943f47939 67e451ec23 92789d5a91
```
```
@@ -23,7 +23,7 @@ BreakBeforeBraces: WebKit
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: true
ColumnLimit: 120
CommentPragmas: '^ IWYU pragma:'
CommentPragmas: "^ IWYU pragma:"
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 4
ContinuationIndentWidth: 4
@@ -39,11 +39,11 @@ IncludeCategories:
Priority: 1
- Regex: '^<.*\.(h|hpp)>$'
Priority: 2
- Regex: '^<.*>$'
- Regex: "^<.*>$"
Priority: 3
- Regex: '.*'
- Regex: ".*"
Priority: 4
IncludeIsMainRegex: '$'
IncludeIsMainRegex: "$"
IndentCaseLabels: true
IndentFunctionDeclarationAfterType: false
IndentWidth: 4
```
.clang-tidy (24)
```
@@ -1,5 +1,5 @@
---
Checks: '-*,
Checks: "-*,
bugprone-argument-comment,
bugprone-assert-side-effect,
bugprone-bad-signal-to-kill-thread,
@@ -146,7 +146,7 @@ Checks: '-*,
readability-static-definition-in-anonymous-namespace,
readability-suspicious-call-argument,
readability-use-std-min-max
'
"

CheckOptions:
readability-braces-around-statements.ShortStatementLines: 2
@@ -158,21 +158,21 @@ CheckOptions:
readability-identifier-naming.EnumConstantCase: CamelCase
readability-identifier-naming.ScopedEnumConstantCase: CamelCase
readability-identifier-naming.GlobalConstantCase: UPPER_CASE
readability-identifier-naming.GlobalConstantPrefix: 'k'
readability-identifier-naming.GlobalConstantPrefix: "k"
readability-identifier-naming.GlobalVariableCase: CamelCase
readability-identifier-naming.GlobalVariablePrefix: 'g'
readability-identifier-naming.GlobalVariablePrefix: "g"
readability-identifier-naming.ConstexprFunctionCase: camelBack
readability-identifier-naming.ConstexprMethodCase: camelBack
readability-identifier-naming.ClassMethodCase: camelBack
readability-identifier-naming.ClassMemberCase: camelBack
readability-identifier-naming.ClassConstantCase: UPPER_CASE
readability-identifier-naming.ClassConstantPrefix: 'k'
readability-identifier-naming.ClassConstantPrefix: "k"
readability-identifier-naming.StaticConstantCase: UPPER_CASE
readability-identifier-naming.StaticConstantPrefix: 'k'
readability-identifier-naming.StaticConstantPrefix: "k"
readability-identifier-naming.StaticVariableCase: UPPER_CASE
readability-identifier-naming.StaticVariablePrefix: 'k'
readability-identifier-naming.StaticVariablePrefix: "k"
readability-identifier-naming.ConstexprVariableCase: UPPER_CASE
readability-identifier-naming.ConstexprVariablePrefix: 'k'
readability-identifier-naming.ConstexprVariablePrefix: "k"
readability-identifier-naming.LocalConstantCase: camelBack
readability-identifier-naming.LocalVariableCase: camelBack
readability-identifier-naming.TemplateParameterCase: CamelCase
@@ -181,11 +181,11 @@ CheckOptions:
readability-identifier-naming.MemberCase: camelBack
readability-identifier-naming.PrivateMemberSuffix: _
readability-identifier-naming.ProtectedMemberSuffix: _
readability-identifier-naming.PublicMemberSuffix: ''
readability-identifier-naming.FunctionIgnoredRegexp: '.*tag_invoke.*'
readability-identifier-naming.PublicMemberSuffix: ""
readability-identifier-naming.FunctionIgnoredRegexp: ".*tag_invoke.*"
bugprone-unsafe-functions.ReportMoreUnsafeFunctions: true
bugprone-unused-return-value.CheckedReturnTypes: ::std::error_code;::std::error_condition;::std::errc
misc-include-cleaner.IgnoreHeaders: '.*/(detail|impl)/.*;.*(expected|unexpected).*;.*ranges_lower_bound\.h;time.h;stdlib.h'
misc-include-cleaner.IgnoreHeaders: '.*/(detail|impl)/.*;.*(expected|unexpected).*;.*ranges_lower_bound\.h;time.h;stdlib.h;__chrono/.*;fmt/chrono.h;boost/uuid/uuid_hash.hpp'

HeaderFilterRegex: '^.*/(src|tests)/.*\.(h|hpp)$'
WarningsAsErrors: '*'
WarningsAsErrors: "*"
```
```
@@ -8,9 +8,9 @@ parse:
- BAR
- BAZ
kwargs:
HEADERS: '*'
SOURCES: '*'
DEPENDS: '*'
HEADERS: "*"
SOURCES: "*"
DEPENDS: "*"
_help_override_spec:
- Override configurations per-command where available
override_spec: {}
@@ -43,7 +43,7 @@ format:
- indicates how fractional indentions are handled during
- whitespace replacement. If set to 'use-space', fractional
- indentation is left as spaces (utf-8 0x20). If set to
- '`round-up` fractional indentation is replaced with a single'
- "`round-up` fractional indentation is replaced with a single"
- tab character (utf-8 0x09) effectively shifting the column
- to the next tabstop
fractional_tab_policy: use-space
@@ -73,9 +73,9 @@ format:
dangle_parens: true
_help_dangle_align:
- If the trailing parenthesis must be 'dangled' on its on
- 'line, then align it to this reference: `prefix`: the start'
- 'of the statement, `prefix-indent`: the start of the'
- 'statement, plus one indentation level, `child`: align to'
- "line, then align it to this reference: `prefix`: the start"
- "of the statement, `prefix-indent`: the start of the"
- "statement, plus one indentation level, `child`: align to"
- the column of the arguments
dangle_align: prefix
_help_min_prefix_chars:
@@ -115,7 +115,7 @@ format:
_help_require_valid_layout:
- By default, if cmake-format cannot successfully fit
- everything into the desired linewidth it will apply the
- last, most agressive attempt that it made. If this flag is
- last, most aggressive attempt that it made. If this flag is
- True, however, cmake-format will print error, exit with non-
- zero status code, and write-out nothing
require_valid_layout: false
@@ -127,7 +127,7 @@ _help_markup: Options affecting comment reflow and formatting.
markup:
_help_bullet_char:
- What character to use for bulleted lists
bullet_char: '*'
bullet_char: "*"
_help_enum_char:
- What character to use as punctuation after numerals in an
- enumerated list
@@ -152,9 +152,9 @@ markup:
ruler_pattern: ^\s*[^\w\s]{3}.*[^\w\s]{3}$
_help_explicit_trailing_pattern:
- If a comment line matches starts with this pattern then it
- is explicitly a trailing comment for the preceeding
- is explicitly a trailing comment for the preceding
- argument. Default is '#<'
explicit_trailing_pattern: '#<'
explicit_trailing_pattern: "#<"
_help_hashruler_min_length:
- If a comment line starts with at least this many consecutive
- hash characters, then don't lstrip() them off. This allows
@@ -176,14 +176,14 @@ lint:
disabled_codes: []
_help_function_pattern:
- regular expression pattern describing valid function names
function_pattern: '[0-9a-z_]+'
function_pattern: "[0-9a-z_]+"
_help_macro_pattern:
- regular expression pattern describing valid macro names
macro_pattern: '[0-9A-Z_]+'
macro_pattern: "[0-9A-Z_]+"
_help_global_var_pattern:
- regular expression pattern describing valid names for
- variables with global (cache) scope
global_var_pattern: '[A-Z][0-9A-Z_]+'
global_var_pattern: "[A-Z][0-9A-Z_]+"
_help_internal_var_pattern:
- regular expression pattern describing valid names for
- variables with global scope (but internal semantic)
@@ -191,7 +191,7 @@ lint:
_help_local_var_pattern:
- regular expression pattern describing valid names for
- variables with local scope
local_var_pattern: '[a-z][a-z0-9_]+'
local_var_pattern: "[a-z][a-z0-9_]+"
_help_private_var_pattern:
- regular expression pattern describing valid names for
- privatedirectory variables
@@ -199,15 +199,15 @@ lint:
_help_public_var_pattern:
- regular expression pattern describing valid names for public
- directory variables
public_var_pattern: '[A-Z][0-9A-Z_]+'
public_var_pattern: "[A-Z][0-9A-Z_]+"
_help_argument_var_pattern:
- regular expression pattern describing valid names for
- function/macro arguments and loop variables.
argument_var_pattern: '[a-z][a-z0-9_]+'
argument_var_pattern: "[a-z][a-z0-9_]+"
_help_keyword_pattern:
- regular expression pattern describing valid names for
- keywords used in functions or macros
keyword_pattern: '[A-Z][0-9A-Z_]+'
keyword_pattern: "[A-Z][0-9A-Z_]+"
_help_max_conditionals_custom_parser:
- In the heuristic for C0201, how many conditionals to match
- within a loop in before considering the loop a parser.
```
.codecov.yml (11)
```
@@ -9,3 +9,14 @@ coverage:
default:
target: 20% # Need to bump this number https://docs.codecov.com/docs/commit-status#patch-status
threshold: 2%

# `codecov/codecov-action` reruns `gcovr` if build files present
# That's why we run it in a separate workflow
# This ignore list is not currently used
#
# More info: https://github.com/XRPLF/clio/pull/2066
ignore:
- "tests"
- "src/data/cassandra/"
- "src/data/CassandraBackend.hpp"
- "src/data/BackendFactory.*"
```
```
@@ -1,119 +0,0 @@
#!/bin/bash

# Note: This script is intended to be run from the root of the repository.
#
# This script checks the format of the code and cmake files.
# In many cases it will automatically fix the issues and abort the commit.

no_formatted_directories_staged() {
staged_directories=$(git diff-index --cached --name-only HEAD | awk -F/ '{print $1}')
for sd in $staged_directories; do
if [[ "$sd" =~ ^(benchmark|cmake|src|tests)$ ]]; then
return 1
fi
done
return 0
}

if no_formatted_directories_staged ; then
exit 0
fi

echo "+ Checking code format..."

# paths to check and re-format
sources="src tests"
formatter="clang-format -i"
version=$($formatter --version | grep -o '[0-9\.]*')

if [[ "19.0.0" > "$version" ]]; then
cat <<EOF

ERROR
-----------------------------------------------------------------------------
A minimum of version 19 of `which clang-format` is required.
Your version is $version.
Please fix paths and run again.
-----------------------------------------------------------------------------

EOF
exit 3
fi

# check there is no .h headers, only .hpp
wrong_headers=$(find $sources -name "*.h" | sed 's/^/ - /')
if [[ ! -z "$wrong_headers" ]]; then
cat <<EOF

ERROR
-----------------------------------------------------------------------------
Found .h headers in the source code. Please rename them to .hpp:

$wrong_headers
-----------------------------------------------------------------------------

EOF
exit 2
fi

if ! command -v cmake-format &> /dev/null; then
cat <<EOF

ERROR
-----------------------------------------------------------------------------
'cmake-format' is required to run this script.
Please install it and run again.
-----------------------------------------------------------------------------

EOF
exit 3
fi

function grep_code {
grep -l "${1}" ${sources} -r --include \*.hpp --include \*.cpp
}

GNU_SED=$(sed --version 2>&1 | grep -q 'GNU' && echo true || echo false)

if [[ "$GNU_SED" == "false" ]]; then # macOS sed
# make all includes to be <...> style
grep_code '#include ".*"' | xargs sed -i '' -E 's|#include "(.*)"|#include <\1>|g'

# make local includes to be "..." style
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | tr '\n' '|' | sed 's/|$//' | sed 's/|/\\|/g')
grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i '' -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
else
# make all includes to be <...> style
grep_code '#include ".*"' | xargs sed -i -E 's|#include "(.*)"|#include <\1>|g'

# make local includes to be "..." style
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | paste -sd '|' | sed 's/|/\\|/g')
grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
fi

cmake_dirs=$(echo cmake $sources)
cmake_files=$(find $cmake_dirs -type f \( -name "CMakeLists.txt" -o -name "*.cmake" \))
cmake_files=$(echo $cmake_files ./CMakeLists.txt)

first=$(git diff $sources $cmake_files)
find $sources -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.ipp' \) -print0 | xargs -0 $formatter
cmake-format -i $cmake_files
second=$(git diff $sources $cmake_files)
changes=$(diff <(echo "$first") <(echo "$second"))
changes_number=$(echo -n "$changes" | wc -l | sed -e 's/^[[:space:]]*//')

if [ "$changes_number" != "0" ]; then
cat <<\EOF

WARNING
-----------------------------------------------------------------------------
Automatically re-formatted code with 'clang-format' - commit was aborted.
Please manually add any updated files and commit again.
-----------------------------------------------------------------------------

EOF
if [[ "$1" == "--diff" ]]; then
echo "$changes"
fi
exit 1
fi

@@ -1,3 +0,0 @@
#!/bin/sh
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-checkout' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs post-checkout "$@"

@@ -1,3 +0,0 @@
#!/bin/sh
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-commit' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs post-commit "$@"

@@ -1,3 +0,0 @@
#!/bin/sh
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-merge' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs post-merge "$@"

@@ -1,7 +0,0 @@
#!/bin/bash

# This script is intended to be run from the root of the repository.

source .githooks/check-format
source .githooks/check-docs
```
.github/ISSUE_TEMPLATE/bug_report.md (9, vendored)
```
@@ -3,29 +3,34 @@ name: Bug report
about: Create a report to help us improve
title: "[Title with short description] (Version: [Clio version])"
labels: bug
assignees: ''

assignees: ""
---

<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Kindly refrain from posting any credentials or sensitive information in this issue -->

## Issue Description

<!-- Provide a summary for your issue/bug. -->

## Steps to Reproduce

<!-- List in detail the exact steps to reproduce the unexpected behavior of the software. -->

## Expected Result

<!-- Explain in detail what behavior you expected to happen. -->

## Actual Result

<!-- Explain in detail what behavior actually happened. -->

## Environment

<!-- Please describe your environment setup (such as Ubuntu 20.04.2 with Boost 1.82). -->
<!-- Please use the version returned by './clio_server --version' as the version number -->

## Supporting Files

<!-- If you have supporting files such as a log, feel free to post a link here using Github Gist. -->
<!-- Consider adding configuration files with private information removed via Github Gist. -->
```
.github/ISSUE_TEMPLATE/feature_request.md (7, vendored)
```
@@ -3,21 +3,24 @@ name: Feature request
about: Suggest an idea for this project
title: "[Title with short description] (Version: [Clio version])"
labels: enhancement
assignees: ''

assignees: ""
---

<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Kindly refrain from posting any credentials or sensitive information in this issue -->

## Summary

<!-- Provide a summary to the feature request -->

## Motivation

<!-- Why do we need this feature? -->

## Solution

<!-- What is the solution? -->

## Paths Not Taken

<!-- What other alternatives have been considered? -->
```
.github/ISSUE_TEMPLATE/question.md (5, vendored)
```
@@ -3,8 +3,7 @@ name: Question
about: A question in form of an issue
title: "[Title with short description] (Version: Clio version)"
labels: question
assignees: ''

assignees: ""
---

<!-- Please search existing issues to avoid creating duplicates. -->
@@ -12,7 +11,9 @@ assignees: ''
<!-- Kindly refrain from posting any credentials or sensitive information in this issue -->

## Question

<!-- Your question -->

## Paths Not Taken

<!-- If applicable, what other alternatives have been considered? -->
```
.github/actions/build_clio/action.yml (19, vendored)
```
@@ -1,13 +1,15 @@
name: Build clio
description: Build clio in build directory

inputs:
target:
description: Build target name
targets:
description: Space-separated build target names
default: all
substract_threads:
subtract_threads:
description: An option for the action get_number_of_threads. See get_number_of_threads
required: true
default: '0'
default: "0"

runs:
using: composite
steps:
@@ -15,10 +17,13 @@ runs:
uses: ./.github/actions/get_number_of_threads
id: number_of_threads
with:
substract_threads: ${{ inputs.substract_threads }}
subtract_threads: ${{ inputs.subtract_threads }}

- name: Build Clio
- name: Build targets
shell: bash
run: |
cd build
cmake --build . --parallel ${{ steps.number_of_threads.outputs.threads_number }} --target ${{ inputs.target }}
cmake \
--build . \
--parallel "${{ steps.number_of_threads.outputs.threads_number }}" \
--target ${{ inputs.targets }}
```
.github/actions/build_docker_image/action.yml (30, vendored)
```
@@ -1,8 +1,12 @@
name: Build and push Docker image
description: Build and push Docker image to DockerHub and GitHub Container Registry

inputs:
image_name:
description: Name of the image to build
images:
description: Name of the images to use as a base name
required: true
dockerhub_repo:
description: DockerHub repository name
required: true
push_image:
description: Whether to push the image to the registry (true/false)
@@ -19,35 +23,38 @@ inputs:
description:
description: Short description of the image
required: true

runs:
using: composite
steps:
- name: Login to DockerHub
if: ${{ inputs.push_image == 'true' }}
uses: docker/login-action@v3
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_PW }}

- name: Login to GitHub Container Registry
if: ${{ inputs.push_image == 'true' }}
uses: docker/login-action@v3
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ env.GITHUB_TOKEN }}

- uses: docker/setup-qemu-action@v3
- uses: docker/setup-buildx-action@v3
- uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
with:
cache-image: false
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

- uses: docker/metadata-action@v5
- uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
id: meta
with:
images: ${{ inputs.image_name }}
images: ${{ inputs.images }}
tags: ${{ inputs.tags }}

- name: Build and push
uses: docker/build-push-action@v5
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
with:
context: ${{ inputs.directory }}
platforms: ${{ inputs.platforms }}
@@ -56,11 +63,10 @@ runs:

- name: Update DockerHub description
if: ${{ inputs.push_image == 'true' }}
uses: peter-evans/dockerhub-description@v4
uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_PW }}
repository: ${{ inputs.image_name }}
repository: ${{ inputs.dockerhub_repo }}
short-description: ${{ inputs.description }}
readme-filepath: ${{ inputs.directory }}/README.md
```
.github/actions/code_coverage/action.yml (9, vendored)
```
@@ -1,21 +1,26 @@
name: Generate code coverage report
description: Run tests, generate code coverage report and upload it to codecov.io

runs:
using: composite

steps:
- name: Run tests
shell: bash
run: |
build/clio_tests

# Please keep exclude list in sync with .codecov.yml
- name: Run gcovr
shell: bash
run: |
gcovr -e tests \
gcovr \
-e tests \
-e src/data/cassandra \
-e src/data/CassandraBackend.hpp \
-e 'src/data/BackendFactory.*' \
--xml build/coverage_report.xml -j8 --exclude-throw-branches
--xml build/coverage_report.xml \
-j8 --exclude-throw-branches

- name: Archive coverage report
uses: actions/upload-artifact@v4
```
.github/actions/create_issue/action.yml (18, vendored)
```
@@ -1,5 +1,6 @@
name: Create an issue
description: Create an issue

inputs:
title:
description: Issue title
@@ -10,15 +11,17 @@ inputs:
labels:
description: Comma-separated list of labels
required: true
default: 'bug'
default: "bug"
assignees:
description: Comma-separated list of assignees
required: true
default: 'cindyyan317,godexsoft,kuznetsss'
default: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"

outputs:
created_issue_id:
description: Created issue id
value: ${{ steps.create_issue.outputs.created_issue }}

runs:
using: composite
steps:
@@ -27,9 +30,12 @@ runs:
shell: bash
run: |
echo -e '${{ inputs.body }}' > issue.md
gh issue create --assignee '${{ inputs.assignees }}' --label '${{ inputs.labels }}' --title '${{ inputs.title }}' --body-file ./issue.md > create_issue.log
created_issue=$(cat create_issue.log | sed 's|.*/||')
gh issue create \
--assignee '${{ inputs.assignees }}' \
--label '${{ inputs.labels }}' \
--title '${{ inputs.title }}' \
--body-file ./issue.md \
> create_issue.log
created_issue="$(sed 's|.*/||' create_issue.log)"
echo "created_issue=$created_issue" >> $GITHUB_OUTPUT
rm create_issue.log issue.md
```
.github/actions/generate/action.yml (46, vendored)
```
@@ -1,5 +1,6 @@
name: Run conan and cmake
description: Run conan and cmake

inputs:
conan_profile:
description: Conan profile name
@@ -7,27 +8,37 @@ inputs:
conan_cache_hit:
description: Whether conan cache has been downloaded
required: true
default: 'false'
default: "false"
build_type:
description: Build type for third-party libraries and clio. Could be 'Release', 'Debug'
required: true
default: 'Release'
default: "Release"
build_integration_tests:
description: Whether to build integration tests
required: true
default: 'true'
default: "true"
code_coverage:
description: Whether conan's coverage option should be on or not
required: true
default: 'false'
default: "false"
static:
description: Whether Clio is to be statically linked
required: true
default: 'false'
default: "false"
sanitizer:
description: Sanitizer to use
required: true
default: 'false' # false, tsan, asan or ubsan
default: "false"
choices:
- "false"
- "tsan"
- "asan"
- "ubsan"
time_trace:
description: Whether to enable compiler trace reports
required: true
default: "false"

runs:
using: composite
steps:
@@ -42,19 +53,36 @@ runs:
CODE_COVERAGE: "${{ inputs.code_coverage == 'true' && 'True' || 'False' }}"
STATIC_OPTION: "${{ inputs.static == 'true' && 'True' || 'False' }}"
INTEGRATION_TESTS_OPTION: "${{ inputs.build_integration_tests == 'true' && 'True' || 'False' }}"
TIME_TRACE: "${{ inputs.time_trace == 'true' && 'True' || 'False' }}"
run: |
cd build
conan install .. -of . -b $BUILD_OPTION -s build_type=${{ inputs.build_type }} -o clio:static="${STATIC_OPTION}" -o clio:tests=True -o clio:integration_tests="${INTEGRATION_TESTS_OPTION}" -o clio:lint=False -o clio:coverage="${CODE_COVERAGE}" --profile ${{ inputs.conan_profile }}
conan \
install .. \
-of . \
-b $BUILD_OPTION \
-s build_type="${{ inputs.build_type }}" \
-o clio:static="${STATIC_OPTION}" \
-o clio:tests=True \
-o clio:integration_tests="${INTEGRATION_TESTS_OPTION}" \
-o clio:lint=False \
-o clio:coverage="${CODE_COVERAGE}" \
-o clio:time_trace="${TIME_TRACE}" \
--profile "${{ inputs.conan_profile }}"

- name: Run cmake
shell: bash
env:
BUILD_TYPE: "${{ inputs.build_type }}"
SANITIZER_OPTION: |
SANITIZER_OPTION: |-
${{ inputs.sanitizer == 'tsan' && '-Dsan=thread' ||
inputs.sanitizer == 'ubsan' && '-Dsan=undefined' ||
inputs.sanitizer == 'asan' && '-Dsan=address' ||
'' }}
run: |
cd build
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" ${SANITIZER_OPTION} .. -G Ninja
cmake \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
"${SANITIZER_OPTION}" \
.. \
-G Ninja
```
.github/actions/get_number_of_threads/action.yml (14, vendored)
```
@@ -1,14 +1,16 @@
name: Get number of threads
description: Determines number of threads to use on macOS and Linux

inputs:
substract_threads:
description: How many threads to substract from the calculated number
subtract_threads:
description: How many threads to subtract from the calculated number
required: true
default: '0'
default: "0"
outputs:
threads_number:
description: Number of threads to use
value: ${{ steps.number_of_threads_export.outputs.num }}

runs:
using: composite
steps:
@@ -28,7 +30,7 @@ runs:
id: number_of_threads_export
shell: bash
run: |
num_of_threads=${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}
shift_by=${{ inputs.substract_threads }}
shifted=$((num_of_threads - shift_by))
num_of_threads="${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}"
shift_by="${{ inputs.subtract_threads }}"
shifted="$((num_of_threads - shift_by))"
echo "num=$(( shifted > 1 ? shifted : 1 ))" >> $GITHUB_OUTPUT

@@ -1,9 +1,11 @@
name: Git common ancestor
description: Find the closest common commit

outputs:
commit:
description: Hash of commit
value: ${{ steps.find_common_ancestor.outputs.commit }}

runs:
using: composite
steps:
@@ -11,4 +13,4 @@ runs:
id: find_common_ancestor
shell: bash
run: |
echo "commit=$(git merge-base --fork-point origin/develop)" >> $GITHUB_OUTPUT
echo "commit=\"$(git merge-base --fork-point origin/develop)\"" >> $GITHUB_OUTPUT
```
.github/actions/prepare_runner/action.yml (41, vendored)
```
@@ -1,9 +1,11 @@
name: Prepare runner
description: Install packages, set environment variables, create directories

inputs:
disable_ccache:
description: Whether ccache should be disabled
required: true

runs:
using: composite
steps:
@@ -11,13 +13,42 @@ runs:
if: ${{ runner.os == 'macOS' }}
shell: bash
run: |
brew install llvm@14 pkg-config ninja bison cmake ccache jq gh conan@1 ca-certificates
brew install \
bison \
ca-certificates \
ccache \
clang-build-analyzer \
conan@1 \
gh \
jq \
llvm@14 \
ninja \
pkg-config
echo "/opt/homebrew/opt/conan@1/bin" >> $GITHUB_PATH

- name: Install CMake 3.31.6 on mac
if: ${{ runner.os == 'macOS' }}
shell: bash
run: |
# Uninstall any existing cmake
brew uninstall cmake --ignore-dependencies || true

# Download specific cmake formula
FORMULA_URL="https://raw.githubusercontent.com/Homebrew/homebrew-core/b4e46db74e74a8c1650b38b1da222284ce1ec5ce/Formula/c/cmake.rb"
FORMULA_EXPECTED_SHA256="c7ec95d86f0657638835441871e77541165e0a2581b53b3dd657cf13ad4228d4"

mkdir -p /tmp/homebrew-formula
curl -s -L "$FORMULA_URL" -o /tmp/homebrew-formula/cmake.rb

echo "$FORMULA_EXPECTED_SHA256 /tmp/homebrew-formula/cmake.rb" | shasum -a 256 -c

# Install cmake from the specific formula with force flag
brew install --formula --force /tmp/homebrew-formula/cmake.rb

- name: Fix git permissions on Linux
if: ${{ runner.os == 'Linux' }}
shell: bash
run: git config --global --add safe.directory $PWD
run: git config --global --add safe.directory "$PWD"

- name: Set env variables for macOS
if: ${{ runner.os == 'macOS' }}
@@ -42,7 +73,5 @@ runs:
- name: Create directories
shell: bash
run: |
mkdir -p $CCACHE_DIR
mkdir -p $CONAN_USER_HOME/.conan

mkdir -p "$CCACHE_DIR"
mkdir -p "$CONAN_USER_HOME/.conan"
```
.github/actions/restore_cache/action.yml (8, vendored)
```
@@ -1,5 +1,6 @@
name: Restore cache
description: Find and restores conan and ccache cache

inputs:
conan_dir:
description: Path to .conan directory
@@ -17,7 +18,7 @@ inputs:
code_coverage:
description: Whether code coverage is on
required: true
default: 'false'
default: "false"
outputs:
conan_hash:
description: Hash to use as a part of conan cache key
@@ -28,6 +29,7 @@ outputs:
ccache_cache_hit:
description: True if ccache cache has been downloaded
value: ${{ steps.ccache_cache.outputs.cache-hit }}

runs:
using: composite
steps:
@@ -40,9 +42,9 @@ runs:
shell: bash
run: |
conan info . -j info.json -o clio:tests=True
packages_info=$(cat info.json | jq '.[] | "\(.display_name): \(.id)"' | grep -v 'clio')
packages_info="$(cat info.json | jq '.[] | "\(.display_name): \(.id)"' | grep -v 'clio')"
echo "$packages_info"
hash=$(echo "$packages_info" | shasum -a 256 | cut -d ' ' -f 1)
hash="$(echo "$packages_info" | shasum -a 256 | cut -d ' ' -f 1)"
rm info.json
echo "hash=$hash" >> $GITHUB_OUTPUT
```
.github/actions/save_cache/action.yml (6, vendored)
```
@@ -1,5 +1,6 @@
name: Save cache
description: Save conan and ccache cache for develop branch

inputs:
conan_dir:
description: Path to .conan directory
@@ -28,7 +29,8 @@ inputs:
code_coverage:
description: Whether code coverage is on
required: true
default: 'false'
default: "false"

runs:
using: composite
steps:
@@ -55,5 +57,3 @@ runs:
with:
path: ${{ inputs.ccache_dir }}
key: clio-ccache-${{ runner.os }}-${{ inputs.build_type }}${{ inputs.code_coverage == 'true' && '-code_coverage' || '' }}-${{ inputs.conan_profile }}-develop-${{ steps.git_common_ancestor.outputs.commit }}
```
.github/actions/setup_conan/action.yml (41, vendored)
```
@@ -1,52 +1,33 @@
name: Setup conan
description: Setup conan profile and artifactory

inputs:
conan_profile:
description: Conan profile name
required: true
outputs:
conan_profile:
description: Created conan profile name
value: ${{ steps.conan_export_output.outputs.conan_profile }}

runs:
using: composite
steps:
- name: On mac
- name: Create conan profile on macOS
if: ${{ runner.os == 'macOS' }}
shell: bash
env:
CONAN_PROFILE: apple_clang_16
id: conan_setup_mac
CONAN_PROFILE: ${{ inputs.conan_profile }}
run: |
echo "Creating $CONAN_PROFILE conan profile"
conan profile new $CONAN_PROFILE --detect --force
conan profile update settings.compiler.libcxx=libc++ $CONAN_PROFILE
conan profile update settings.compiler.cppstd=20 $CONAN_PROFILE
conan profile update env.CXXFLAGS=-DBOOST_ASIO_DISABLE_CONCEPTS $CONAN_PROFILE
conan profile update "conf.tools.build:cxxflags+=[\"-DBOOST_ASIO_DISABLE_CONCEPTS\"]" $CONAN_PROFILE
echo "created_conan_profile=$CONAN_PROFILE" >> $GITHUB_OUTPUT

- name: On linux
if: ${{ runner.os == 'Linux' }}
shell: bash
id: conan_setup_linux
run: |
echo "created_conan_profile=${{ inputs.conan_profile }}" >> $GITHUB_OUTPUT

- name: Export output variable
shell: bash
id: conan_export_output
run: |
echo "conan_profile=${{ steps.conan_setup_mac.outputs.created_conan_profile || steps.conan_setup_linux.outputs.created_conan_profile }}" >> $GITHUB_OUTPUT
echo "Creating \"$CONAN_PROFILE\" conan profile"
conan profile new "$CONAN_PROFILE" --detect --force
conan profile update settings.compiler.libcxx=libc++ "$CONAN_PROFILE"
conan profile update settings.compiler.cppstd=20 "$CONAN_PROFILE"
conan profile update env.CXXFLAGS=-DBOOST_ASIO_DISABLE_CONCEPTS "$CONAN_PROFILE"
conan profile update "conf.tools.build:cxxflags+=[\"-DBOOST_ASIO_DISABLE_CONCEPTS\"]" "$CONAN_PROFILE"

- name: Add conan-non-prod artifactory
shell: bash
run: |
if [[ -z $(conan remote list | grep conan-non-prod) ]]; then
if [[ -z "$(conan remote list | grep conan-non-prod)" ]]; then
echo "Adding conan-non-prod"
conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
else
echo "Conan-non-prod is available"
fi
```
.github/actions/test/Dockerfile (6, vendored)
```
@@ -1,6 +0,0 @@
FROM cassandra:4.0.4

RUN apt-get update && apt-get install -y postgresql
COPY entrypoint.sh /entrypoint.sh

ENTRYPOINT ["/entrypoint.sh"]
```
.github/actions/test/entrypoint.sh (8, vendored)
```
@@ -1,8 +0,0 @@
#!/bin/bash

pg_ctlcluster 12 main start
su postgres -c"psql -c\"alter user postgres with password 'postgres'\""
su cassandra -c "/opt/cassandra/bin/cassandra -R"
sleep 90
chmod +x ./clio_tests
./clio_tests
```
.github/dependabot.yml (161, vendored)
```
@@ -1,16 +1,157 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
- package-ecosystem: github-actions
directory: /
schedule:
interval: "weekly"
day: "monday"
interval: weekly
day: monday
time: "04:00"
timezone: "Etc/GMT"
timezone: Etc/GMT
reviewers:
- "cindyyan317"
- "godexsoft"
- "kuznetsss"
- XRPLF/clio-dev-team
commit-message:
prefix: "[CI] "
target-branch: "develop"
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/build_clio/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/build_docker_image/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/code_coverage/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/create_issue/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/generate/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/get_number_of_threads/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/git_common_ancestor/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/prepare_runner/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/restore_cache/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/save_cache/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop

- package-ecosystem: github-actions
directory: .github/actions/setup_conan/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
```
.github/workflows/build.yml (184, vendored)
```
@@ -1,170 +1,120 @@
name: Build

on:
push:
branches: [master, release/*, develop]
pull_request:
branches: [master, release/*, develop]
paths:
- .github/workflows/build.yml

- .github/workflows/build_and_test.yml
- .github/workflows/build_impl.yml
- .github/workflows/test_impl.yml
- .github/workflows/upload_coverage_report.yml

- ".github/actions/**"
- "!.github/actions/build_docker_image/**"
- "!.github/actions/create_issue/**"

- CMakeLists.txt
- "cmake/**"
- "src/**"
- "tests/**"

- docs/config-description.md
workflow_dispatch:

concurrency:
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
check_format:
name: Check format
runs-on: ubuntu-latest
container:
image: rippleci/clio_ci:latest
steps:
- name: Fix git permissions on Linux
shell: bash
run: git config --global --add safe.directory $PWD
build-and-test:
name: Build and Test

- uses: actions/checkout@v4
- name: Run formatters
id: run_formatters
run: |
./.githooks/check-format --diff
shell: bash

check_docs:
name: Check documentation
runs-on: ubuntu-latest
container:
image: rippleci/clio_ci:latest
steps:
- uses: actions/checkout@v4
- name: Run linter
id: run_linter
run: |
./.githooks/check-docs
shell: bash

build:
name: Build
needs:
- check_format
- check_docs
strategy:
fail-fast: false
matrix:
os: [heavy]
conan_profile: [gcc, clang]
build_type: [Release, Debug]
container: ['{ "image": "ghcr.io/xrplf/clio-ci:latest" }']
static: [true]

include:
- os: heavy
conan_profile: gcc
build_type: Release
container: '{ "image": "rippleci/clio_ci:latest" }'
code_coverage: false
static: true
- os: heavy
conan_profile: gcc
build_type: Debug
container: '{ "image": "rippleci/clio_ci:latest" }'
code_coverage: true
static: true
- os: heavy
conan_profile: clang
build_type: Release
container: '{ "image": "rippleci/clio_ci:latest" }'
code_coverage: false
static: true
- os: heavy
conan_profile: clang
build_type: Debug
container: '{ "image": "rippleci/clio_ci:latest" }'
code_coverage: false
static: true
- os: macos15
conan_profile: default_apple_clang
build_type: Release
code_coverage: false
container: ""
static: false
uses: ./.github/workflows/build_impl.yml

uses: ./.github/workflows/build_and_test.yml
with:
runs_on: ${{ matrix.os }}
container: ${{ matrix.container }}
conan_profile: ${{ matrix.conan_profile }}
build_type: ${{ matrix.build_type }}
code_coverage: ${{ matrix.code_coverage }}
static: ${{ matrix.static }}
unit_tests: true
integration_tests: true
clio_server: true
run_unit_tests: true
run_integration_tests: false
upload_clio_server: true

test:
name: Run Tests
needs: build
strategy:
fail-fast: false
matrix:
include:
- os: heavy
conan_profile: gcc
build_type: Release
container:
image: rippleci/clio_ci:latest
- os: heavy
conan_profile: clang
build_type: Release
container:
image: rippleci/clio_ci:latest
- os: heavy
conan_profile: clang
build_type: Debug
container:
image: rippleci/clio_ci:latest
- os: macos15
conan_profile: apple_clang_16
build_type: Release
runs-on: ${{ matrix.os }}
container: ${{ matrix.container }}
code_coverage:
name: Run Code Coverage

steps:
- name: Clean workdir
if: ${{ runner.os == 'macOS' }}
uses: kuznetsss/workspace-cleanup@1.0

- uses: actions/download-artifact@v4
uses: ./.github/workflows/build_impl.yml
with:
name: clio_tests_${{ runner.os }}_${{ matrix.build_type }}_${{ matrix.conan_profile }}

- name: Run clio_tests
run: |
chmod +x ./clio_tests
./clio_tests
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
conan_profile: gcc
build_type: Debug
disable_cache: false
code_coverage: true
static: true
upload_clio_server: false
targets: all
sanitizer: "false"
analyze_build_time: false
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

check_config:
name: Check Config Description
needs: build
needs: build-and-test
runs-on: heavy
container:
image: rippleci/clio_ci:latest
image: ghcr.io/xrplf/clio-ci:latest

steps:
- uses: actions/checkout@v4

- uses: actions/download-artifact@v4
with:
name: clio_server_Linux_Release_gcc

- name: Compare Config Description
shell: bash
run: |
repoConfigFile=docs/config-description.md
if ! [ -f ${repoConfigFile} ]; then
if ! [ -f "${repoConfigFile}" ]; then
echo "Config Description markdown file is missing in docs folder"
exit 1
fi

chmod +x ./clio_server
configDescriptionFile=config_description_new.md
./clio_server -d ${configDescriptionFile}
./clio_server -d "${configDescriptionFile}"

configDescriptionHash=$(sha256sum ${configDescriptionFile} | cut -d' ' -f1)
repoConfigHash=$(sha256sum ${repoConfigFile} | cut -d' ' -f1)
configDescriptionHash=$(sha256sum "${configDescriptionFile}" | cut -d' ' -f1)
repoConfigHash=$(sha256sum "${repoConfigFile}" | cut -d' ' -f1)

if [ ${configDescriptionHash} != ${repoConfigHash} ]; then
if [ "${configDescriptionHash}" != "${repoConfigHash}" ]; then
echo "Markdown file is not up to date"
diff -u "${repoConfigFile}" "${configDescriptionFile}"
rm -f ${configDescriptionFile}
rm -f "${configDescriptionFile}"
exit 1
fi
rm -f ${configDescriptionFile}
rm -f "${configDescriptionFile}"
exit 0
```
.github/workflows/build_and_test.yml (92, vendored, new file)
```
@@ -0,0 +1,92 @@
name: Reusable build and test

on:
workflow_call:
inputs:
runs_on:
description: Runner to run the job on
required: true
type: string

container:
description: "The container object as a JSON string (leave empty to run natively)"
required: true
type: string

conan_profile:
description: Conan profile to use
required: true
type: string

build_type:
description: Build type
required: true
type: string

disable_cache:
description: Whether ccache and conan cache should be disabled
required: false
type: boolean
default: false

static:
description: Whether to build static binaries
required: true
type: boolean
default: true

run_unit_tests:
description: Whether to run unit tests
required: true
type: boolean

run_integration_tests:
description: Whether to run integration tests
required: true
type: boolean
default: false

upload_clio_server:
description: Whether to upload clio_server
required: true
type: boolean

targets:
description: Space-separated build target names
required: false
type: string
default: all

sanitizer:
description: Sanitizer to use
required: false
type: string
default: "false"

jobs:
build:
uses: ./.github/workflows/build_impl.yml
with:
runs_on: ${{ inputs.runs_on }}
container: ${{ inputs.container }}
conan_profile: ${{ inputs.conan_profile }}
build_type: ${{ inputs.build_type }}
disable_cache: ${{ inputs.disable_cache }}
code_coverage: false
static: ${{ inputs.static }}
upload_clio_server: ${{ inputs.upload_clio_server }}
targets: ${{ inputs.targets }}
sanitizer: ${{ inputs.sanitizer }}
analyze_build_time: false

test:
needs: build
uses: ./.github/workflows/test_impl.yml
with:
runs_on: ${{ inputs.runs_on }}
container: ${{ inputs.container }}
conan_profile: ${{ inputs.conan_profile }}
build_type: ${{ inputs.build_type }}
run_unit_tests: ${{ inputs.run_unit_tests }}
run_integration_tests: ${{ inputs.run_integration_tests }}
sanitizer: ${{ inputs.sanitizer }}
```
```
@@ -1,4 +1,5 @@
name: Build and publish Clio docker image

on:
workflow_call:
inputs:
@@ -41,6 +42,7 @@ jobs:
build_and_publish_image:
name: Build and publish image
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4

@@ -55,7 +57,7 @@ jobs:
if: ${{ inputs.clio_server_binary_url != null }}
shell: bash
run: |
wget ${{inputs.clio_server_binary_url}} -P ./docker/clio/artifact/
wget "${{inputs.clio_server_binary_url}}" -P ./docker/clio/artifact/
if [ "$(sha256sum ./docker/clio/clio_server | awk '{print $1}')" != "${{inputs.binary_sha256}}" ]; then
echo "Binary sha256 sum doesn't match"
exit 1
@@ -87,7 +89,10 @@ jobs:
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
image_name: rippleci/clio
images: |
rippleci/clio
ghcr.io/xrplf/clio
dockerhub_repo: rippleci/clio
push_image: ${{ inputs.publish_image }}
directory: docker/clio
tags: ${{ inputs.tags }}
```
.github/workflows/build_impl.yml (96, vendored)
```
@@ -1,4 +1,5 @@
name: Reusable build

on:
workflow_call:
inputs:
@@ -6,13 +7,11 @@ on:
description: Runner to run the job on
required: true
type: string
default: heavy

container:
description: "The container object as a JSON string (leave empty to run natively)"
required: true
type: string
default: ""

conan_profile:
description: Conan profile to use
@@ -28,49 +27,40 @@ on:
description: Whether ccache and conan cache should be disabled
required: false
type: boolean
default: false

code_coverage:
description: Whether to enable code coverage
required: true
type: boolean
default: false

static:
description: Whether to build static binaries
required: true
type: boolean
default: true

unit_tests:
description: Whether to run unit tests
upload_clio_server:
description: Whether to upload clio_server
required: true
type: boolean
default: false

integration_tests:
description: Whether to run integration tests
targets:
description: Space-separated build target names
required: true
type: boolean
default: false

clio_server:
description: Whether to build clio_server
required: true
type: boolean
default: true

target:
description: Build target name
required: false
type: string
default: all

sanitizer:
description: Sanitizer to use
required: false
required: true
type: string
default: 'false'

analyze_build_time:
description: Whether to enable build time analysis
required: true
type: boolean

secrets:
CODECOV_TOKEN:
required: false

jobs:
build:
@@ -81,7 +71,7 @@ jobs:
steps:
- name: Clean workdir
if: ${{ runner.os == 'macOS' }}
uses: kuznetsss/workspace-cleanup@1.0
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0

- uses: actions/checkout@v4
with:
@@ -94,7 +84,6 @@ jobs:

- name: Setup conan
uses: ./.github/actions/setup_conan
id: conan
with:
conan_profile: ${{ inputs.conan_profile }}

@@ -104,7 +93,7 @@ jobs:
id: restore_cache
with:
conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
conan_profile: ${{ steps.conan.outputs.conan_profile }}
conan_profile: ${{ inputs.conan_profile }}
ccache_dir: ${{ env.CCACHE_DIR }}
build_type: ${{ inputs.build_type }}
code_coverage: ${{ inputs.code_coverage }}
@@ -112,17 +101,33 @@ jobs:
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ steps.conan.outputs.conan_profile }}
conan_profile: ${{ inputs.conan_profile }}
conan_cache_hit: ${{ !inputs.disable_cache && steps.restore_cache.outputs.conan_cache_hit }}
build_type: ${{ inputs.build_type }}
code_coverage: ${{ inputs.code_coverage }}
static: ${{ inputs.static }}
sanitizer: ${{ inputs.sanitizer }}
time_trace: ${{ inputs.analyze_build_time }}

- name: Build Clio
uses: ./.github/actions/build_clio
with:
target: ${{ inputs.target }}
targets: ${{ inputs.targets }}

- name: Show build time analyze report
if: ${{ inputs.analyze_build_time }}
run: |
ClangBuildAnalyzer --all build/ build_time_report.bin
ClangBuildAnalyzer --analyze build_time_report.bin > build_time_report.txt
cat build_time_report.txt
shell: bash

- name: Upload build time analyze report
if: ${{ inputs.analyze_build_time }}
uses: actions/upload-artifact@v4
with:
name: build_time_report_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: build_time_report.txt
```
|
||||
|
||||
- name: Show ccache's statistics
|
||||
if: ${{ !inputs.disable_cache }}
|
||||
@@ -135,32 +140,32 @@ jobs:
|
||||
cat /tmp/ccache.stats
|
||||
|
||||
- name: Strip unit_tests
|
||||
if: ${{ inputs.unit_tests && !inputs.code_coverage && inputs.sanitizer == 'false' }}
|
||||
if: inputs.sanitizer == 'false' && !inputs.code_coverage && !inputs.analyze_build_time
|
||||
run: strip build/clio_tests
|
||||
|
||||
- name: Strip integration_tests
|
||||
if: ${{ inputs.integration_tests && !inputs.code_coverage }}
|
||||
if: inputs.sanitizer == 'false' && !inputs.code_coverage && !inputs.analyze_build_time
|
||||
run: strip build/clio_integration_tests
|
||||
|
||||
- name: Upload clio_server
|
||||
if: ${{ inputs.clio_server }}
|
||||
if: inputs.upload_clio_server && !inputs.code_coverage && !inputs.analyze_build_time
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: clio_server_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
|
||||
name: clio_server_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
path: build/clio_server
|
||||
|
||||
- name: Upload clio_tests
|
||||
if: ${{ inputs.unit_tests && !inputs.code_coverage }}
|
||||
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
|
||||
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
path: build/clio_tests
|
||||
|
||||
- name: Upload clio_integration_tests
|
||||
if: ${{ inputs.integration_tests && !inputs.code_coverage }}
|
||||
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
|
||||
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
path: build/clio_integration_tests
|
||||
|
||||
- name: Save cache
|
||||
@@ -175,14 +180,23 @@ jobs:
|
||||
ccache_cache_miss_rate: ${{ steps.ccache_stats.outputs.miss_rate }}
|
||||
build_type: ${{ inputs.build_type }}
|
||||
code_coverage: ${{ inputs.code_coverage }}
|
||||
conan_profile: ${{ steps.conan.outputs.conan_profile }}
|
||||
conan_profile: ${{ inputs.conan_profile }}
|
||||
|
||||
# TODO: This is not a part of build process but it is the easiest way to do it here.
|
||||
# It will be refactored in https://github.com/XRPLF/clio/issues/1075
|
||||
# This is run as part of the build job, because it requires the following:
|
||||
# - source code
|
||||
# - generated source code (Build.cpp)
|
||||
# - conan packages
|
||||
# - .gcno files in build directory
|
||||
#
|
||||
# It's all available in the build job, but not in the test job
|
||||
- name: Run code coverage
|
||||
if: ${{ inputs.code_coverage }}
|
||||
uses: ./.github/actions/code_coverage
|
||||
|
||||
# `codecov/codecov-action` will rerun `gcov` if it's available and build directory is present
|
||||
# To prevent this from happening, we run this action in a separate workflow
|
||||
#
|
||||
# More info: https://github.com/XRPLF/clio/pull/2066
|
||||
upload_coverage_report:
|
||||
if: ${{ inputs.code_coverage }}
|
||||
name: Codecov
|
||||
|
||||
24
.github/workflows/check_libxrpl.yml
vendored
24
.github/workflows/check_libxrpl.yml
vendored
@@ -1,14 +1,23 @@
|
||||
name: Check new libXRPL
|
||||
|
||||
on:
|
||||
repository_dispatch:
|
||||
types: [check_libxrpl]
|
||||
|
||||
concurrency:
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CONAN_PROFILE: gcc
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build Clio / `libXRPL ${{ github.event.client_payload.version }}`
|
||||
runs-on: [self-hosted, heavy]
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -27,14 +36,13 @@ jobs:
|
||||
|
||||
- name: Setup conan
|
||||
uses: ./.github/actions/setup_conan
|
||||
id: conan
|
||||
with:
|
||||
conan_profile: gcc
|
||||
conan_profile: ${{ env.CONAN_PROFILE }}
|
||||
|
||||
- name: Run conan and cmake
|
||||
uses: ./.github/actions/generate
|
||||
with:
|
||||
conan_profile: ${{ steps.conan.outputs.conan_profile }}
|
||||
conan_profile: ${{ env.CONAN_PROFILE }}
|
||||
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
|
||||
build_type: Release
|
||||
|
||||
@@ -55,7 +63,7 @@ jobs:
|
||||
needs: build
|
||||
runs-on: [self-hosted, heavy]
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
steps:
|
||||
- uses: actions/download-artifact@v4
|
||||
@@ -72,9 +80,11 @@ jobs:
|
||||
needs: [build, run_tests]
|
||||
if: ${{ always() && contains(needs.*.result, 'failure') }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
@@ -83,8 +93,8 @@ jobs:
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
labels: 'compatibility,bug'
|
||||
title: 'Proposed libXRPL check failed'
|
||||
labels: "compatibility,bug"
|
||||
title: "Proposed libXRPL check failed"
|
||||
body: >
|
||||
Clio build or tests failed against `libXRPL ${{ github.event.client_payload.version }}`.
|
||||
|
||||
|
||||
9
.github/workflows/check_pr_title.yml
vendored
9
.github/workflows/check_pr_title.yml
vendored
@@ -1,4 +1,5 @@
|
||||
name: Check PR title
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, edited, reopened, synchronize]
|
||||
@@ -7,12 +8,10 @@ on:
|
||||
jobs:
|
||||
check_title:
|
||||
runs-on: ubuntu-latest
|
||||
# permissions:
|
||||
# pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: ytanikin/PRConventionalCommits@1.3.0
|
||||
- uses: ytanikin/pr-conventional-commits@8267db1bacc237419f9ed0228bb9d94e94271a1d # v1.4.1
|
||||
with:
|
||||
task_types: '["build","feat","fix","docs","test","ci","style","refactor","perf","chore"]'
|
||||
add_label: false
|
||||
# Turned off labelling because it leads to an error, see https://github.com/ytanikin/PRConventionalCommits/issues/19
|
||||
# custom_labels: '{"build":"build", "feat":"enhancement", "fix":"bug", "docs":"documentation", "test":"testability", "ci":"ci", "style":"refactoring", "refactor":"refactoring", "perf":"performance", "chore":"tooling"}'
|
||||
custom_labels: '{"build":"build", "feat":"enhancement", "fix":"bug", "docs":"documentation", "test":"testability", "ci":"ci", "style":"refactoring", "refactor":"refactoring", "perf":"performance", "chore":"tooling"}'
|
||||
|
||||
46
.github/workflows/clang-tidy.yml
vendored
46
.github/workflows/clang-tidy.yml
vendored
@@ -1,4 +1,5 @@
|
||||
name: Clang-tidy check
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 9 * * 1-5"
|
||||
@@ -6,15 +7,24 @@ on:
|
||||
pull_request:
|
||||
branches: [develop]
|
||||
paths:
|
||||
- .clang_tidy
|
||||
- .github/workflows/clang-tidy.yml
|
||||
workflow_call:
|
||||
|
||||
- .clang_tidy
|
||||
|
||||
concurrency:
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CONAN_PROFILE: clang
|
||||
|
||||
jobs:
|
||||
clang_tidy:
|
||||
runs-on: heavy
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
issues: write
|
||||
@@ -32,9 +42,8 @@ jobs:
|
||||
|
||||
- name: Setup conan
|
||||
uses: ./.github/actions/setup_conan
|
||||
id: conan
|
||||
with:
|
||||
conan_profile: clang
|
||||
conan_profile: ${{ env.CONAN_PROFILE }}
|
||||
|
||||
- name: Restore cache
|
||||
uses: ./.github/actions/restore_cache
|
||||
@@ -42,12 +51,12 @@ jobs:
|
||||
with:
|
||||
conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
|
||||
ccache_dir: ${{ env.CCACHE_DIR }}
|
||||
conan_profile: ${{ steps.conan.outputs.conan_profile }}
|
||||
conan_profile: ${{ env.CONAN_PROFILE }}
|
||||
|
||||
- name: Run conan and cmake
|
||||
uses: ./.github/actions/generate
|
||||
with:
|
||||
conan_profile: ${{ steps.conan.outputs.conan_profile }}
|
||||
conan_profile: ${{ env.CONAN_PROFILE }}
|
||||
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
|
||||
build_type: Release
|
||||
|
||||
@@ -60,13 +69,14 @@ jobs:
|
||||
shell: bash
|
||||
id: run_clang_tidy
|
||||
run: |
|
||||
run-clang-tidy-19 -p build -j ${{ steps.number_of_threads.outputs.threads_number }} -fix -quiet 1>output.txt
|
||||
run-clang-tidy-19 -p build -j "${{ steps.number_of_threads.outputs.threads_number }}" -fix -quiet 1>output.txt
|
||||
|
||||
- name: Check format
|
||||
- name: Fix local includes and clang-format style
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
continue-on-error: true
|
||||
shell: bash
|
||||
run: ./.githooks/check-format
|
||||
run: |
|
||||
pre-commit run --all-files fix-local-includes || true
|
||||
pre-commit run --all-files clang-format || true
|
||||
|
||||
- name: Print issues found
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
@@ -77,20 +87,20 @@ jobs:
|
||||
rm output.txt
|
||||
|
||||
- name: Create an issue
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
|
||||
id: create_issue
|
||||
uses: ./.github/actions/create_issue
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
title: 'Clang-tidy found bugs in code 🐛'
|
||||
title: "Clang-tidy found bugs in code 🐛"
|
||||
body: >
|
||||
Clang-tidy found issues in the code:
|
||||
|
||||
List of the issues found: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
|
||||
|
||||
- uses: crazy-max/ghaction-import-gpg@v6
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
- uses: crazy-max/ghaction-import-gpg@e89d40939c28e39f97cf32126055eeae86ba74ec # v6.3.0
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
|
||||
passphrase: ${{ secrets.ACTIONS_GPG_PASSPHRASE }}
|
||||
@@ -98,8 +108,8 @@ jobs:
|
||||
git_commit_gpgsign: true
|
||||
|
||||
- name: Create PR with fixes
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
env:
|
||||
GH_REPO: ${{ github.repository }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
@@ -111,7 +121,7 @@ jobs:
|
||||
delete-branch: true
|
||||
title: "style: clang-tidy auto fixes"
|
||||
body: "Fixes #${{ steps.create_issue.outputs.created_issue_id }}. Please review and commit clang-tidy fixes."
|
||||
reviewers: "cindyyan317,godexsoft,kuznetsss"
|
||||
reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
|
||||
|
||||
- name: Fail the job
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
name: Restart clang-tidy workflow
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [develop]
|
||||
@@ -17,8 +18,8 @@ jobs:
|
||||
id: check
|
||||
shell: bash
|
||||
run: |
|
||||
passed=$(if [[ $(git log -1 --pretty=format:%s | grep 'style: clang-tidy auto fixes') ]]; then echo 'true' ; else echo 'false' ; fi)
|
||||
echo "passed=$passed" >> $GITHUB_OUTPUT
|
||||
passed=$(if [[ "$(git log -1 --pretty=format:%s | grep 'style: clang-tidy auto fixes')" ]]; then echo 'true' ; else echo 'false' ; fi)
|
||||
echo "passed=\"$passed\"" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Run clang-tidy workflow
|
||||
if: ${{ contains(steps.check.outputs.passed, 'true') }}
|
||||
|
||||
7
.github/workflows/docs.yml
vendored
7
.github/workflows/docs.yml
vendored
@@ -1,4 +1,5 @@
|
||||
name: Documentation
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [develop]
|
||||
@@ -10,7 +11,8 @@ permissions:
|
||||
id-token: write
|
||||
|
||||
concurrency:
|
||||
group: "pages"
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
@@ -21,7 +23,8 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
190
.github/workflows/nightly.yml
vendored
190
.github/workflows/nightly.yml
vendored
@@ -1,151 +1,105 @@
|
||||
name: Nightly release
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 8 * * 1-5'
|
||||
- cron: "0 8 * * 1-5"
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/nightly.yml'
|
||||
- '.github/workflows/build_clio_docker_image.yml'
|
||||
- .github/workflows/nightly.yml
|
||||
|
||||
- .github/workflows/release_impl.yml
|
||||
- .github/workflows/build_and_test.yml
|
||||
- .github/workflows/build_impl.yml
|
||||
- .github/workflows/build_clio_docker_image.yml
|
||||
|
||||
- ".github/actions/**"
|
||||
- "!.github/actions/code_coverage/**"
|
||||
|
||||
concurrency:
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build clio
|
||||
build-and-test:
|
||||
name: Build and Test
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos15
|
||||
conan_profile: default_apple_clang
|
||||
build_type: Release
|
||||
static: false
|
||||
- os: heavy
|
||||
conan_profile: gcc
|
||||
build_type: Release
|
||||
static: true
|
||||
container: '{ "image": "rippleci/clio_ci:latest" }'
|
||||
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
|
||||
- os: heavy
|
||||
conan_profile: gcc
|
||||
build_type: Debug
|
||||
static: true
|
||||
container: '{ "image": "rippleci/clio_ci:latest" }'
|
||||
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
|
||||
|
||||
uses: ./.github/workflows/build_and_test.yml
|
||||
with:
|
||||
runs_on: ${{ matrix.os }}
|
||||
container: ${{ matrix.container }}
|
||||
conan_profile: ${{ matrix.conan_profile }}
|
||||
build_type: ${{ matrix.build_type }}
|
||||
static: ${{ matrix.static }}
|
||||
run_unit_tests: true
|
||||
run_integration_tests: true
|
||||
upload_clio_server: true
|
||||
disable_cache: true
|
||||
|
||||
analyze_build_time:
|
||||
name: Analyze Build Time
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# TODO: Enable when we have at least ubuntu 22.04
|
||||
# as ClangBuildAnalyzer requires relatively modern glibc
|
||||
#
|
||||
# - os: heavy
|
||||
# conan_profile: clang
|
||||
# container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
|
||||
# static: true
|
||||
- os: macos15
|
||||
conan_profile: default_apple_clang
|
||||
container: ""
|
||||
static: false
|
||||
uses: ./.github/workflows/build_impl.yml
|
||||
with:
|
||||
runs_on: ${{ matrix.os }}
|
||||
container: ${{ matrix.container }}
|
||||
conan_profile: gcc
|
||||
build_type: ${{ matrix.build_type }}
|
||||
conan_profile: ${{ matrix.conan_profile }}
|
||||
build_type: Release
|
||||
disable_cache: true
|
||||
code_coverage: false
|
||||
static: ${{ matrix.static }}
|
||||
unit_tests: true
|
||||
integration_tests: true
|
||||
clio_server: true
|
||||
disable_cache: true
|
||||
|
||||
run_tests:
|
||||
needs: build
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos15
|
||||
conan_profile: apple_clang_16
|
||||
build_type: Release
|
||||
integration_tests: false
|
||||
- os: heavy
|
||||
conan_profile: gcc
|
||||
build_type: Release
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
integration_tests: true
|
||||
- os: heavy
|
||||
conan_profile: gcc
|
||||
build_type: Debug
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
integration_tests: true
|
||||
runs-on: [self-hosted, "${{ matrix.os }}"]
|
||||
container: ${{ matrix.container }}
|
||||
|
||||
services:
|
||||
scylladb:
|
||||
image: ${{ (matrix.integration_tests) && 'scylladb/scylla' || '' }}
|
||||
options: >-
|
||||
--health-cmd "cqlsh -e 'describe cluster'"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- name: Clean workdir
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
uses: kuznetsss/workspace-cleanup@1.0
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_tests_${{ runner.os }}_${{ matrix.build_type }}_${{ matrix.conan_profile }}
|
||||
|
||||
- name: Run clio_tests
|
||||
run: |
|
||||
chmod +x ./clio_tests
|
||||
./clio_tests
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_integration_tests_${{ runner.os }}_${{ matrix.build_type }}_${{ matrix.conan_profile }}
|
||||
|
||||
# To be enabled back once docker in mac runner arrives
|
||||
# https://github.com/XRPLF/clio/issues/1400
|
||||
- name: Run clio_integration_tests
|
||||
if: matrix.integration_tests
|
||||
run: |
|
||||
chmod +x ./clio_integration_tests
|
||||
./clio_integration_tests --backend_host=scylladb
|
||||
upload_clio_server: false
|
||||
targets: all
|
||||
sanitizer: "false"
|
||||
analyze_build_time: true
|
||||
|
||||
nightly_release:
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
needs: run_tests
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GH_REPO: ${{ github.repository }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
needs: build-and-test
|
||||
uses: ./.github/workflows/release_impl.yml
|
||||
with:
|
||||
path: nightly_release
|
||||
pattern: clio_server_*
|
||||
|
||||
- name: Prepare files
|
||||
shell: bash
|
||||
run: |
|
||||
cp ${{ github.workspace }}/.github/workflows/nightly_notes.md "${RUNNER_TEMP}/nightly_notes.md"
|
||||
cd nightly_release
|
||||
for d in $(ls); do
|
||||
archive_name=$(ls $d)
|
||||
mv ${d}/${archive_name} ./
|
||||
rm -r $d
|
||||
sha256sum ./$archive_name > ./${archive_name}.sha256sum
|
||||
cat ./$archive_name.sha256sum >> "${RUNNER_TEMP}/nightly_notes.md"
|
||||
done
|
||||
echo '```' >> "${RUNNER_TEMP}/nightly_notes.md"
|
||||
|
||||
- name: Remove current nightly release and nightly tag
|
||||
shell: bash
|
||||
run: |
|
||||
gh release delete nightly --yes || true
|
||||
git push origin :nightly || true
|
||||
|
||||
- name: Publish nightly release
|
||||
shell: bash
|
||||
run: |
|
||||
gh release create nightly --prerelease --title "Clio development (nightly) build" \
|
||||
--target $GITHUB_SHA --notes-file "${RUNNER_TEMP}/nightly_notes.md" \
|
||||
./nightly_release/clio_server*
|
||||
overwrite_release: true
|
||||
title: "Clio development (nightly) build"
|
||||
version: nightly
|
||||
notes_header_file: nightly_notes.md
|
||||
|
||||
build_and_publish_docker_image:
|
||||
uses: ./.github/workflows/build_clio_docker_image.yml
|
||||
needs: run_tests
|
||||
needs: build-and-test
|
||||
secrets: inherit
|
||||
with:
|
||||
tags: |
|
||||
@@ -156,12 +110,14 @@ jobs:
|
||||
publish_image: ${{ github.event_name != 'pull_request' }}
|
||||
|
||||
create_issue_on_failure:
|
||||
needs: [build, run_tests, nightly_release, build_and_publish_docker_image]
|
||||
needs: [build-and-test, nightly_release, build_and_publish_docker_image]
|
||||
if: ${{ always() && contains(needs.*.result, 'failure') && github.event_name != 'pull_request' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
@@ -170,7 +126,7 @@ jobs:
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
title: 'Nightly release failed 🌙'
|
||||
title: "Nightly release failed 🌙"
|
||||
body: >
|
||||
Nightly release failed:
|
||||
|
||||
|
||||
5
.github/workflows/nightly_notes.md
vendored
5
.github/workflows/nightly_notes.md
vendored
@@ -1,6 +1,7 @@
|
||||
# Release notes
|
||||
|
||||
> **Note:** Please remember that this is a development release and it is not recommended for production use.
|
||||
|
||||
Changelog (including previous releases): https://github.com/XRPLF/clio/commits/nightly
|
||||
Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly>
|
||||
|
||||
## SHA256 checksums
|
||||
```
|
||||
|
||||
39
.github/workflows/pre-commit-autoupdate.yml
vendored
Normal file
39
.github/workflows/pre-commit-autoupdate.yml
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
name: Pre-commit auto-update
|
||||
|
||||
on:
|
||||
# every first day of the month
|
||||
schedule:
|
||||
- cron: "0 0 1 * *"
|
||||
# on demand
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
auto-update:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.x
|
||||
|
||||
- run: pip install pre-commit
|
||||
- run: pre-commit autoupdate --freeze
|
||||
- run: pre-commit run --all-files || true
|
||||
|
||||
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
if: always()
|
||||
env:
|
||||
GH_REPO: ${{ github.repository }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
branch: update/pre-commit-hooks
|
||||
title: Update pre-commit hooks
|
||||
commit-message: "style: update pre-commit hooks"
|
||||
body: Update versions of pre-commit hooks to latest version.
|
||||
reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
|
||||
28
.github/workflows/pre-commit.yml
vendored
Normal file
28
.github/workflows/pre-commit.yml
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
name: Run pre-commit hooks
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
run-hooks:
|
||||
runs-on: heavy
|
||||
container:
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Repo ⚡️
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Prepare runner
|
||||
uses: ./.github/actions/prepare_runner
|
||||
with:
|
||||
disable_ccache: true
|
||||
|
||||
- name: Run pre-commit ✅
|
||||
run: pre-commit run --all-files
|
||||
78
.github/workflows/release_impl.yml
vendored
Normal file
78
.github/workflows/release_impl.yml
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
name: Make release
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
overwrite_release:
|
||||
description: "Overwrite the current release and tag"
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
title:
|
||||
description: "Release title"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
version:
|
||||
description: "Release version"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
notes_header_file:
|
||||
description: "Release notes header file"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GH_REPO: ${{ github.repository }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: release_artifacts
|
||||
pattern: clio_server_*
|
||||
|
||||
- name: Prepare files
|
||||
shell: bash
|
||||
working-directory: release_artifacts
|
||||
run: |
|
||||
cp ${{ github.workspace }}/.github/workflows/${{ inputs.notes_header_file }} "${RUNNER_TEMP}/release_notes.md"
|
||||
echo '' >> "${RUNNER_TEMP}/release_notes.md"
|
||||
echo '```' >> "${RUNNER_TEMP}/release_notes.md"
|
||||
|
||||
for d in $(ls); do
|
||||
archive_name=$(ls $d)
|
||||
mv ${d}/${archive_name} ./
|
||||
rm -r $d
|
||||
sha256sum ./$archive_name > ./${archive_name}.sha256sum
|
||||
cat ./$archive_name.sha256sum >> "${RUNNER_TEMP}/release_notes.md"
|
||||
done
|
||||
|
||||
echo '```' >> "${RUNNER_TEMP}/release_notes.md"
|
||||
|
||||
- name: Remove current release and tag
|
||||
if: ${{ github.event_name != 'pull_request' && inputs.overwrite_release }}
|
||||
shell: bash
|
||||
run: |
|
||||
gh release delete ${{ inputs.version }} --yes || true
|
||||
git push origin :${{ inputs.version }} || true
|
||||
|
||||
- name: Publish release
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
shell: bash
|
||||
run: |
|
||||
gh release create ${{ inputs.version }} \
|
||||
${{ inputs.overwrite_release && '--prerelease' || '' }} \
|
||||
--title "${{ inputs.title }}" \
|
||||
--target $GITHUB_SHA \
|
||||
--notes-file "${RUNNER_TEMP}/release_notes.md" \
|
||||
./release_artifacts/clio_server*
|
||||
118
.github/workflows/sanitizers.yml
vendored
118
.github/workflows/sanitizers.yml
vendored
@@ -1,15 +1,37 @@
|
||||
name: Run tests with sanitizers
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 1-5"
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/sanitizers.yml'
|
||||
- .github/workflows/sanitizers.yml
|
||||
|
||||
- .github/workflows/build_and_test.yml
|
||||
- .github/workflows/build_impl.yml
|
||||
- .github/workflows/test_impl.yml
|
||||
|
||||
- ".github/actions/**"
|
||||
- "!.github/actions/build_docker_image/**"
|
||||
- "!.github/actions/create_issue/**"
|
||||
- .github/scripts/execute-tests-under-sanitizer
|
||||
|
||||
- CMakeLists.txt
|
||||
- "cmake/**"
|
||||
# We don't run sanitizer on code change, because it takes too long
|
||||
# - "src/**"
|
||||
# - "tests/**"
|
||||
|
||||
concurrency:
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build clio tests
|
||||
build-and-test:
|
||||
name: Build and Test
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -18,89 +40,19 @@ jobs:
|
||||
compiler: gcc
|
||||
- sanitizer: asan
|
||||
compiler: gcc
|
||||
# - sanitizer: ubsan # todo: enable when heavy runners are available
|
||||
# compiler: gcc
|
||||
uses: ./.github/workflows/build_impl.yml
|
||||
- sanitizer: ubsan
|
||||
compiler: gcc
|
||||
|
||||
uses: ./.github/workflows/build_and_test.yml
|
||||
with:
|
||||
runs_on: ubuntu-latest # todo: change to heavy
|
||||
container: '{ "image": "rippleci/clio_ci:latest" }'
|
||||
runs_on: heavy
|
||||
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
|
||||
disable_cache: true
|
||||
conan_profile: ${{ matrix.compiler }}.${{ matrix.sanitizer }}
|
||||
build_type: Release
|
||||
code_coverage: false
|
||||
static: false
|
||||
unit_tests: true
|
||||
integration_tests: false
|
||||
clio_server: false
|
||||
target: clio_tests
|
||||
run_unit_tests: true
|
||||
run_integration_tests: false
|
||||
upload_clio_server: false
|
||||
targets: clio_tests clio_integration_tests
|
||||
sanitizer: ${{ matrix.sanitizer }}
|
||||
|
||||
# consider combining this with the previous matrix instead
|
||||
run_tests:
|
||||
needs: build
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- sanitizer: tsan
|
||||
compiler: gcc
|
||||
- sanitizer: asan
|
||||
compiler: gcc
|
||||
# - sanitizer: ubsan # todo: enable when heavy runners are available
|
||||
# compiler: gcc
|
||||
runs-on: ubuntu-latest # todo: change to heavy
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
permissions:
|
||||
contents: write
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_tests_${{ runner.os }}_Release_${{ matrix.compiler }}.${{ matrix.sanitizer }}
|
||||
|
||||
- name: Run clio_tests [${{ matrix.compiler }} / ${{ matrix.sanitizer }}]
|
||||
shell: bash
|
||||
run: |
|
||||
chmod +x ./clio_tests
|
||||
./.github/scripts/execute-tests-under-sanitizer ./clio_tests
|
||||
|
||||
- name: Check for sanitizer report
|
||||
shell: bash
|
||||
id: check_report
|
||||
run: |
|
||||
if ls .sanitizer-report/* 1> /dev/null 2>&1; then
|
||||
echo "found_report=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "found_report=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Upload report
|
||||
if: ${{ steps.check_report.outputs.found_report == 'true' }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.compiler }}_${{ matrix.sanitizer }}_report
|
||||
path: .sanitizer-report/*
|
||||
include-hidden-files: true
|
||||
|
||||
#
|
||||
# todo: enable when we have fixed all currently existing issues from sanitizers
|
||||
#
|
||||
# - name: Create an issue
|
||||
# if: ${{ steps.check_report.outputs.found_report == 'true' }}
|
||||
# uses: ./.github/actions/create_issue
|
||||
# env:
|
||||
# GH_TOKEN: ${{ github.token }}
|
||||
# with:
|
||||
# labels: 'bug'
|
||||
# title: '[${{ matrix.sanitizer }}/${{ matrix.compiler }}] reported issues'
|
||||
# body: >
|
||||
# Clio tests failed one or more sanitizer checks when built with ${{ matrix.compiler }}`.
|
||||
|
||||
# Workflow: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
|
||||
# Reports are available as artifacts.
|
||||
|
||||
165
.github/workflows/test_impl.yml
vendored
Normal file
165
.github/workflows/test_impl.yml
vendored
Normal file
@@ -0,0 +1,165 @@
|
||||
name: Reusable test
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs_on:
|
||||
description: Runner to run the job on
|
||||
required: true
|
||||
type: string
|
||||
|
||||
container:
|
||||
description: "The container object as a JSON string (leave empty to run natively)"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
conan_profile:
|
||||
description: Conan profile to use
|
||||
required: true
|
||||
type: string
|
||||
|
||||
build_type:
|
||||
description: Build type
|
||||
required: true
|
||||
type: string
|
||||
|
||||
run_unit_tests:
|
||||
description: Whether to run unit tests
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
run_integration_tests:
|
||||
description: Whether to run integration tests
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
sanitizer:
|
||||
description: Sanitizer to use
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
unit_tests:
|
||||
name: Unit testing ${{ inputs.container != '' && 'in container' || 'natively' }}
|
||||
runs-on: ${{ inputs.runs_on }}
|
||||
container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
|
||||
|
||||
if: inputs.run_unit_tests
|
||||
|
||||
env:
|
||||
# TODO: remove when we have fixed all currently existing issues from sanitizers
|
||||
SANITIZER_IGNORE_ERRORS: ${{ inputs.sanitizer != 'false' && inputs.sanitizer != 'ubsan' }}
|
||||
|
||||
steps:
|
||||
- name: Clean workdir
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
|
||||
- name: Make clio_tests executable
|
||||
shell: bash
|
||||
run: chmod +x ./clio_tests
|
||||
|
||||
- name: Run clio_tests (regular)
|
||||
if: env.SANITIZER_IGNORE_ERRORS == 'false'
|
||||
run: ./clio_tests
|
||||
|
||||
- name: Run clio_tests (sanitizer errors ignored)
|
||||
if: env.SANITIZER_IGNORE_ERRORS == 'true'
|
||||
run: ./.github/scripts/execute-tests-under-sanitizer ./clio_tests
|
||||
|
||||
- name: Check for sanitizer report
|
||||
if: env.SANITIZER_IGNORE_ERRORS == 'true'
|
||||
shell: bash
|
||||
id: check_report
|
||||
run: |
|
||||
if ls .sanitizer-report/* 1> /dev/null 2>&1; then
|
||||
echo "found_report=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "found_report=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Upload sanitizer report
|
||||
if: env.SANITIZER_IGNORE_ERRORS == 'true' && steps.check_report.outputs.found_report == 'true'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.conan_profile }}_report
|
||||
path: .sanitizer-report/*
|
||||
include-hidden-files: true
|
||||
|
||||
- name: Create an issue
|
||||
if: false && env.SANITIZER_IGNORE_ERRORS == 'true' && steps.check_report.outputs.found_report == 'true'
|
||||
uses: ./.github/actions/create_issue
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
labels: "bug"
|
||||
title: "[${{ inputs.conan_profile }}] reported issues"
|
||||
body: >
|
||||
Clio tests failed one or more sanitizer checks when built with ${{ inputs.conan_profile }}`.
|
||||
|
||||
Workflow: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
|
||||
Reports are available as artifacts.
|
||||
|
||||
integration_tests:
|
||||
name: Integration testing ${{ inputs.container != '' && 'in container' || 'natively' }}
|
||||
runs-on: ${{ inputs.runs_on }}
|
||||
container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
|
||||
|
||||
if: inputs.run_integration_tests
|
||||
|
||||
services:
|
||||
scylladb:
|
||||
image: ${{ inputs.container != '' && 'scylladb/scylla' || '' }}
|
||||
options: >-
|
||||
--health-cmd "cqlsh -e 'describe cluster'"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- name: Clean workdir
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
|
||||
|
||||
- name: Spin up scylladb
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
timeout-minutes: 3
|
||||
run: |
|
||||
docker rm --force scylladb || true
|
||||
docker run \
|
||||
--detach \
|
||||
--name scylladb \
|
||||
--health-cmd "cqlsh -e 'describe cluster'" \
|
||||
--health-interval 10s \
|
||||
--health-timeout 5s \
|
||||
--health-retries 5 \
|
||||
--publish 9042:9042 \
|
||||
--memory 16G \
|
||||
scylladb/scylla
|
||||
|
||||
until [ "$(docker inspect -f '{{.State.Health.Status}}' scylladb)" == "healthy" ]; do
|
||||
sleep 5
|
||||
done
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
|
||||
- name: Run clio_integration_tests
|
||||
run: |
|
||||
chmod +x ./clio_integration_tests
|
||||
./clio_integration_tests ${{ runner.os != 'macOS' && '--backend_host=scylladb' || '' }}
|
||||
|
||||
- name: Show docker logs and stop scylladb
|
||||
if: ${{ always() && runner.os == 'macOS' }}
|
||||
run: |
|
||||
docker logs scylladb
|
||||
docker rm --force scylladb || true
|
||||
28
.github/workflows/update_docker_ci.yml
vendored
28
.github/workflows/update_docker_ci.yml
vendored
@@ -1,22 +1,37 @@
|
||||
name: Update CI docker image
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- 'docker/ci/**'
|
||||
- 'docker/compilers/**'
|
||||
- .github/workflows/update_docker_ci.yml
|
||||
|
||||
- ".github/actions/build_docker_image/**"
|
||||
|
||||
- "docker/ci/**"
|
||||
- "docker/compilers/**"
|
||||
push:
|
||||
branches: [develop]
|
||||
paths:
|
||||
- 'docker/ci/**' # CI image must update when either its dockerfile changes
|
||||
- 'docker/compilers/**' # or any compilers changed and were pushed by hand
|
||||
- .github/workflows/update_docker_ci.yml
|
||||
|
||||
- ".github/actions/build_docker_image/**"
|
||||
|
||||
# CI image must update when either its Dockerfile changes
|
||||
# or any compilers changed and were pushed by hand
|
||||
- "docker/ci/**"
|
||||
- "docker/compilers/**"
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build_and_push:
|
||||
name: Build and push docker image
|
||||
runs-on: [self-hosted, heavy]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: ./.github/actions/build_docker_image
|
||||
@@ -25,7 +40,10 @@ jobs:
|
||||
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
image_name: rippleci/clio_ci
|
||||
images: |
|
||||
rippleci/clio_ci
|
||||
ghcr.io/xrplf/clio-ci
|
||||
dockerhub_repo: rippleci/clio_ci
|
||||
push_image: ${{ github.event_name != 'pull_request' }}
|
||||
directory: docker/ci
|
||||
tags: |
|
||||
|
||||
10
.github/workflows/upload_coverage_report.yml
vendored
10
.github/workflows/upload_coverage_report.yml
vendored
@@ -1,4 +1,5 @@
|
||||
name: Upload report
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
@@ -10,6 +11,7 @@ jobs:
|
||||
upload_report:
|
||||
name: Upload report
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
@@ -23,13 +25,9 @@ jobs:
|
||||
|
||||
- name: Upload coverage report
|
||||
if: ${{ hashFiles('build/coverage_report.xml') != '' }}
|
||||
uses: wandalen/wretry.action@v3.7.3
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
with:
|
||||
action: codecov/codecov-action@v4
|
||||
with: |
|
||||
files: build/coverage_report.xml
|
||||
fail_ci_if_error: false
|
||||
fail_ci_if_error: true
|
||||
verbose: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
attempt_limit: 5
|
||||
attempt_delay: 10000
|
||||
|
||||
8
.hadolint.yml
Normal file
8
.hadolint.yml
Normal file
@@ -0,0 +1,8 @@
|
||||
---
|
||||
ignored:
|
||||
- DL3003
|
||||
- DL3008
|
||||
- DL3013
|
||||
- DL3015
|
||||
- DL3027
|
||||
- DL3047
|
||||
6
.markdownlint.yaml
Normal file
6
.markdownlint.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
# Default state for all rules
|
||||
default: true
|
||||
|
||||
# MD013/line-length - Line length
|
||||
MD013:
|
||||
line_length: 1000
|
||||
109
.pre-commit-config.yaml
Normal file
109
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,109 @@
|
||||
---
|
||||
# pre-commit is a tool to perform a predefined set of tasks manually and/or
|
||||
# automatically before git commits are made.
|
||||
#
|
||||
# Config reference: https://pre-commit.com/#pre-commit-configyaml---top-level
|
||||
#
|
||||
# Common tasks
|
||||
#
|
||||
# - Run on all files: pre-commit run --all-files
|
||||
# - Register git hooks: pre-commit install --hook-type pre-commit --hook-type pre-push
|
||||
#
|
||||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
# `pre-commit sample-config` default hooks
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: cef0300fd0fc4d2a87a85fa2093c6b283ea36f4b # frozen: v5.0.0
|
||||
hooks:
|
||||
- id: check-added-large-files
|
||||
- id: check-executables-have-shebangs
|
||||
- id: check-shebang-scripts-are-executable
|
||||
- id: end-of-file-fixer
|
||||
exclude: ^docs/doxygen-awesome-theme/
|
||||
- id: trailing-whitespace
|
||||
exclude: ^docs/doxygen-awesome-theme/
|
||||
|
||||
# Autoformat: YAML, JSON, Markdown, etc.
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: 787fb9f542b140ba0b2aced38e6a3e68021647a3 # frozen: v3.5.3
|
||||
hooks:
|
||||
- id: prettier
|
||||
exclude: ^docs/doxygen-awesome-theme/
|
||||
|
||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||
rev: 586c3ea3f51230da42bab657c6a32e9e66c364f0 # frozen: v0.44.0
|
||||
hooks:
|
||||
- id: markdownlint-fix
|
||||
exclude: LICENSE.md
|
||||
|
||||
- repo: https://github.com/hadolint/hadolint
|
||||
rev: c3dc18df7a501f02a560a2cc7ba3c69a85ca01d3 # frozen: v2.13.1-beta
|
||||
hooks:
|
||||
- id: hadolint-docker
|
||||
# hadolint-docker is a special hook that runs hadolint in a Docker container
|
||||
# Docker is not installed in the environment where pre-commit is run
|
||||
stages: [manual]
|
||||
entry: hadolint/hadolint:v2.12.1-beta hadolint
|
||||
|
||||
- repo: https://github.com/codespell-project/codespell
|
||||
rev: 63c8f8312b7559622c0d82815639671ae42132ac # frozen: v2.4.1
|
||||
hooks:
|
||||
- id: codespell
|
||||
args:
|
||||
[
|
||||
--write-changes,
|
||||
--ignore-words=pre-commit-hooks/codespell_ignore.txt,
|
||||
]
|
||||
|
||||
# Running fix-local-includes before clang-format
|
||||
# to ensure that the include order is correct.
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: fix-local-includes
|
||||
name: Fix Local Includes
|
||||
entry: pre-commit-hooks/fix-local-includes.sh
|
||||
types: [c++]
|
||||
language: script
|
||||
- repo: https://github.com/pre-commit/mirrors-clang-format
|
||||
rev: f9a52e87b6cdcb01b0a62b8611d9ba9f2dad0067 # frozen: v19.1.7
|
||||
hooks:
|
||||
- id: clang-format
|
||||
args: [--style=file]
|
||||
types: [c++]
|
||||
|
||||
- repo: https://github.com/cheshirekow/cmake-format-precommit
|
||||
rev: e2c2116d86a80e72e7146a06e68b7c228afc6319 # frozen: v0.6.13
|
||||
hooks:
|
||||
- id: cmake-format
|
||||
additional_dependencies: [PyYAML]
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: check-no-h-files
|
||||
name: No .h files
|
||||
entry: There should be no .h files in this repository
|
||||
language: fail
|
||||
files: \.h$
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: gofmt
|
||||
name: Go Format
|
||||
entry: pre-commit-hooks/run-go-fmt.sh
|
||||
types: [go]
|
||||
language: golang
|
||||
description: "Runs `gofmt`, requires golang"
|
||||
- id: check-docs
|
||||
name: Check Doxygen Documentation
|
||||
entry: pre-commit-hooks/check-doxygen-docs.sh
|
||||
types: [text]
|
||||
language: script
|
||||
pass_filenames: false
|
||||
- id: verify-commits
|
||||
name: Verify Commits
|
||||
entry: pre-commit-hooks/verify-commits.sh
|
||||
always_run: true
|
||||
stages: [pre-push]
|
||||
language: script
|
||||
pass_filenames: false
|
||||
@@ -17,6 +17,7 @@ option(packaging "Create distribution packages" FALSE)
|
||||
option(lint "Run clang-tidy checks during compilation" FALSE)
|
||||
option(static "Statically linked Clio" FALSE)
|
||||
option(snapshot "Build snapshot tool" FALSE)
|
||||
option(time_trace "Build using -ftime-trace to create compiler trace reports" FALSE)
|
||||
|
||||
# ========================================================================== #
|
||||
set(san "" CACHE STRING "Add sanitizer instrumentation")
|
||||
|
||||
125
CONTRIBUTING.md
125
CONTRIBUTING.md
@@ -1,32 +1,50 @@
|
||||
# Contributing
|
||||
|
||||
Thank you for your interest in contributing to the `clio` project 🙏
|
||||
|
||||
## Workflow
|
||||
|
||||
To contribute, please:
|
||||
|
||||
1. Fork the repository under your own user.
|
||||
2. Create a new branch on which to commit/push your changes.
|
||||
3. Write and test your code.
|
||||
4. Ensure that your code compiles with the provided build engine and update the provided build engine as part of your PR where needed and where appropriate.
|
||||
5. Where applicable, write test cases for your code and include those in the relevant subfolder under `tests`.
|
||||
6. Ensure your code passes automated checks (e.g. clang-format)
|
||||
6. Ensure your code passes [automated checks](#pre-commit-hooks)
|
||||
7. Squash your commits (i.e. rebase) into as few commits as is reasonable to describe your changes at a high level (typically a single commit for a small change). See below for more details.
|
||||
8. Open a PR to the main repository onto the _develop_ branch, and follow the provided template.
|
||||
|
||||
> **Note:** Please read the [Style guide](#style-guide).
|
||||
|
||||
## Install git hooks
|
||||
Please run the following command in order to use git hooks that are helpful for `clio` development.
|
||||
### `git lfs` hooks
|
||||
|
||||
Install `git lfs` hooks using the following command:
|
||||
|
||||
```bash
|
||||
git config --local core.hooksPath .githooks
|
||||
git lfs install
|
||||
```
|
||||
|
||||
## Git hooks dependencies
|
||||
The pre-commit hook requires `clang-format >= 19.0.0` and `cmake-format` to be installed on your machine.
|
||||
`clang-format` can be installed using `brew` on macOS and default package manager on Linux.
|
||||
`cmake-format` can be installed using `pip`.
|
||||
The hook will also attempt to automatically use `doxygen` to verify that everything public in the codebase is covered by doc comments. If `doxygen` is not installed, the hook will raise a warning suggesting to install `doxygen` for future commits.
|
||||
> **Note:** You need to install Git LFS hooks before installing `pre-commit` hooks.
|
||||
|
||||
### `pre-commit` hooks
|
||||
|
||||
To ensure code quality and style, we use [`pre-commit`](https://pre-commit.com/).
|
||||
|
||||
Run the following command to enable `pre-commit` hooks that help with Clio development:
|
||||
|
||||
```bash
|
||||
pip3 install pre-commit
|
||||
pre-commit install --hook-type pre-commit --hook-type pre-push
|
||||
```
|
||||
|
||||
`pre-commit` takes care of running each tool in [`.pre-commit-config.yaml`](https://github.com/XRPLF/clio/blob/develop/.pre-commit-config.yaml) in a separate environment.
|
||||
|
||||
`pre-commit` also attempts to automatically use Doxygen to verify that everything public in the codebase has doc comments.
|
||||
If Doxygen is not installed, the hook issues a warning and recommends installing Doxygen for future commits.
|
||||
|
||||
### Git commands
|
||||
|
||||
## Git commands
|
||||
This sections offers a detailed look at the git commands you will need to use to get your PR submitted.
|
||||
Please note that there are more than one way to do this and these commands are provided for your convenience.
|
||||
At this point it's assumed that you have already finished working on your feature/bug.
|
||||
@@ -43,6 +61,7 @@ git pull origin develop
|
||||
git checkout <your feature branch>
|
||||
git rebase -i develop
|
||||
```
|
||||
|
||||
For each commit in the list other than the first one, enter `s` to squash.
|
||||
After this is done, you will have the opportunity to write a message for the squashed commit.
|
||||
|
||||
@@ -52,6 +71,7 @@ After this is done, you will have the opportunity to write a message for the squ
|
||||
# You should now have a single commit on top of a commit in `develop`
|
||||
git log
|
||||
```
|
||||
|
||||
> **Note:** If there are merge conflicts, please resolve them now.
|
||||
|
||||
```bash
|
||||
@@ -68,15 +88,18 @@ git commit --amend -S
|
||||
git push --force
|
||||
```
|
||||
|
||||
## Use ccache (optional)
|
||||
### Use ccache (optional)
|
||||
|
||||
Clio uses `ccache` to speed up compilation. If you want to use it, please make sure it is installed on your machine.
|
||||
CMake will automatically detect it and use it if it is available.
|
||||
|
||||
## Opening a pull request
|
||||
### Opening a pull request
|
||||
|
||||
When a pull request is open CI will perform checks on the new code.
|
||||
Title of the pull request and squashed commit should follow [conventional commits specification](https://www.conventionalcommits.org/en/v1.0.0/).
|
||||
|
||||
## Fixing issues found during code review
|
||||
### Fixing issues found during code review
|
||||
|
||||
While your code is in review, it's possible that some changes will be requested by reviewer(s).
|
||||
This section describes the process of adding your fixes.
|
||||
|
||||
@@ -94,62 +117,72 @@ git commit -S -m "[FOLD] Your commit message"
|
||||
git push
|
||||
```
|
||||
|
||||
## After code review
|
||||
### After code review
|
||||
|
||||
When your PR is approved and ready to merge, use `Squash and merge`.
|
||||
The button for that is near the bottom of the PR's page on GitHub.
|
||||
|
||||
> **Important:** Please leave the automatically-generated mention/link to the PR in the subject line **and** in the description field add `"Fix #ISSUE_ID"` (replacing `ISSUE_ID` with yours) if the PR fixes an issue.
|
||||
> **Note:** See [issues](https://github.com/XRPLF/clio/issues) to find the `ISSUE_ID` for the feature/bug you were working on.
|
||||
|
||||
# Style guide
|
||||
## Style guide
|
||||
|
||||
This is a non-exhaustive list of recommended style guidelines. These are not always strictly enforced and serve as a way to keep the codebase coherent.
|
||||
|
||||
## Formatting
|
||||
Code must conform to `clang-format` version 19, unless the result would be unreasonably difficult to read or maintain.
|
||||
In most cases the pre-commit hook will take care of formatting and will fix any issues automatically.
|
||||
To manually format your code, use `clang-format -i <your changed files>` for C++ files and `cmake-format -i <your changed files>` for CMake files.
|
||||
### Formatting
|
||||
|
||||
Code must conform to `clang-format`, unless the result is unreasonably difficult to read or maintain.
|
||||
In most cases the `pre-commit` hook takes care of formatting and fixes any issues automatically.
|
||||
To manually format your code, run `pre-commit run clang-format --files <your changed files>` for C++ files, and `pre-commit run cmake-format --files <your changed files>` for CMake files.
|
||||
|
||||
### Documentation
|
||||
|
||||
## Documentation
|
||||
All public namespaces, classes and functions must be covered by doc (`doxygen`) comments. Everything that is not within a nested `impl` namespace is considered public.
|
||||
|
||||
> **Note:** Keep in mind that this is enforced by Clio's CI and your build will fail if newly added public code lacks documentation.
|
||||
|
||||
## Avoid
|
||||
* Proliferation of nearly identical code.
|
||||
* Proliferation of new files and classes unless it improves readability or/and compilation time.
|
||||
* Unmanaged memory allocation and raw pointers.
|
||||
* Macros (unless they add significant value.)
|
||||
* Lambda patterns (unless these add significant value.)
|
||||
* CPU or architecture-specific code unless there is a good reason to include it, and where it is used guard it with macros and provide explanatory comments.
|
||||
* Importing new libraries unless there is a very good reason to do so.
|
||||
### Avoid
|
||||
|
||||
## Seek to
|
||||
* Extend functionality of existing code rather than creating new code.
|
||||
* Prefer readability over terseness where important logic is concerned.
|
||||
* Inline functions that are not used or are not likely to be used elsewhere in the codebase.
|
||||
* Use clear and self-explanatory names for functions, variables, structs and classes.
|
||||
* Use TitleCase for classes, structs and filenames, camelCase for function and variable names, lower case for namespaces and folders.
|
||||
* Provide as many comments as you feel that a competent programmer would need to understand what your code does.
|
||||
- Proliferation of nearly identical code.
|
||||
- Proliferation of new files and classes unless it improves readability or/and compilation time.
|
||||
- Unmanaged memory allocation and raw pointers.
|
||||
- Macros (unless they add significant value.)
|
||||
- Lambda patterns (unless these add significant value.)
|
||||
- CPU or architecture-specific code unless there is a good reason to include it, and where it is used guard it with macros and provide explanatory comments.
|
||||
- Importing new libraries unless there is a very good reason to do so.
|
||||
|
||||
### Seek to
|
||||
|
||||
- Extend functionality of existing code rather than creating new code.
|
||||
- Prefer readability over terseness where important logic is concerned.
|
||||
- Inline functions that are not used or are not likely to be used elsewhere in the codebase.
|
||||
- Use clear and self-explanatory names for functions, variables, structs and classes.
|
||||
- Use TitleCase for classes, structs and filenames, camelCase for function and variable names, lower case for namespaces and folders.
|
||||
- Provide as many comments as you feel that a competent programmer would need to understand what your code does.
|
||||
|
||||
## Maintainers

# Maintainers

Maintainers are ecosystem participants with elevated access to the repository. They are able to push new code, make decisions on when a release should be made, etc.

## Code Review

### Code Review

A PR must be reviewed and approved by at least one of the maintainers before it can be merged.

## Adding and Removing

### Adding and Removing

New maintainers can be proposed by two existing maintainers, subject to a vote by a quorum of the existing maintainers. A minimum of 50% support and 50% participation is required. In the event of a tie vote, the addition of the new maintainer will be rejected.

Existing maintainers can resign, or be subject to a vote for removal at the behest of two existing maintainers. A minimum of 60% agreement and 50% participation are required. The XRP Ledger Foundation will have the ability, for cause, to remove an existing maintainer without a vote.

## Existing Maintainers

### Existing Maintainers

* [cindyyan317](https://github.com/cindyyan317) (Ripple)
* [godexsoft](https://github.com/godexsoft) (Ripple)
* [kuznetsss](https://github.com/kuznetsss) (Ripple)
* [legleux](https://github.com/legleux) (Ripple)

- [godexsoft](https://github.com/godexsoft) (Ripple)
- [kuznetsss](https://github.com/kuznetsss) (Ripple)
- [legleux](https://github.com/legleux) (Ripple)
- [PeterChen13579](https://github.com/PeterChen13579) (Ripple)

## Honorable ex-Maintainers

### Honorable ex-Maintainers

* [cjcobb23](https://github.com/cjcobb23) (ex-Ripple)
* [natenichols](https://github.com/natenichols) (ex-Ripple)

- [cindyyan317](https://github.com/cindyyan317) (ex-Ripple)
- [cjcobb23](https://github.com/cjcobb23) (ex-Ripple)
- [natenichols](https://github.com/natenichols) (ex-Ripple)
@@ -5,4 +5,3 @@ Copyright (c) 2022, the clio developers

Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
@@ -1,4 +1,4 @@

# <img src='./docs/img/xrpl-logo.svg' width='40' valign="top" /> Clio
# <img src='./docs/img/xrpl-logo.svg' width='40' valign="top" /> Clio <!-- markdownlint-disable-line MD033 MD045 -->

[](https://github.com/XRPLF/clio/actions/workflows/build.yml?query=branch%3Adevelop)
[](https://github.com/XRPLF/clio/actions/workflows/nightly.yml?query=branch%3Adevelop)

@@ -16,7 +16,7 @@ Multiple Clio nodes can share access to the same dataset, which allows for a hig

Clio offers the full `rippled` API, with the caveat that Clio by default only returns validated data. This means that `ledger_index` defaults to `validated` instead of `current` for all requests. Other non-validated data, such as information about queued transactions, is also not returned.

Clio retrieves data from a designated group of `rippled` nodes instead of connecting to the peer-to-peer network.
For requests that require access to the peer-to-peer network, such as `fee` or `submit`, Clio automatically forwards the request to a `rippled` node and propagates the response back to the client. To access non-validated data for *any* request, simply add `ledger_index: "current"` to the request, and Clio will forward the request to `rippled`.
For requests that require access to the peer-to-peer network, such as `fee` or `submit`, Clio automatically forwards the request to a `rippled` node and propagates the response back to the client. To access non-validated data for _any_ request, simply add `ledger_index: "current"` to the request, and Clio will forward the request to `rippled`.

> [!NOTE]
> Clio requires access to at least one `rippled` node, which can run on the same machine as Clio or separately.
cliff.toml (new file, 92 lines)

@@ -0,0 +1,92 @@

# git-cliff ~ default configuration file
# https://git-cliff.org/docs/configuration
#
# Lines starting with "#" are comments.
# Configuration options are organized into tables and keys.
# See documentation for more information on available options.

[changelog]
# template for the changelog header
header = """
# Changelog\n
All notable changes to this project will be documented in this file.\n
"""
# template for the changelog body
# https://keats.github.io/tera/docs/#introduction
body = """
{% if version %}\
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [unreleased]
{% endif %}\
{% for group, commits in commits | filter(attribute="merge_commit", value=false) | group_by(attribute="group") %}
### {{ group | striptags | trim | upper_first }}
{% for commit in commits %}
- {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
{% if commit.breaking %}[**breaking**] {% endif %}\
{{ commit.message | upper_first }} {% if commit.remote.username %}by @{{ commit.remote.username }}{% endif %}\
{% endfor %}
{% endfor %}\n
"""
# template for the changelog footer
footer = """
<!-- generated by git-cliff -->
"""
# remove the leading and trailing s
trim = true
# postprocessors
postprocessors = [
  # { pattern = '<REPO>', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL
]
# render body even when there are no releases to process
# render_always = true
# output file path
output = "CHANGELOG.md"

[git]
# parse the commits based on https://www.conventionalcommits.org
conventional_commits = true
# filter out the commits that are not conventional
filter_unconventional = true
# process each line of a commit as an individual commit
split_commits = false
# regex for preprocessing the commit messages
commit_preprocessors = [
  # Replace issue numbers
  #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"},
  # Check spelling of the commit with https://github.com/crate-ci/typos
  # If the spelling is incorrect, it will be automatically fixed.
  #{ pattern = '.*', replace_command = 'typos --write-changes -' },
]
# regex for parsing and grouping commits
commit_parsers = [
  { message = "^feat", group = "<!-- 0 -->🚀 Features" },
  { message = "^fix", group = "<!-- 1 -->🐛 Bug Fixes" },
  { message = "^doc", group = "<!-- 3 -->📚 Documentation" },
  { message = "^perf", group = "<!-- 4 -->⚡ Performance" },
  { message = "^refactor", group = "<!-- 2 -->🚜 Refactor" },
  { message = "^style.*[Cc]lang-tidy auto fixes", skip = true },
  { message = "^style", group = "<!-- 5 -->🎨 Styling" },
  { message = "^test", group = "<!-- 6 -->🧪 Testing" },
  { message = "^chore\\(release\\): prepare for", skip = true },
  { message = "^chore: Commits", skip = true },
  { message = "^chore\\(deps.*\\)", skip = true },
  { message = "^chore\\(pr\\)", skip = true },
  { message = "^chore\\(pull\\)", skip = true },
  { message = "^chore|^ci", group = "<!-- 7 -->⚙️ Miscellaneous Tasks" },
  { body = ".*security", group = "<!-- 8 -->🛡️ Security" },
  { message = "^revert", group = "<!-- 9 -->◀️ Revert" },
  { message = ".*", group = "<!-- 10 -->💼 Other" },
]
# filter out the commits that are not matched by commit parsers
filter_commits = false
# sort the tags topologically
topo_order = false
# sort the commits inside sections by oldest/newest order
sort_commits = "oldest"

ignore_tags = "^.*-[b|rc].*"

[remote.github]
owner = "XRPLF"
repo = "clio"
@@ -26,7 +26,7 @@ set(COMPILER_FLAGS

# TODO: Address these and others in https://github.com/XRPLF/clio/issues/1273
)

# TODO: reenable when we change CI #884 if (is_gcc AND NOT lint) list(APPEND COMPILER_FLAGS -Wduplicated-branches
# TODO: re-enable when we change CI #884 if (is_gcc AND NOT lint) list(APPEND COMPILER_FLAGS -Wduplicated-branches
# -Wduplicated-cond -Wlogical-op -Wuseless-cast ) endif ()

if (is_clang)

@@ -70,4 +70,12 @@ endif ()

# See https://github.com/cpp-best-practices/cppbestpractices/blob/master/02-Use_the_Tools_Available.md#gcc--clang for
# the flags description

if (time_trace)
  if (is_clang OR is_appleclang)
    list(APPEND COMPILER_FLAGS -ftime-trace)
  else ()
    message(FATAL_ERROR "Clang or AppleClang is required to use `-ftime-trace`")
  endif ()
endif ()

target_compile_options(clio_options INTERFACE ${COMPILER_FLAGS})
conanfile.py (11 changed lines)

@@ -1,6 +1,7 @@

from conan import ConanFile
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout


class Clio(ConanFile):
    name = 'clio'
    license = 'ISC'

@@ -20,16 +21,17 @@ class Clio(ConanFile):

        'coverage': [True, False],  # build for test coverage report; create custom target `clio_tests-ccov`
        'lint': [True, False],  # run clang-tidy checks during compilation
        'snapshot': [True, False],  # build export/import snapshot tool
        'time_trace': [True, False]  # build using -ftime-trace to create compiler trace reports
    }

    requires = [
        'boost/1.82.0',
        'boost/1.83.0',
        'cassandra-cpp-driver/2.17.0',
        'fmt/10.1.1',
        'protobuf/3.21.9',
        'grpc/1.50.1',
        'openssl/1.1.1u',
        'xrpl/2.4.0-rc4',
        'openssl/1.1.1v',
        'xrpl/2.5.0-b1',
        'zlib/1.3.1',
        'libbacktrace/cci.20210118'
    ]

@@ -46,6 +48,7 @@ class Clio(ConanFile):

        'lint': False,
        'docs': False,
        'snapshot': False,
        'time_trace': False,

        'xrpl/*:tests': False,
        'xrpl/*:rocksdb': False,

@@ -83,6 +86,7 @@ class Clio(ConanFile):

        self.folders.generators = 'build/generators'

    generators = 'CMakeDeps'

    def generate(self):
        tc = CMakeToolchain(self)
        tc.variables['verbose'] = self.options.verbose

@@ -95,6 +99,7 @@ class Clio(ConanFile):

        tc.variables['packaging'] = self.options.packaging
        tc.variables['benchmark'] = self.options.benchmark
        tc.variables['snapshot'] = self.options.snapshot
        tc.variables['time_trace'] = self.options.time_trace
        tc.generate()

    def build(self):
@@ -2,27 +2,36 @@ FROM rippleci/clio_clang:16

ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH

SHELL ["/bin/bash", "-c"]
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Using root by default is not very secure but github checkout action doesn't work with any other user
# https://github.com/actions/checkout/issues/956
# And Github Actions doc recommends using root
# https://docs.github.com/en/actions/sharing-automations/creating-actions/dockerfile-support-for-github-actions#user

# hadolint ignore=DL3002
USER root
WORKDIR /root

ENV CCACHE_VERSION=4.10.2 \
    LLVM_TOOLS_VERSION=19 \
    GH_VERSION=2.40.0 \
    DOXYGEN_VERSION=1.12.0
    DOXYGEN_VERSION=1.12.0 \
    CLANG_BUILD_ANALYZER_VERSION=1.6.0 \
    GIT_CLIFF_VERSION=2.8.0

# Add repositories
RUN apt-get -qq update \
    && apt-get -qq install -y --no-install-recommends --no-install-suggests gnupg wget curl software-properties-common \
    && echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-${LLVM_TOOLS_VERSION} main" >> /etc/apt/sources.list \
    && wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
    && wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - && \
    apt-get clean && rm -rf /var/lib/apt/lists/*

# Install packages
RUN apt update -qq \
    && apt install -y --no-install-recommends --no-install-suggests python3 python3-pip git git-lfs make ninja-build flex bison jq graphviz \
    clang-format-${LLVM_TOOLS_VERSION} clang-tidy-${LLVM_TOOLS_VERSION} clang-tools-${LLVM_TOOLS_VERSION} \
    && update-alternatives --install /usr/bin/clang-format clang-format /usr/bin/clang-format-${LLVM_TOOLS_VERSION} 100 \
    && pip3 install -q --upgrade --no-cache-dir pip && pip3 install -q --no-cache-dir conan==1.62 gcovr cmake cmake-format \
    clang-tidy-${LLVM_TOOLS_VERSION} clang-tools-${LLVM_TOOLS_VERSION} \
    && pip3 install -q --upgrade --no-cache-dir pip && pip3 install -q --no-cache-dir conan==1.62 gcovr cmake==3.31.6 pre-commit \
    && apt-get clean && apt remove -y software-properties-common

# Install gcc-12 and make ldconfig aware of the new libstdc++ location (for gcc)

@@ -62,17 +71,25 @@ RUN wget "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN

    && cmake --build . --target install \
    && rm -rf /tmp/* /var/tmp/*

# Install ClangBuildAnalyzer
RUN wget "https://github.com/aras-p/ClangBuildAnalyzer/releases/download/v${CLANG_BUILD_ANALYZER_VERSION}/ClangBuildAnalyzer-linux" \
    && chmod +x ClangBuildAnalyzer-linux \
    && mv ClangBuildAnalyzer-linux /usr/bin/ClangBuildAnalyzer \
    && rm -rf /tmp/* /var/tmp/*

# Install git-cliff
RUN wget "https://github.com/orhun/git-cliff/releases/download/v${GIT_CLIFF_VERSION}/git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz" \
    && tar xf git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz \
    && mv git-cliff-${GIT_CLIFF_VERSION}/git-cliff /usr/bin/git-cliff \
    && rm -rf /tmp/* /var/tmp/*

# Install gh
RUN wget https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
RUN wget "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz" \
    && tar xf gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
    && mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/bin/gh \
    && rm -rf /tmp/* /var/tmp/*

WORKDIR /root
# Using root by default is not very secure but github checkout action doesn't work with any other user
# https://github.com/actions/checkout/issues/956
# And Github Actions doc recommends using root
# https://docs.github.com/en/actions/creating-actions/dockerfile-support-for-github-actions#user

# Setup conan
RUN conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
@@ -4,6 +4,7 @@ This image contains an environment to build [Clio](https://github.com/XRPLF/clio

It is used in [Clio Github Actions](https://github.com/XRPLF/clio/actions) but can also be used to compile Clio locally.

The image is based on Ubuntu 20.04 and contains:

- clang 16.0.6
- gcc 12.3
- doxygen 1.12
@@ -12,12 +12,14 @@ Your configuration file should be mounted under the path `/opt/clio/etc/config.j

Clio repository provides an [example](https://github.com/XRPLF/clio/blob/develop/docs/examples/config/example-config.json) of the configuration file.

Config file recommendations:

- Set `log_to_console` to `false` if you want to avoid logs being written to `stdout`.
- Set `log_directory` to `/opt/clio/log` to store logs in a volume.

## Usage

The following command can be used to run Clio in docker (assuming server's port is `51233` in your config):

```bash
docker run -d -v <path to your config.json>:/opt/clio/etc/config.json -v <path to store logs>:/opt/clio/log -p 51233:51233 rippleci/clio
```
@@ -3,6 +3,8 @@ ARG DEBIAN_FRONTEND=noninteractive

ARG TARGETARCH

SHELL ["/bin/bash", "-c"]

# hadolint ignore=DL3002
USER root
WORKDIR /root
@@ -43,7 +43,7 @@ RUN /gcc-$GCC_VERSION/configure \

    --disable-multilib \
    --without-cuda-driver \
    --enable-checking=release \
    && make -j`nproc` \
    && make -j "$(nproc)" \
    && make install-strip DESTDIR=/gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION \
    && mkdir -p /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/share/gdb/auto-load/usr/lib64 \
    && mv /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/lib64/libstdc++.so.6.0.30-gdb.py /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/share/gdb/auto-load/usr/lib64/libstdc++.so.6.0.30-gdb.py
@@ -1,6 +1,6 @@

services:
  clio_develop:
    image: rippleci/clio_ci:latest
    image: ghcr.io/xrplf/clio-ci:latest
    volumes:
      - clio_develop_conan_data:/root/.conan/data
      - clio_develop_ccache:/root/.ccache
@@ -59,4 +59,3 @@ case $1 in

esac

popd > /dev/null
@@ -3,6 +3,8 @@ project(docs)

include(${CMAKE_CURRENT_SOURCE_DIR}/../cmake/ClioVersion.cmake)

# cmake-format: off
# Generate `docs` target for doxygen documentation
# Note: use `cmake --build . --target docs` from your `build` directory to generate the documentation
# cmake-format: on
include(${CMAKE_CURRENT_SOURCE_DIR}/../cmake/Docs.cmake)
@@ -1,31 +1,32 @@

# How to build Clio

Clio is built with [CMake](https://cmake.org/) and uses [Conan](https://conan.io/) for managing dependencies. It is written in C++20 and therefore requires a modern compiler.
`Clio` is built with [CMake](https://cmake.org/) and uses [Conan](https://conan.io/) for managing dependencies.
`Clio` is written in C++23 and therefore requires a modern compiler.

## Minimum Requirements

- [Python 3.7](https://www.python.org/downloads/)
- [Conan 1.55](https://conan.io/downloads.html)
- [CMake 3.20](https://cmake.org/download/)
- [Conan 1.55, <2.0](https://conan.io/downloads.html)
- [CMake 3.20, <4.0](https://cmake.org/download/)
- [**Optional**] [GCovr](https://gcc.gnu.org/onlinedocs/gcc/Gcov.html): needed for code coverage generation
- [**Optional**] [CCache](https://ccache.dev/): speeds up compilation if you are going to compile Clio often

| Compiler | Version |
|-------------|---------|
| ----------- | ------- |
| GCC | 12.3 |
| Clang | 16 |
| Apple Clang | 15 |

### Conan Configuration

Clio does not require anything other than `compiler.cppstd=20` in your (`~/.conan/profiles/default`) Conan profile.
Clio requires `compiler.cppstd=20` in your Conan profile (`~/.conan/profiles/default`).

> [!NOTE]
> Although Clio is built using C++23, it's required to set `compiler.cppstd=20` for the time being as some of Clio's dependencies are not yet capable of building under C++23.

> Mac example:
**Mac example**:

```
```text
[settings]
os=Macos
os_build=Macos

@@ -40,9 +41,9 @@ compiler.cppstd=20

tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]
```

> Linux example:
**Linux example**:

```
```text
[settings]
os=Linux
os_build=Linux

@@ -80,7 +81,8 @@ Navigate to Clio's root directory and run:

```sh
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True
# You can also add -GNinja to use Ninja build system instead of Make
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
```

@@ -93,12 +95,14 @@ If successful, `conan install` will find the required packages and `cmake` will

> [!TIP]
> To generate a Code Coverage report, include `-o coverage=True` in the `conan install` command above, along with `-o tests=True` to enable tests. After running the `cmake` commands, execute `make clio_tests-ccov`. The coverage report will be found at `clio_tests-llvm-cov/index.html`.

<!-- markdownlint-disable-line MD028 -->

> [!NOTE]
> If you've built Clio before and the build is now failing, it's likely due to updated dependencies. Try deleting the build folder and then rerunning the Conan and CMake commands mentioned above.

### Generating API docs for Clio

The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen on your system.
The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen 1.12.0 on your system.

To generate the API docs:

@@ -106,7 +110,7 @@ To generate the API docs:

```sh
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False -o docs=True
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o docs=True
```

2. Once that has completed successfully, run the `cmake` command and add the `--target docs` option:

@@ -127,10 +131,10 @@ To generate the API docs:

It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.

```sh
docker run -it rippleci/clio_ci:latest
docker run -it ghcr.io/xrplf/clio-ci:latest
git clone https://github.com/XRPLF/clio
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
```

@@ -146,7 +150,8 @@ If you wish to develop against a `rippled` instance running in standalone mode t

Sometimes, during development, you need to build against a custom version of `libxrpl`. (For example, you may be developing compatibility for a proposed amendment that is not yet merged to the main `rippled` codebase.) To build Clio with compatibility for a custom fork or branch of `rippled`, follow these steps:

1. First, pull/clone the appropriate `rippled` fork and switch to the branch you want to build. For example, the following example uses an in-development build with [XLS-33d Multi-Purpose Tokens](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0033d-multi-purpose-tokens):
1. First, pull/clone the appropriate `rippled` fork and switch to the branch you want to build.
   The following example uses an in-development build with [XLS-33d Multi-Purpose Tokens](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0033d-multi-purpose-tokens):

   ```sh
   git clone https://github.com/shawnxie999/rippled/

@@ -167,13 +172,14 @@ Sometimes, during development, you need to build against a custom version of `li

   ```py
   # ... (excerpt from conanfile.py)
   requires = [
       'boost/1.82.0',
       'boost/1.83.0',
       'cassandra-cpp-driver/2.17.0',
       'fmt/10.1.1',
       'protobuf/3.21.9',
       'grpc/1.50.1',
       'openssl/1.1.1u',
       'openssl/1.1.1v',
       'xrpl/2.3.0-b1@my/feature', # Update this line
       'zlib/1.3.1',
       'libbacktrace/cci.20210118'
   ]
   ```

@@ -186,14 +192,14 @@ Sometimes, during development, you need to build against a custom version of `li

The minimum [clang-tidy](https://clang.llvm.org/extra/clang-tidy/) version required is 19.0.

Clang-tidy can be run by Cmake when building the project. To achieve this, you just need to provide the option `-o lint=True` for the `conan install` command:
Clang-tidy can be run by CMake when building the project. To achieve this, you just need to provide the option `-o lint=True` for the `conan install` command:

```sh
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=True
```

By default Cmake will try to find `clang-tidy` automatically in your system.
To force Cmake to use your desired binary, set the `CLIO_CLANG_TIDY_BIN` environment variable to the path of the `clang-tidy` binary. For example:
By default CMake will try to find `clang-tidy` automatically in your system.
To force CMake to use your desired binary, set the `CLIO_CLANG_TIDY_BIN` environment variable to the path of the `clang-tidy` binary. For example:

```sh
export CLIO_CLANG_TIDY_BIN=/opt/homebrew/opt/llvm@19/bin/clang-tidy
@@ -1,452 +1,592 @@

# Clio Config Description
This file lists all Clio Configuration definitions in detail.

This document provides a list of all available Clio configuration properties in detail.

> [!NOTE]
> Dot notation in configuration key names represents nested fields. For example, **database.scylladb** refers to the _scylladb_ field inside the _database_ object. If a key name includes "[]", it indicates that the nested field is an array (e.g., etl_sources.[]).

## Configuration Details

### Key: database.type

### database.type

- **Required**: True
- **Type**: string
- **Default value**: cassandra
- **Constraints**: The value must be one of the following: `cassandra`
- **Description**: Type of database to use. We currently support Cassandra and Scylladb. We default to Scylladb.

### Key: database.cassandra.contact_points

- **Default value**: `cassandra`
- **Constraints**: The value must be one of the following: `cassandra`.
- **Description**: Specifies the type of database used for storing and retrieving data required by the Clio server. Both ScyllaDB and Cassandra can serve as backends for Clio; however, this value must be set to `cassandra`.

### database.cassandra.contact_points

- **Required**: True
- **Type**: string
- **Default value**: localhost
- **Default value**: `localhost`
- **Constraints**: None
- **Description**: A list of IP addresses or hostnames of the initial nodes (Cassandra/Scylladb cluster nodes) that the client will connect to when establishing a connection with the database. If you're running locally, it should be 'localhost' or 127.0.0.1

### Key: database.cassandra.secure_connect_bundle

- **Description**: A list of IP addresses or hostnames for the initial cluster nodes (Cassandra or ScyllaDB) that the client connects to when establishing a database connection. If you're running Clio locally, set this value to `localhost` or `127.0.0.1`.

### database.cassandra.secure_connect_bundle

- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: None
- **Description**: Configuration file that contains the necessary security credentials and connection details for securely connecting to a Cassandra database cluster.

### Key: database.cassandra.port

- **Description**: The configuration file that contains the necessary credentials and connection details for securely connecting to a Cassandra database cluster.

### database.cassandra.port

- **Required**: False
- **Type**: int
- **Default value**: None
- **Constraints**: The minimum value is `1`. The maximum value is `65535
- **Description**: Port number to connect to the database.

### Key: database.cassandra.keyspace

- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
- **Description**: The port number used to connect to the Cassandra database.

### database.cassandra.keyspace

- **Required**: True
- **Type**: string
- **Default value**: clio
- **Default value**: `clio`
- **Constraints**: None
- **Description**: Keyspace to use for the database.

### Key: database.cassandra.replication_factor

- **Description**: The Cassandra keyspace to use for the database. If you don't provide a value, this is set to `clio` by default.

### database.cassandra.replication_factor

- **Required**: True
- **Type**: int
- **Default value**: 3
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
- **Description**: Number of replicated nodes for Scylladb. Visit this link for more details : https://university.scylladb.com/courses/scylla-essentials-overview/lessons/high-availability/topic/fault-tolerance-replication-factor/

### Key: database.cassandra.table_prefix

- **Default value**: `3`
- **Constraints**: The minimum value is `0`. The maximum value is `65535`.
- **Description**: Represents the number of replicated nodes for ScyllaDB. For more details see [Fault Tolerance Replication Factor](https://university.scylladb.com/courses/scylla-essentials-overview/lessons/high-availability/topic/fault-tolerance-replication-factor/).

### database.cassandra.table_prefix

- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: None
- **Description**: Prefix for Database table names.

### Key: database.cassandra.max_write_requests_outstanding

- **Description**: An optional field to specify a prefix for the Cassandra database table names.

### database.cassandra.max_write_requests_outstanding

- **Required**: True
- **Type**: int
- **Default value**: 10000
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
- **Description**: Maximum number of outstanding write requests. Write requests are api calls that write to database

### Key: database.cassandra.max_read_requests_outstanding

- **Default value**: `10000`
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: Represents the maximum number of outstanding write requests. Write requests are API calls that write to the database.

### database.cassandra.max_read_requests_outstanding

- **Required**: True
- **Type**: int
- **Default value**: 100000
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
- **Description**: Maximum number of outstanding read requests, which reads from database

### Key: database.cassandra.threads

- **Default value**: `100000`
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: Maximum number of outstanding read requests. Read requests are API calls that read from the database.

### database.cassandra.threads

- **Required**: True
- **Type**: int
- **Default value**: The number of available CPU cores.
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
- **Description**: Number of threads that will be used for database operations.

### Key: database.cassandra.core_connections_per_host

- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: Represents the number of threads that will be used for database operations.

### database.cassandra.core_connections_per_host

- **Required**: True
- **Type**: int
- **Default value**: 1
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
- **Description**: Number of core connections per host for Cassandra.

### Key: database.cassandra.queue_size_io

- **Default value**: `1`
- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
- **Description**: The number of core connections per host for the Cassandra database.
### database.cassandra.queue_size_io

- **Required**: False
- **Type**: int
- **Default value**: None
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
- **Description**: Queue size for I/O operations in Cassandra.

### Key: database.cassandra.write_batch_size

- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
- **Description**: Defines the queue size of the input/output (I/O) operations in Cassandra.

### database.cassandra.write_batch_size

- **Required**: True
- **Type**: int
- **Default value**: 20
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
- **Description**: Batch size for write operations in Cassandra.

### Key: database.cassandra.connect_timeout

- **Default value**: `20`
- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
- **Description**: Represents the batch size for write operations in Cassandra.

### database.cassandra.connect_timeout

- **Required**: False
- **Type**: int
- **Default value**: None
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
- **Description**: The maximum amount of time in seconds the system will wait for a connection to be successfully established with the database.

### Key: database.cassandra.request_timeout

- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: The maximum amount of time in seconds that the system waits for a database connection to be established.

### database.cassandra.request_timeout

- **Required**: False
- **Type**: int
- **Default value**: None
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
- **Description**: The maximum amount of time in seconds the system will wait for a request to be fetched from database.

### Key: database.cassandra.username

- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: The maximum amount of time in seconds that the system waits for a request to be fetched from the database.

### database.cassandra.username

- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: None
- **Description**: The username used for authenticating with the database.

### Key: database.cassandra.password

### database.cassandra.password

- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: None
- **Description**: The password used for authenticating with the database.

### Key: database.cassandra.certfile

### database.cassandra.certfile

- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: None
- **Description**: The path to the SSL/TLS certificate file used to establish a secure connection between the client and the Cassandra database.

### Key: allow_no_etl

### allow_no_etl

- **Required**: True
- **Type**: boolean
- **Default value**: True
- **Default value**: `True`
- **Constraints**: None
- **Description**: If True, no ETL nodes will run with Clio.

### Key: etl_sources.[].ip

- **Description**: If set to `True`, allows Clio to start without any ETL source.

### etl_sources.[].ip

- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: The value must be a valid IP address
- **Description**: IP address of the ETL source.

### Key: etl_sources.[].ws_port

- **Constraints**: The value must be a valid IP address.
- **Description**: The IP address of the ETL source.

### etl_sources.[].ws_port

- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: The minimum value is `1`. The maximum value is `65535
- **Description**: WebSocket port of the ETL source.

### Key: etl_sources.[].grpc_port

- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
- **Description**: The WebSocket port of the ETL source.

### etl_sources.[].grpc_port

- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: The minimum value is `1`. The maximum value is `65535
- **Description**: gRPC port of the ETL source.

### Key: forwarding.cache_timeout

- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
- **Description**: The gRPC port of the ETL source.

### forwarding.cache_timeout

- **Required**: True
- **Type**: double
- **Default value**: 0
- **Constraints**: The value must be a positive double number
- **Description**: Timeout duration for the forwarding cache used in Rippled communication.

### Key: forwarding.request_timeout

- **Default value**: `0`
- **Constraints**: The value must be a positive double number.
- **Description**: Specifies the timeout duration (in seconds) for the forwarding cache used in `rippled` communication. A value of `0` means disabling this feature.

### forwarding.request_timeout

- **Required**: True
- **Type**: double
- **Default value**: 10
- **Constraints**: The value must be a positive double number
- **Description**: Timeout duration for the forwarding request used in Rippled communication.

### Key: rpc.cache_timeout

- **Default value**: `10`
- **Constraints**: The value must be a positive double number.
- **Description**: Specifies the timeout duration (in seconds) for the forwarding request used in `rippled` communication.
### rpc.cache_timeout

- **Required**: True
- **Type**: double
- **Default value**: 0
- **Constraints**: The value must be a positive double number
- **Description**: Timeout duration for RPC requests.

### Key: num_markers

- **Default value**: `0`
- **Constraints**: The value must be a positive double number.
- **Description**: Specifies the timeout duration (in seconds) for RPC cache response to timeout. A value of `0` means disabling this feature.

### num_markers

- **Required**: False
- **Type**: int
- **Default value**: None
- **Constraints**: The minimum value is `1`. The maximum value is `256`
- **Description**: The number of markers is the number of coroutines to download the initial ledger

### Key: dos_guard.whitelist.[]

- **Constraints**: The minimum value is `1`. The maximum value is `256`.
- **Description**: Specifies the number of coroutines used to download the initial ledger.

### dos_guard.whitelist.[]

- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: None
- **Description**: List of IP addresses to whitelist for DOS protection.

### Key: dos_guard.max_fetches

- **Description**: The list of IP addresses to whitelist for DOS protection.

### dos_guard.max_fetches

- **Required**: True
- **Type**: int
- **Default value**: 1000000
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
- **Description**: Maximum number of fetch operations allowed by DOS guard.

### Key: dos_guard.max_connections

- **Default value**: `1000000`
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: The maximum number of fetch operations allowed by DOS guard.

### dos_guard.max_connections

- **Required**: True
- **Type**: int
- **Default value**: 20
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
- **Description**: Maximum number of concurrent connections allowed by DOS guard.

### Key: dos_guard.max_requests

- **Default value**: `20`
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: The maximum number of concurrent connections for a specific IP address.

### dos_guard.max_requests

- **Required**: True
- **Type**: int
- **Default value**: 20
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
- **Description**: Maximum number of requests allowed by DOS guard.

### Key: dos_guard.sweep_interval

- **Default value**: `20`
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: The maximum number of requests allowed for a specific IP address.

### dos_guard.sweep_interval

- **Required**: True
- **Type**: double
- **Default value**: 1
- **Constraints**: The value must be a positive double number
- **Description**: Interval in seconds for DOS guard to sweep/clear its state.

### Key: workers

- **Default value**: `1`
- **Constraints**: The value must be a positive double number.
- **Description**: Interval in seconds for DOS guard to sweep (clear) its state.

### workers

- **Required**: True
- **Type**: int
- **Default value**: The number of available CPU cores.
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
- **Description**: Number of threads to process RPC requests.

### Key: server.ip

- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: The number of threads used to process RPC requests.

### server.ip

- **Required**: True
- **Type**: string
- **Default value**: None
- **Constraints**: The value must be a valid IP address
- **Description**: IP address of the Clio HTTP server.

### Key: server.port

- **Constraints**: The value must be a valid IP address.
- **Description**: The IP address of the Clio HTTP server.

### server.port

- **Required**: True
- **Type**: int
- **Default value**: None
- **Constraints**: The minimum value is `1`. The maximum value is `65535
- **Description**: Port number of the Clio HTTP server.

### Key: server.max_queue_size

- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
- **Description**: The port number of the Clio HTTP server.

### server.max_queue_size

- **Required**: True
- **Type**: int
- **Default value**: 0
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
- **Description**: Maximum size of the server's request queue. Value of 0 is no limit.

### Key: server.local_admin

- **Default value**: `1`
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
- **Description**: The maximum size of the server's request queue. If set to `0`, this means there is no queue size limit.

### server.local_admin

- **Required**: False
- **Type**: boolean
- **Default value**: None
- **Constraints**: None
- **Description**: Indicates if the server should run with admin privileges. Only one of local_admin or admin_password can be set.

### Key: server.admin_password

- **Description**: Indicates if requests from `localhost` are allowed to call Clio admin-only APIs. Note that this setting cannot be enabled together with [server.admin_password](#serveradmin_password).

### server.admin_password

- **Required**: False
- **Type**: string
- **Default value**: None
- **Constraints**: None
- **Description**: Password for Clio admin-only APIs. Only one of local_admin or admin_password can be set.

### Key: server.processing_policy

- **Description**: The password for Clio admin-only APIs. Note that this setting cannot be enabled together with [server.local_admin](#serverlocal_admin).
### server.processing_policy

- **Required**: True
- **Type**: string
- **Default value**: parallel
- **Constraints**: The value must be one of the following: `parallel, sequent`
- **Description**: Could be "sequent" or "parallel". For the sequent policy, requests from a single client
connection are processed one by one, with the next request read only after the previous one is processed. For the parallel policy, Clio will accept
all requests and process them in parallel, sending a reply for each request as soon as it is ready.

### Key: server.parallel_requests_limit

- **Default value**: `parallel`
- **Constraints**: The value must be one of the following: `parallel`, `sequent`.
- **Description**: For the `sequent` policy, requests from a single client connection are processed one by one, with the next request read only after the previous one is processed. For the `parallel` policy, Clio will accept all requests and process them in parallel, sending a reply for each request as soon as it is ready.
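The behavioural difference between the two policies can be pictured with a small, hypothetical C++ sketch (this is not Clio's implementation; `handleRequest`, `runSequent` and `runParallel` are invented names used only to illustrate the ordering described above):

```cpp
#include <future>
#include <string>
#include <vector>

// Hypothetical stand-in for whatever produces a reply for one request.
std::string handleRequest(std::string const& request) {
    return "reply to " + request;
}

// "sequent": each request is fully processed and answered before the next one is read.
void runSequent(std::vector<std::string> const& requests) {
    for (auto const& req : requests) {
        auto const reply = handleRequest(req);  // the next request is not touched until this returns
        // ... send `reply` here, then move on to the next request
    }
}

// "parallel": all requests are accepted up front and processed concurrently;
// a real server would send each reply as soon as it becomes ready.
void runParallel(std::vector<std::string> const& requests) {
    std::vector<std::future<std::string>> inFlight;
    inFlight.reserve(requests.size());
    for (auto const& req : requests)
        inFlight.push_back(std::async(std::launch::async, handleRequest, req));
    for (auto& pending : inFlight) {
        auto const reply = pending.get();
        // ... send `reply` here
    }
}
```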
### server.parallel_requests_limit
|
||||
|
||||
- **Required**: False
|
||||
- **Type**: int
|
||||
- **Default value**: None
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
|
||||
- **Description**: Optional parameter, used only if processing_strategy `parallel`. It limits the number of requests for a single client connection that are processed in parallel. If not specified, the limit is infinite.
|
||||
### Key: server.ws_max_sending_queue_size
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
|
||||
- **Description**: This is an optional parameter, used only if the `processing_strategy` is `parallel`. It limits the number of requests processed in parallel for a single client connection. If not specified, no limit is enforced.
|
||||
|
||||
### server.ws_max_sending_queue_size
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 1500
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
|
||||
- **Description**: Maximum size of the websocket sending queue.
|
||||
### Key: prometheus.enabled
|
||||
- **Default value**: `1500`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: Maximum queue size for sending subscription data to clients. This queue buffers data when a client is slow to receive it, ensuring delivery once the client is ready.
|
||||
|
||||
### prometheus.enabled
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: boolean
|
||||
- **Default value**: False
|
||||
- **Default value**: `False`
|
||||
- **Constraints**: None
|
||||
- **Description**: Enable or disable Prometheus metrics.
|
||||
### Key: prometheus.compress_reply
|
||||
- **Description**: Enables or disables Prometheus metrics.
|
||||
|
||||
### prometheus.compress_reply
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: boolean
|
||||
- **Default value**: False
|
||||
- **Default value**: `False`
|
||||
- **Constraints**: None
|
||||
- **Description**: Enable or disable compression of Prometheus responses.
|
||||
### Key: io_threads
|
||||
- **Description**: Enables or disables compression of Prometheus responses.
|
||||
|
||||
### io_threads
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 2
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `65535`
|
||||
- **Description**: Number of I/O threads. Value cannot be less than 1
|
||||
### Key: subscription_workers
|
||||
- **Default value**: `2`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
|
||||
- **Description**: The number of input/output (I/O) threads. The value cannot be less than `1`.
|
||||
|
||||
### subscription_workers
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 1
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
|
||||
- **Description**: The number of worker threads or processes that are responsible for managing and processing subscription-based tasks from rippled
|
||||
### Key: graceful_period
|
||||
- **Default value**: `1`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: The number of worker threads or processes that are responsible for managing and processing subscription-based tasks from `rippled`.
|
||||
|
||||
### graceful_period
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: double
|
||||
- **Default value**: 10
|
||||
- **Constraints**: The value must be a positive double number
|
||||
- **Description**: Number of milliseconds server will wait to shutdown gracefully.
|
||||
### Key: cache.num_diffs
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 32
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
|
||||
- **Description**: Number of diffs to cache. For more info, consult readme.md in etc
|
||||
### Key: cache.num_markers
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 48
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
|
||||
- **Description**: Number of markers to cache.
|
||||
### Key: cache.num_cursors_from_diff
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 0
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
|
||||
- **Description**: Num of cursors that are different.
|
||||
### Key: cache.num_cursors_from_account
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 0
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
|
||||
- **Description**: Number of cursors from an account.
|
||||
### Key: cache.page_fetch_size
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 512
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
|
||||
- **Description**: Page fetch size for cache operations.
|
||||
### Key: cache.load
|
||||
- **Required**: True
|
||||
- **Type**: string
|
||||
- **Default value**: async
|
||||
- **Constraints**: The value must be one of the following: `sync, async, none`
|
||||
- **Description**: Cache loading strategy ('sync' or 'async').
|
||||
### Key: log_channels.[].channel
|
||||
- **Required**: False
|
||||
- **Type**: string
|
||||
- **Default value**: None
|
||||
- **Constraints**: The value must be one of the following: `General, WebServer, Backend, RPC, ETL, Subscriptions, Performance, Migration`
|
||||
- **Description**: Name of the log channel.'RPC', 'ETL', and 'Performance'
|
||||
### Key: log_channels.[].log_level
|
||||
- **Required**: False
|
||||
- **Type**: string
|
||||
- **Default value**: None
|
||||
- **Constraints**: The value must be one of the following: `trace, debug, info, warning, error, fatal, count`
|
||||
- **Description**: Log level for the specific log channel.`warning`, `error`, `fatal`
|
||||
### Key: log_level
|
||||
- **Required**: True
|
||||
- **Type**: string
|
||||
- **Default value**: info
|
||||
- **Constraints**: The value must be one of the following: `trace, debug, info, warning, error, fatal, count`
|
||||
- **Description**: General logging level of Clio. This level will be applied to all log channels that do not have an explicitly defined logging level.
|
||||
### Key: log_format
|
||||
- **Required**: True
|
||||
- **Type**: string
|
||||
- **Default value**: %TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%
|
||||
- **Constraints**: None
|
||||
- **Description**: Format string for log messages.
|
||||
### Key: log_to_console
|
||||
- **Required**: True
|
||||
- **Type**: boolean
|
||||
- **Default value**: True
|
||||
- **Constraints**: None
|
||||
- **Description**: Enable or disable logging to console.
|
||||
### Key: log_directory
|
||||
- **Required**: False
|
||||
- **Type**: string
|
||||
- **Default value**: None
|
||||
- **Constraints**: None
|
||||
- **Description**: Directory path for log files.
|
||||
### Key: log_rotation_size
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 2048
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`
|
||||
- **Description**: Log rotation size in megabytes. When the log file reaches this particular size, a new log file starts.
|
||||
### Key: log_directory_max_size
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 51200
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`
|
||||
- **Description**: Maximum size of the log directory in megabytes.
|
||||
### Key: log_rotation_hour_interval
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 12
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`
|
||||
- **Description**: Interval in hours for log rotation. If the current log file reaches this value in logging, a new log file starts.
|
||||
### Key: log_tag_style
|
||||
- **Required**: True
|
||||
- **Type**: string
|
||||
- **Default value**: none
|
||||
- **Constraints**: The value must be one of the following: `int, uint, null, none, uuid`
|
||||
- **Description**: Style for log tags.
|
||||
### Key: extractor_threads
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 1
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
|
||||
- **Description**: Number of extractor threads.
|
||||
### Key: read_only
|
||||
- **Required**: True
|
||||
- **Type**: boolean
|
||||
- **Default value**: True
|
||||
- **Constraints**: None
|
||||
- **Description**: Indicates if the server should have read-only privileges.
|
||||
### Key: txn_threshold
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 0
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `65535`
|
||||
- **Description**: Transaction threshold value.
|
||||
### Key: start_sequence
|
||||
- **Required**: False
|
||||
- **Type**: int
|
||||
- **Default value**: None
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
|
||||
- **Description**: Starting ledger index.
|
||||
### Key: finish_sequence
|
||||
- **Required**: False
|
||||
- **Type**: int
|
||||
- **Default value**: None
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
|
||||
- **Description**: Ending ledger index.
|
||||
### Key: ssl_cert_file
|
||||
- **Required**: False
|
||||
- **Type**: string
|
||||
- **Default value**: None
|
||||
- **Constraints**: None
|
||||
- **Description**: Path to the SSL certificate file.
|
||||
### Key: ssl_key_file
|
||||
- **Required**: False
|
||||
- **Type**: string
|
||||
- **Default value**: None
|
||||
- **Constraints**: None
|
||||
- **Description**: Path to the SSL key file.
|
||||
### Key: api_version.default
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 1
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `3`
|
||||
- **Description**: Default API version Clio will run on.
|
||||
### Key: api_version.min
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 1
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `3`
|
||||
- **Description**: Minimum API version.
|
||||
### Key: api_version.max
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 3
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `3`
|
||||
- **Description**: Maximum API version.
|
||||
### Key: migration.full_scan_threads
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 2
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
|
||||
- **Description**: The number of threads used to scan the table.
|
||||
### Key: migration.full_scan_jobs
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 4
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
|
||||
- **Description**: The number of coroutines used to scan the table.
|
||||
### Key: migration.cursors_per_job
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: 100
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `4294967295`
|
||||
- **Description**: The number of cursors each coroutine will scan.
|
||||
- **Default value**: `10`
|
||||
- **Constraints**: The value must be a positive double number.
|
||||
- **Description**: The number of milliseconds the server waits to shut down gracefully. If Clio does not shut down gracefully within the specified time, it is killed instead.
|
||||
|
||||
### cache.num_diffs
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `32`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
|
||||
- **Description**: The number of cursors generated equals the number of changed (not counting deleted) objects across the latest `cache.num_diffs` ledgers. Cursors mark the positions from which workers load the ledger cache concurrently. For more information, please read [README.md](../src/etl/README.md).
|
||||
|
||||
### cache.num_markers
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `48`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
|
||||
- **Description**: Specifies how many markers are placed randomly within the cache. These markers define the positions on the ledger that will be loaded concurrently by the workers. The higher the number, the more places within the cache we potentially cover.
|
||||
|
||||
### cache.num_cursors_from_diff
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `0`
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `65535`.
|
||||
- **Description**: Generates `cache.num_cursors_from_diff` cursors by looking at the objects that changed in the most recent ledgers. If the number of changed objects in the current ledger is not enough, Clio keeps reading earlier ledgers until it reaches `cache.num_cursors_from_diff` cursors. If set to `0`, the system defaults to generating cursors based on `cache.num_diffs`.
|
||||
|
||||
### cache.num_cursors_from_account
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `0`
|
||||
- **Constraints**: The minimum value is `0`. The maximum value is `65535`.
|
||||
- **Description**: Generates `cache.num_cursors_from_account` cursors by reading accounts from the `account_tx` table. If set to `0`, the system defaults to generating cursors based on `cache.num_diffs`.
|
||||
|
||||
### cache.page_fetch_size
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `512`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
|
||||
- **Description**: The number of ledger objects to fetch concurrently per marker.
|
||||
|
||||
### cache.load
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: string
|
||||
- **Default value**: `async`
|
||||
- **Constraints**: The value must be one of the following: `sync`, `async`, `none`.
|
||||
- **Description**: The strategy used for cache loading.
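Putting the cache keys above together, a `cache` section in Clio's JSON config might look like the following sketch, assuming the dotted key names nest into one JSON object as in the example config (the values shown are the documented defaults, not tuning advice):

```json
{
  "cache": {
    "num_diffs": 32,
    "num_markers": 48,
    "num_cursors_from_diff": 0,
    "num_cursors_from_account": 0,
    "page_fetch_size": 512,
    "load": "async"
  }
}
```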
|
||||
|
||||
### log_channels.[].channel
|
||||
|
||||
- **Required**: False
|
||||
- **Type**: string
|
||||
- **Default value**: None
|
||||
- **Constraints**: The value must be one of the following: `General`, `WebServer`, `Backend`, `RPC`, `ETL`, `Subscriptions`, `Performance`, `Migration`.
|
||||
- **Description**: The name of the log channel.
|
||||
|
||||
### log_channels.[].log_level
|
||||
|
||||
- **Required**: False
|
||||
- **Type**: string
|
||||
- **Default value**: None
|
||||
- **Constraints**: The value must be one of the following: `trace`, `debug`, `info`, `warning`, `error`, `fatal`, `count`.
|
||||
- **Description**: The log level for the specific log channel.
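As a sketch, assuming the `log_channels.[]` notation maps to a JSON array as in the example config, a per-channel override could look like this (channel names and levels are illustrative choices):

```json
{
  "log_channels": [
    { "channel": "Backend", "log_level": "debug" },
    { "channel": "WebServer", "log_level": "warning" }
  ]
}
```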
|
||||
|
||||
### log_level
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: string
|
||||
- **Default value**: `info`
|
||||
- **Constraints**: The value must be one of the following: `trace`, `debug`, `info`, `warning`, `error`, `fatal`, `count`.
|
||||
- **Description**: The general logging level of Clio. This level is applied to all log channels that do not have an explicitly defined logging level.
|
||||
|
||||
### log_format
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: string
|
||||
- **Default value**: `%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%`
|
||||
- **Constraints**: None
|
||||
- **Description**: The format string for log messages. The format is described here: <https://www.boost.org/doc/libs/1_83_0/libs/log/doc/html/log/tutorial/formatters.html>.
|
||||
|
||||
### log_to_console
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: boolean
|
||||
- **Default value**: `True`
|
||||
- **Constraints**: None
|
||||
- **Description**: Enables or disables logging to the console.
|
||||
|
||||
### log_directory
|
||||
|
||||
- **Required**: False
|
||||
- **Type**: string
|
||||
- **Default value**: None
|
||||
- **Constraints**: None
|
||||
- **Description**: The directory path for the log files.
|
||||
|
||||
### log_rotation_size
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `2048`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: The log rotation size in megabytes. When the log file reaches this particular size, a new log file starts.
|
||||
|
||||
### log_directory_max_size
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `51200`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: The maximum size of the log directory in megabytes.
|
||||
|
||||
### log_rotation_hour_interval
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `12`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: The interval (in hours) for log rotation. When the current log file has been in use for this many hours, a new log file starts.
|
||||
|
||||
### log_tag_style
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: string
|
||||
- **Default value**: `none`
|
||||
- **Constraints**: The value must be one of the following: `int`, `uint`, `null`, `none`, `uuid`.
|
||||
- **Description**: Log tags are unique identifiers attached to log messages. `uint`/`int` tags start at 0 and increment, which is fast. In contrast, `uuid` generates a random unique identifier, which adds overhead.
|
||||
|
||||
### extractor_threads
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `1`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: The number of threads used to extract data from the ETL source.
|
||||
|
||||
### read_only
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: boolean
|
||||
- **Default value**: `True`
|
||||
- **Constraints**: None
|
||||
- **Description**: Indicates whether the server runs in read-only mode and therefore does not write data to the database.
|
||||
|
||||
### start_sequence
|
||||
|
||||
- **Required**: False
|
||||
- **Type**: int
|
||||
- **Default value**: None
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: If specified, the ledger index Clio will start writing to the database from.
|
||||
|
||||
### finish_sequence
|
||||
|
||||
- **Required**: False
|
||||
- **Type**: int
|
||||
- **Default value**: None
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: If specified, the final ledger that Clio will write to the database.
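For example, to backfill a fixed ledger range, both keys can be set together (the sequence numbers below are purely illustrative):

```json
{
  "start_sequence": 90000000,
  "finish_sequence": 90001000
}
```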
|
||||
|
||||
### ssl_cert_file
|
||||
|
||||
- **Required**: False
|
||||
- **Type**: string
|
||||
- **Default value**: None
|
||||
- **Constraints**: None
|
||||
- **Description**: The path to the SSL certificate file.
|
||||
|
||||
### ssl_key_file
|
||||
|
||||
- **Required**: False
|
||||
- **Type**: string
|
||||
- **Default value**: None
|
||||
- **Constraints**: None
|
||||
- **Description**: The path to the SSL key file.
|
||||
|
||||
### api_version.default
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `1`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `3`.
|
||||
- **Description**: The default API version that the Clio server will run on.
|
||||
|
||||
### api_version.min
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `1`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `3`.
|
||||
- **Description**: The minimum API version clients are allowed to request.
|
||||
|
||||
### api_version.max
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `3`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `3`.
|
||||
- **Description**: The maximum API version clients are allowed to request.
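Taken together, the documented defaults correspond to the following `api_version` block (assuming the dotted keys nest into one JSON object, as in the example config):

```json
{
  "api_version": {
    "default": 1,
    "min": 1,
    "max": 3
  }
}
```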
|
||||
|
||||
### migration.full_scan_threads
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `2`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: The number of threads used to scan the table.
|
||||
|
||||
### migration.full_scan_jobs
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `4`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: The number of coroutines used to scan the table.
|
||||
|
||||
### migration.cursors_per_job
|
||||
|
||||
- **Required**: True
|
||||
- **Type**: int
|
||||
- **Default value**: `100`
|
||||
- **Constraints**: The minimum value is `1`. The maximum value is `4294967295`.
|
||||
- **Description**: The number of cursors each job will scan.
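A `migration` block using the documented defaults would look like this sketch (again assuming the dotted keys nest into one JSON object):

```json
{
  "migration": {
    "full_scan_threads": 2,
    "full_scan_jobs": 4,
    "cursors_per_job": 100
  }
}
```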
|
||||
|
||||
@@ -18,7 +18,8 @@ Clio needs access to a `rippled` server in order to work. The following configur
|
||||
|
||||
- A port to handle gRPC requests, with the IP(s) of Clio specified in the `secure_gateway` entry
|
||||
|
||||
The example configs of [rippled](https://github.com/XRPLF/rippled/blob/develop/cfg/rippled-example.cfg) and [Clio](../docs/examples/config/example-config.json) are set up in a way that minimal changes are required.
|
||||
The example configs of [rippled](https://github.com/XRPLF/rippled/blob/develop/cfg/rippled-example.cfg) and [Clio](../docs/examples/config/example-config.json) are set up in a way that minimal changes are required. However, if you want to view all configuration keys available in Clio, see [config-description.md](./config-description.md).
|
||||
|
||||
When running locally, the only change needed is to uncomment the `port_grpc` section of the `rippled` config.
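As a rough sketch, the uncommented stanza typically looks like this; port and IP values are illustrative, and the linked rippled example config is the authoritative reference:

```text
[port_grpc]
port = 50051
ip = 127.0.0.1
# The IP(s) of the Clio instance(s); 127.0.0.1 when running locally
secure_gateway = 127.0.0.1
```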
|
||||
|
||||
If you're running Clio and `rippled` on separate machines, in addition to uncommenting the `port_grpc` section, a few other steps must be taken:
|
||||
|
||||
@@ -36,7 +36,7 @@
|
||||
}
|
||||
],
|
||||
"forwarding": {
|
||||
"cache_timeout": 0.250, // in seconds, could be 0, which means no cache
|
||||
"cache_timeout": 0.25, // in seconds, could be 0, which means no cache
|
||||
"request_timeout": 10.0 // time for Clio to wait for rippled to reply on a forwarded request (default is 10 seconds)
|
||||
},
|
||||
"rpc": {
|
||||
@@ -44,9 +44,7 @@
|
||||
},
|
||||
"dos_guard": {
|
||||
// Comma-separated list of IPs to exclude from rate limiting
|
||||
"whitelist": [
|
||||
"127.0.0.1"
|
||||
],
|
||||
"whitelist": ["127.0.0.1"],
|
||||
//
|
||||
// The below values are the default values and are only specified here
|
||||
// for documentation purposes. The rate limiter currently limits
|
||||
|
||||
@@ -4,16 +4,17 @@
|
||||
> This is only an example of Grafana dashboard for Clio. It was created for demonstration purposes only and may contain errors.
|
||||
> The Clio team does not recommend relying on data from this dashboard or using it to monitor your Clio instances.
|
||||
|
||||
This directory contains an example of docker based infrastructure to collect and visualise metrics from clio.
|
||||
This directory contains an example of docker based infrastructure to collect and visualize metrics from clio.
|
||||
|
||||
The structure of the directory:
|
||||
|
||||
- `compose.yaml`
|
||||
Docker-compose file with Prometheus and Grafana set up.
|
||||
Docker Compose file with Prometheus and Grafana set up.
|
||||
- `prometheus.yaml`
|
||||
Defines metrics collection from Clio and Prometheus itself.
|
||||
Demonstrates how to setup Clio target and Clio's admin authorisation in Prometheus.
|
||||
Demonstrates how to setup Clio target and Clio's admin authorization in Prometheus.
|
||||
- `grafana/clio_dashboard.json`
|
||||
Json file containing preconfigured dashboard in Grafana format.
|
||||
Json file containing pre-configured dashboard in Grafana format.
|
||||
- `grafana/dashboard_local.yaml`
|
||||
Grafana configuration file defining the directory to search for dashboard JSON files.
|
||||
- `grafana/datasources.yaml`
|
||||
@@ -21,9 +22,9 @@ The structure of the directory:
|
||||
|
||||
## How to try
|
||||
|
||||
1. Make sure you have `docker` and `docker-compose` installed.
|
||||
2. Run `docker-compose up -d` from this directory. It will start docker containers with Prometheus and Grafana.
|
||||
1. Make sure you have Docker (with `Docker Compose`) installed.
|
||||
2. Run `docker compose up -d` from this directory. It will start docker containers with Prometheus and Grafana.
|
||||
3. Open [http://localhost:3000/dashboards](http://localhost:3000/dashboards). Grafana login `admin`, password `grafana`.
|
||||
There will be preconfigured Clio dashboard.
|
||||
There will be pre-configured Clio dashboard.
|
||||
|
||||
If Clio is not running yet, launch it to see the metrics. Some metrics appear only after requests have been made to Clio.
|
||||
|
||||
@@ -6,7 +6,7 @@ services:
|
||||
volumes:
|
||||
- ./prometheus.yaml:/etc/prometheus/prometheus.yml
|
||||
command:
|
||||
- '--config.file=/etc/prometheus/prometheus.yml'
|
||||
- "--config.file=/etc/prometheus/prometheus.yml"
|
||||
grafana:
|
||||
image: grafana/grafana
|
||||
ports:
|
||||
|
||||
@@ -80,9 +80,7 @@
|
||||
"orientation": "auto",
|
||||
"percentChangeColorMode": "standard",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
@@ -161,9 +159,7 @@
|
||||
"orientation": "auto",
|
||||
"percentChangeColorMode": "standard",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
@@ -246,9 +242,7 @@
|
||||
"orientation": "auto",
|
||||
"percentChangeColorMode": "standard",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
@@ -331,9 +325,7 @@
|
||||
"orientation": "auto",
|
||||
"percentChangeColorMode": "standard",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
@@ -1406,7 +1398,7 @@
|
||||
"refId": "B"
|
||||
}
|
||||
],
|
||||
"title": "DB Opperations Error Rate",
|
||||
"title": "DB Operations Error Rate",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
apiVersion: 1
|
||||
|
||||
providers:
|
||||
- name: 'Clio dashboard'
|
||||
- name: "Clio dashboard"
|
||||
# <int> Org id. Default to 1
|
||||
orgId: 1
|
||||
# <string> name of the dashboard folder.
|
||||
folder: ''
|
||||
folder: ""
|
||||
# <string> folder UID. will be automatically generated if not specified
|
||||
folderUid: ''
|
||||
folderUid: ""
|
||||
# <string> provider type. Default to 'file'
|
||||
type: file
|
||||
# <bool> disable dashboard deletion
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
## Prerequisites
|
||||
|
||||
- Access to a Cassandra cluster or ScyllaDB cluster. Can be local or remote.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> There are some key considerations when using **ScyllaDB**. By default, Scylla reserves all free RAM on a machine for itself. If you are running `rippled` or other services on the same machine, restrict its memory usage using the `--memory` argument.
|
||||
>
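For example, when running ScyllaDB via Docker next to other services, the reservation can be capped by passing Scylla's `--memory` flag after the image name (the 4G figure is only an illustration):

```sh
docker run --rm -p 9042:9042 --name clio-scylla -d scylladb/scylla --memory 4G
```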
|
||||
@@ -91,4 +92,4 @@ To completely disable Prometheus metrics add `"prometheus": { "enabled": false }
|
||||
|
||||
It is important to know that Clio responds to Prometheus requests only if they are admin requests. If you are using the admin password feature, the same password must be provided in the `Authorization` header of Prometheus requests.
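A minimal scrape job that sends such a header might look like the sketch below; the header scheme and password here are assumptions, so treat the `prometheus.yaml` bundled in [examples/infrastructure](../docs/examples/infrastructure/) as the authoritative reference:

```yaml
scrape_configs:
  - job_name: clio
    static_configs:
      - targets: ["localhost:51233"]
    authorization:
      type: Password # assumed scheme; Clio checks the Authorization header value
      credentials: your_admin_password
```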
|
||||
|
||||
You can find an example docker-compose file, with Prometheus and Grafana configs, in [examples/infrastructure](../docs/examples/infrastructure/).
|
||||
You can find an example Docker Compose file, with Prometheus and Grafana configs, in [examples/infrastructure](../docs/examples/infrastructure/).
|
||||
|
||||
@@ -1,47 +1,60 @@
|
||||
# Troubleshooting Guide
|
||||
|
||||
This guide will help you troubleshoot common issues with Clio.
|
||||
|
||||
## Can't connect to DB
|
||||
|
||||
If you see the error log message `Could not connect to Cassandra: No hosts available`, this means that Clio can't connect to the database. Check the following:
|
||||
|
||||
- Make sure the database is running at the specified address and port.
|
||||
- Make sure the database is accessible from the machine where Clio is running.
|
||||
You can use [cqlsh](https://pypi.org/project/cqlsh/) to check the connection to the database.
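For example, a quick connectivity check against the host and port from Clio's `database` section could look like this (address and port are placeholders):

```sh
cqlsh 127.0.0.1 9042 -e "DESCRIBE KEYSPACES;"
```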
|
||||
|
||||
If you would like to run a local ScyllaDB, you can call:
|
||||
|
||||
```sh
|
||||
docker run --rm -p 9042:9042 --name clio-scylla -d scylladb/scylla
|
||||
```
|
||||
|
||||
## Check the server status of Clio
|
||||
|
||||
To check if Clio is syncing with rippled:
|
||||
|
||||
```sh
|
||||
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep seq
|
||||
```
|
||||
|
||||
If Clio is syncing with rippled, the `seq` value will be increasing.
|
||||
|
||||
## Clio fails to start
|
||||
|
||||
If you see the error log message `Failed to fetch ETL state from...`, this means the configured rippled node is not reachable. Check the following:
|
||||
|
||||
- Make sure the rippled node is running at the specified address and port.
|
||||
- Make sure the rippled node is accessible from the machine where Clio is running.
|
||||
|
||||
If you would like to run Clio without an avaliable rippled node, you can add below setting to Clio's configuration file:
|
||||
```
|
||||
If you would like to run Clio without an available rippled node, you can add the setting below to Clio's configuration file:
|
||||
|
||||
```text
|
||||
"allow_no_etl": true
|
||||
```
|
||||
|
||||
## Clio is not added to secure_gateway in rippled's config
|
||||
|
||||
If you see the warning message `AsyncCallData is_unlimited is false.`, this means that Clio is not added to the `secure_gateway` of the `port_grpc` section in the rippled configuration file. This slows down the sync process. Please add Clio's IP to `secure_gateway` in the rippled configuration file for both the gRPC and WS ports.
|
||||
|
||||
## Clio is slow
|
||||
|
||||
To speed up response times, Clio keeps an internal cache. However, the cache can take time to warm up, so if you see slow responses, first check whether the cache is still loading.
|
||||
You can check the cache status by calling:
|
||||
|
||||
```sh
|
||||
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep is_full
|
||||
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep is_enabled
|
||||
```
|
||||
If `is_full` is false, it means the cache is still loading. Normally, the Clio can respond quicker if cache finishs loading. If `is_enabled` is false, it means the cache is disabled in the configuration file or there is data corruption in the database.
|
||||
|
||||
If `is_full` is false, it means the cache is still loading. Normally, the Clio can respond quicker if cache finishes loading. If `is_enabled` is false, it means the cache is disabled in the configuration file or there is data corruption in the database.
|
||||
|
||||
## Receive error message `Too many requests`
|
||||
|
||||
If a client sees the error message `Too many requests`, this means that the client is blocked by Clio's DosGuard protection. You may want to add the client's IP to the whitelist in the configuration file, or adjust your other DosGuard settings.
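For instance, whitelisting an additional client IP only requires extending the `dos_guard.whitelist` array shown earlier (the second address below is a placeholder):

```json
{
  "dos_guard": {
    "whitelist": ["127.0.0.1", "203.0.113.5"]
  }
}
```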
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
# Note: This script is intended to be run from the root of the repository.
|
||||
#
|
||||
# Not really a hook but should be used to check the completness of documentation for added code, otherwise CI will come for you.
|
||||
# Not really a hook but should be used to check the completeness of documentation for added code, otherwise CI will come for you.
|
||||
# It's good to have /tmp as the output so that consecutive runs are fast but no clutter in the repository.
|
||||
|
||||
echo "+ Checking documentation..."
|
||||
@@ -15,6 +15,12 @@ DOCDIR=${TMPDIR}/out
|
||||
|
||||
# Check doxygen is at all installed
|
||||
if [ -z "$DOXYGEN" ]; then
|
||||
if [[ "${CI}" == "true" ]]; then
|
||||
# If we are in CI, we should fail the check
|
||||
echo "doxygen not found in CI, please install it"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# No hard error if doxygen is not installed yet
|
||||
cat <<EOF
|
||||
|
||||
9
pre-commit-hooks/codespell_ignore.txt
Normal file
@@ -0,0 +1,9 @@
|
||||
ser
|
||||
onWs
|
||||
datas
|
||||
AtLeast
|
||||
AtMost
|
||||
compiletime
|
||||
tring
|
||||
trings
|
||||
strat
|
||||
44
pre-commit-hooks/fix-local-includes.sh
Executable file
@@ -0,0 +1,44 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Note: This script is intended to be run from the root of the repository.
|
||||
#
|
||||
# This script will fix local includes in the C++ code for a given file.
|
||||
# Usage: ./pre-commit-hooks/fix-local-includes.sh <file1> <file2> ...
|
||||
|
||||
files="$@"
|
||||
echo "+ Fixing includes in $files..."
|
||||
|
||||
GNU_SED=$(sed --version 2>&1 | grep -q 'GNU' && echo true || echo false)
|
||||
|
||||
if [[ "$GNU_SED" == "false" ]]; then # macOS sed
|
||||
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | tr '\n' '|' | sed 's/|$//' | sed 's/|/\\|/g')
|
||||
else
|
||||
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | paste -sd '|' | sed 's/|/\\|/g')
|
||||
fi
|
||||
|
||||
fix_includes() {
|
||||
file_path="$1"
|
||||
|
||||
file_path_all_global="${file_path}.tmp.global"
|
||||
file_path_fixed="${file_path}.tmp.fixed"
|
||||
|
||||
# Make all includes to be <...> style
|
||||
sed -E 's|#include "(.*)"|#include <\1>|g' "$file_path" > "$file_path_all_global"
|
||||
|
||||
# Make local includes to be "..." style
|
||||
sed -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g" "$file_path_all_global" > "$file_path_fixed"
|
||||
rm "$file_path_all_global"
|
||||
|
||||
# Check if the temporary file is different from the original file
|
||||
if ! cmp -s "$file_path" "$file_path_fixed"; then
|
||||
# Replace the original file with the temporary file
|
||||
mv "$file_path_fixed" "$file_path"
|
||||
else
|
||||
# Remove the temporary file if it's the same as the original
|
||||
rm "$file_path_fixed"
|
||||
fi
|
||||
}
|
||||
|
||||
for file in $files; do
|
||||
fix_includes "$file"
|
||||
done
|
||||
14
pre-commit-hooks/run-go-fmt.sh
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Capture and print stdout, since gofmt doesn't use proper exit codes
|
||||
#
|
||||
set -e -o pipefail
|
||||
|
||||
if ! command -v gofmt &> /dev/null ; then
|
||||
echo "gofmt not installed or available in the PATH" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
output="$(gofmt -l -w "$@")"
|
||||
echo "$output"
|
||||
[[ -z "$output" ]]
|
||||
@@ -39,9 +39,8 @@ verify_tag_signed() {
|
||||
fi
|
||||
}
|
||||
|
||||
while read local_ref local_oid remote_ref remote_oid; do
|
||||
# Check some things if we're pushing a branch called "release/"
|
||||
if echo "$remote_ref" | grep ^refs\/heads\/release\/ &> /dev/null ; then
|
||||
if echo "$PRE_COMMIT_REMOTE_BRANCH" | grep ^refs\/heads\/release\/ &> /dev/null ; then
|
||||
version=$(git tag --points-at HEAD)
|
||||
echo "Looks like you're trying to push a $version release..."
|
||||
echo "Making sure you've signed and tagged it."
|
||||
@@ -51,8 +50,3 @@ while read local_ref local_oid remote_ref remote_oid; do
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'pre-push' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
|
||||
|
||||
git lfs pre-push "$@"
|
||||
@@ -1,5 +1,6 @@
|
||||
add_subdirectory(util)
|
||||
add_subdirectory(data)
|
||||
add_subdirectory(cluster)
|
||||
add_subdirectory(etl)
|
||||
add_subdirectory(etlng)
|
||||
add_subdirectory(feed)
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
add_library(clio_app)
|
||||
target_sources(clio_app PRIVATE CliArgs.cpp ClioApplication.cpp Stopper.cpp WebHandlers.cpp)
|
||||
|
||||
target_link_libraries(clio_app PUBLIC clio_etl clio_etlng clio_feed clio_web clio_rpc clio_migration)
|
||||
target_link_libraries(
|
||||
clio_app
|
||||
PUBLIC clio_cluster
|
||||
clio_etl
|
||||
clio_etlng
|
||||
clio_feed
|
||||
clio_web
|
||||
clio_rpc
|
||||
clio_migration
|
||||
)
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
|
||||
#include "migration/MigrationApplication.hpp"
|
||||
#include "util/build/Build.hpp"
|
||||
#include "util/newconfig/ConfigDescription.hpp"
|
||||
#include "util/config/ConfigDescription.hpp"
|
||||
|
||||
#include <boost/program_options/options_description.hpp>
|
||||
#include <boost/program_options/parsers.hpp>
|
||||
|
||||
@@ -21,12 +21,15 @@
|
||||
|
||||
#include "app/Stopper.hpp"
|
||||
#include "app/WebHandlers.hpp"
|
||||
#include "cluster/ClusterCommunicationService.hpp"
|
||||
#include "data/AmendmentCenter.hpp"
|
||||
#include "data/BackendFactory.hpp"
|
||||
#include "data/LedgerCache.hpp"
|
||||
#include "etl/ETLService.hpp"
|
||||
#include "etl/LoadBalancer.hpp"
|
||||
#include "etl/NetworkValidatedLedgers.hpp"
|
||||
#include "etlng/LoadBalancer.hpp"
|
||||
#include "etlng/LoadBalancerInterface.hpp"
|
||||
#include "feed/SubscriptionManager.hpp"
|
||||
#include "migration/MigrationInspectorFactory.hpp"
|
||||
#include "rpc/Counters.hpp"
|
||||
@@ -34,14 +37,15 @@
|
||||
#include "rpc/WorkQueue.hpp"
|
||||
#include "rpc/common/impl/HandlerProvider.hpp"
|
||||
#include "util/build/Build.hpp"
|
||||
#include "util/config/ConfigDefinition.hpp"
|
||||
#include "util/log/Logger.hpp"
|
||||
#include "util/newconfig/ConfigDefinition.hpp"
|
||||
#include "util/prometheus/Prometheus.hpp"
|
||||
#include "web/AdminVerificationStrategy.hpp"
|
||||
#include "web/RPCServerHandler.hpp"
|
||||
#include "web/Server.hpp"
|
||||
#include "web/dosguard/DOSGuard.hpp"
|
||||
#include "web/dosguard/IntervalSweepHandler.hpp"
|
||||
#include "web/dosguard/Weights.hpp"
|
||||
#include "web/dosguard/WhitelistHandler.hpp"
|
||||
#include "web/ng/RPCServerHandler.hpp"
|
||||
#include "web/ng/Server.hpp"
|
||||
@@ -101,13 +105,17 @@ ClioApplication::run(bool const useNgWebServer)
|
||||
|
||||
// Rate limiter, to prevent abuse
|
||||
auto whitelistHandler = web::dosguard::WhitelistHandler{config_};
|
||||
auto dosGuard = web::dosguard::DOSGuard{config_, whitelistHandler};
|
||||
auto const dosguardWeights = web::dosguard::Weights::make(config_);
|
||||
auto dosGuard = web::dosguard::DOSGuard{config_, whitelistHandler, dosguardWeights};
|
||||
auto sweepHandler = web::dosguard::IntervalSweepHandler{config_, ioc, dosGuard};
|
||||
auto cache = data::LedgerCache{};
|
||||
|
||||
// Interface to the database
|
||||
auto backend = data::makeBackend(config_, cache);
|
||||
|
||||
cluster::ClusterCommunicationService clusterCommunicationService{backend};
|
||||
clusterCommunicationService.run();
|
||||
|
||||
auto const amendmentCenter = std::make_shared<data::AmendmentCenter const>(backend);
|
||||
|
||||
{
|
||||
@@ -130,7 +138,12 @@ ClioApplication::run(bool const useNgWebServer)
|
||||
// ETL uses the balancer to extract data.
|
||||
// The server uses the balancer to forward RPCs to a rippled node.
|
||||
// The balancer itself publishes to streams (transactions_proposed and accounts_proposed)
|
||||
auto balancer = etl::LoadBalancer::makeLoadBalancer(config_, ioc, backend, subscriptions, ledgers);
|
||||
auto balancer = [&] -> std::shared_ptr<etlng::LoadBalancerInterface> {
|
||||
if (config_.get<bool>("__ng_etl"))
|
||||
return etlng::LoadBalancer::makeLoadBalancer(config_, ioc, backend, subscriptions, ledgers);
|
||||
|
||||
return etl::LoadBalancer::makeLoadBalancer(config_, ioc, backend, subscriptions, ledgers);
|
||||
}();
|
||||
|
||||
// ETL is responsible for writing and publishing to streams. In read-only mode, ETL only publishes
|
||||
auto etl = etl::ETLService::makeETLService(config_, ioc, backend, subscriptions, balancer, ledgers);
|
||||
@@ -142,12 +155,12 @@ ClioApplication::run(bool const useNgWebServer)
|
||||
config_, backend, subscriptions, balancer, etl, amendmentCenter, counters
|
||||
);
|
||||
|
||||
using RPCEngineType = rpc::RPCEngine<etl::LoadBalancer, rpc::Counters>;
|
||||
using RPCEngineType = rpc::RPCEngine<rpc::Counters>;
|
||||
auto const rpcEngine =
|
||||
RPCEngineType::makeRPCEngine(config_, backend, balancer, dosGuard, workQueue, counters, handlerProvider);
|
||||
|
||||
if (useNgWebServer or config_.get<bool>("server.__ng_web_server")) {
|
||||
web::ng::RPCServerHandler<RPCEngineType, etl::ETLService> handler{config_, backend, rpcEngine, etl};
|
||||
web::ng::RPCServerHandler<RPCEngineType> handler{config_, backend, rpcEngine, etl, dosGuard};
|
||||
|
||||
auto expectedAdminVerifier = web::makeAdminVerificationStrategy(config_);
|
||||
if (not expectedAdminVerifier.has_value()) {
|
||||
@@ -165,7 +178,7 @@ ClioApplication::run(bool const useNgWebServer)
|
||||
|
||||
httpServer->onGet("/metrics", MetricsHandler{adminVerifier});
|
||||
httpServer->onGet("/health", HealthCheckHandler{});
|
||||
auto requestHandler = RequestHandler{adminVerifier, handler, dosGuard};
|
||||
auto requestHandler = RequestHandler{adminVerifier, handler};
|
||||
httpServer->onPost("/", requestHandler);
|
||||
httpServer->onWs(std::move(requestHandler));
|
||||
|
||||
@@ -188,8 +201,7 @@ ClioApplication::run(bool const useNgWebServer)
|
||||
}
|
||||
|
||||
// Init the web server
|
||||
auto handler =
|
||||
std::make_shared<web::RPCServerHandler<RPCEngineType, etl::ETLService>>(config_, backend, rpcEngine, etl);
|
||||
auto handler = std::make_shared<web::RPCServerHandler<RPCEngineType>>(config_, backend, rpcEngine, etl, dosGuard);
|
||||
|
||||
auto const httpServer = web::makeHttpServer(config_, ioc, dosGuard, handler);
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
|
||||
#include "app/Stopper.hpp"
|
||||
#include "util/SignalsHandler.hpp"
|
||||
#include "util/newconfig/ConfigDefinition.hpp"
|
||||
#include "util/config/ConfigDefinition.hpp"
|
||||
|
||||
namespace app {
|
||||
|
||||
|
||||
@@ -20,8 +20,8 @@
|
||||
#pragma once
|
||||
|
||||
#include "data/BackendInterface.hpp"
|
||||
#include "etl/ETLService.hpp"
|
||||
#include "etl/LoadBalancer.hpp"
|
||||
#include "etlng/ETLServiceInterface.hpp"
|
||||
#include "etlng/LoadBalancerInterface.hpp"
|
||||
#include "feed/SubscriptionManagerInterface.hpp"
|
||||
#include "util/CoroutineGroup.hpp"
|
||||
#include "util/log/Logger.hpp"
|
||||
@@ -74,15 +74,12 @@ public:
|
||||
* @param ioc The io_context to stop.
|
||||
* @return The callback to be called on application stop.
|
||||
*/
|
||||
template <
|
||||
web::ng::SomeServer ServerType,
|
||||
etl::SomeLoadBalancer LoadBalancerType,
|
||||
etl::SomeETLService ETLServiceType>
|
||||
template <web::ng::SomeServer ServerType>
|
||||
static std::function<void(boost::asio::yield_context)>
|
||||
makeOnStopCallback(
|
||||
ServerType& server,
|
||||
LoadBalancerType& balancer,
|
||||
ETLServiceType& etl,
|
||||
etlng::LoadBalancerInterface& balancer,
|
||||
etlng::ETLServiceInterface& etl,
|
||||
feed::SubscriptionManagerInterface& subscriptions,
|
||||
data::BackendInterface& backend,
|
||||
boost::asio::io_context& ioc
|
||||
|
||||
@@ -19,8 +19,8 @@
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "util/newconfig/ConfigDefinition.hpp"
|
||||
#include "util/newconfig/ConfigFileJson.hpp"
|
||||
#include "util/config/ConfigDefinition.hpp"
|
||||
#include "util/config/ConfigFileJson.hpp"
|
||||
|
||||
#include <cstdlib>
|
||||
#include <iostream>
|
||||
|
||||
@@ -147,7 +147,6 @@ class RequestHandler {
|
||||
util::Logger webServerLog_{"WebServer"};
|
||||
std::shared_ptr<web::AdminVerificationStrategy> adminVerifier_;
|
||||
std::reference_wrapper<RpcHandlerType> rpcHandler_;
|
||||
std::reference_wrapper<web::dosguard::DOSGuardInterface> dosguard_;
|
||||
|
||||
public:
|
||||
/**
|
||||
@@ -155,14 +154,9 @@ public:
|
||||
*
|
||||
* @param adminVerifier The AdminVerificationStrategy to use for verifying the connection for admin access.
|
||||
* @param rpcHandler The RPC handler to use for handling the request.
|
||||
* @param dosguard The DOSGuardInterface to use for checking the connection.
|
||||
*/
|
||||
RequestHandler(
|
||||
std::shared_ptr<web::AdminVerificationStrategy> adminVerifier,
|
||||
RpcHandlerType& rpcHandler,
|
||||
web::dosguard::DOSGuardInterface& dosguard
|
||||
)
|
||||
: adminVerifier_(std::move(adminVerifier)), rpcHandler_(rpcHandler), dosguard_(dosguard)
|
||||
RequestHandler(std::shared_ptr<web::AdminVerificationStrategy> adminVerifier, RpcHandlerType& rpcHandler)
|
||||
: adminVerifier_(std::move(adminVerifier)), rpcHandler_(rpcHandler)
|
||||
{
|
||||
}
|
||||
|
||||
@@ -183,21 +177,6 @@ public:
|
||||
boost::asio::yield_context yield
|
||||
)
|
||||
{
|
||||
if (not dosguard_.get().request(connectionMetadata.ip())) {
|
||||
auto error = rpc::makeError(rpc::RippledError::rpcSLOW_DOWN);
|
||||
|
||||
if (not request.isHttp()) {
|
||||
try {
|
||||
auto requestJson = boost::json::parse(request.message());
|
||||
if (requestJson.is_object() && requestJson.as_object().contains("id"))
|
||||
error["id"] = requestJson.as_object().at("id");
|
||||
error["request"] = request.message();
|
||||
} catch (std::exception const&) {
|
||||
error["request"] = request.message();
|
||||
}
|
||||
}
|
||||
return web::ng::Response{boost::beast::http::status::service_unavailable, error, request};
|
||||
}
|
||||
LOG(webServerLog_.info()) << connectionMetadata.tag()
|
||||
<< "Received request from ip = " << connectionMetadata.ip()
|
||||
<< " - posting to WorkQueue";
|
||||
@@ -207,20 +186,7 @@ public:
|
||||
});
|
||||
|
||||
try {
|
||||
auto response = rpcHandler_(request, connectionMetadata, std::move(subscriptionContext), yield);
|
||||
|
||||
if (not dosguard_.get().add(connectionMetadata.ip(), response.message().size())) {
|
||||
auto jsonResponse = boost::json::parse(response.message()).as_object();
|
||||
jsonResponse["warning"] = "load";
|
||||
if (jsonResponse.contains("warnings") && jsonResponse["warnings"].is_array()) {
|
||||
jsonResponse["warnings"].as_array().push_back(rpc::makeWarning(rpc::WarnRpcRateLimit));
|
||||
} else {
|
||||
jsonResponse["warnings"] = boost::json::array{rpc::makeWarning(rpc::WarnRpcRateLimit)};
|
||||
}
|
||||
response.setMessage(jsonResponse);
|
||||
}
|
||||
|
||||
return response;
|
||||
return rpcHandler_(request, connectionMetadata, std::move(subscriptionContext), yield);
|
||||
} catch (std::exception const&) {
|
||||
return web::ng::Response{
|
||||
boost::beast::http::status::internal_server_error,
|
||||
|
||||
5
src/cluster/CMakeLists.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
add_library(clio_cluster)
|
||||
|
||||
target_sources(clio_cluster PRIVATE ClioNode.cpp ClusterCommunicationService.cpp)
|
||||
|
||||
target_link_libraries(clio_cluster PRIVATE clio_util clio_data)
|
||||
64
src/cluster/ClioNode.cpp
Normal file
@@ -0,0 +1,64 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of clio: https://github.com/XRPLF/clio
|
||||
Copyright (c) 2025, the clio developers.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#include "cluster/ClioNode.hpp"
|
||||
|
||||
#include "util/TimeUtils.hpp"
|
||||
|
||||
#include <boost/json/conversion.hpp>
|
||||
#include <boost/json/object.hpp>
|
||||
#include <boost/json/value.hpp>
|
||||
#include <boost/uuid/uuid.hpp>
|
||||
|
||||
#include <memory>
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
#include <string_view>
|
||||
|
||||
namespace cluster {
|
||||
|
||||
namespace {
|
||||
|
||||
struct Fields {
|
||||
static constexpr std::string_view const kUPDATE_TIME = "update_time";
|
||||
};
|
||||
|
||||
} // namespace
|
||||
|
||||
void
|
||||
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, ClioNode const& node)
|
||||
{
|
||||
jv = {
|
||||
{Fields::kUPDATE_TIME, util::systemTpToUtcStr(node.updateTime, ClioNode::kTIME_FORMAT)},
|
||||
};
|
||||
}
|
||||
|
||||
ClioNode
|
||||
tag_invoke(boost::json::value_to_tag<ClioNode>, boost::json::value const& jv)
|
||||
{
|
||||
auto const& updateTimeStr = jv.as_object().at(Fields::kUPDATE_TIME).as_string();
|
||||
auto const updateTime = util::systemTpFromUtcStr(std::string(updateTimeStr), ClioNode::kTIME_FORMAT);
|
||||
if (!updateTime.has_value()) {
|
||||
throw std::runtime_error("Failed to parse update time");
|
||||
}
|
||||
|
||||
return ClioNode{.uuid = std::make_shared<boost::uuids::uuid>(), .updateTime = updateTime.value()};
|
||||
}
|
||||
|
||||
} // namespace cluster
|
||||
58
src/cluster/ClioNode.hpp
Normal file
@@ -0,0 +1,58 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of clio: https://github.com/XRPLF/clio
|
||||
Copyright (c) 2025, the clio developers.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <boost/json/conversion.hpp>
|
||||
#include <boost/json/value.hpp>
|
||||
#include <boost/uuid/uuid.hpp>
|
||||
|
||||
#include <chrono>
|
||||
#include <memory>
|
||||
|
||||
namespace cluster {
|
||||
|
||||
/**
|
||||
* @brief Represents a node in the cluster.
|
||||
*/
|
||||
struct ClioNode {
|
||||
/**
|
||||
* @brief The format of the time to store in the database.
|
||||
*/
|
||||
static constexpr char const* kTIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ";
|
||||
|
||||
// enum class WriterRole {
|
||||
// ReadOnly,
|
||||
// NotWriter,
|
||||
// Writer
|
||||
// };
|
||||
|
||||
std::shared_ptr<boost::uuids::uuid> uuid; ///< The UUID of the node.
|
||||
std::chrono::system_clock::time_point updateTime; ///< The time the data about the node was last updated.
|
||||
|
||||
// WriterRole writerRole;
|
||||
};
|
||||
|
||||
void
|
||||
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, ClioNode const& node);
|
||||
|
||||
ClioNode
|
||||
tag_invoke(boost::json::value_to_tag<ClioNode>, boost::json::value const& jv);
|
||||
|
||||
} // namespace cluster
|
||||
185
src/cluster/ClusterCommunicationService.cpp
Normal file
@@ -0,0 +1,185 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of clio: https://github.com/XRPLF/clio
|
||||
Copyright (c) 2025, the clio developers.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#include "cluster/ClusterCommunicationService.hpp"
|
||||
|
||||
#include "cluster/ClioNode.hpp"
|
||||
#include "data/BackendInterface.hpp"
|
||||
#include "util/log/Logger.hpp"
|
||||
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/asio/steady_timer.hpp>
|
||||
#include <boost/json/parse.hpp>
|
||||
#include <boost/json/serialize.hpp>
|
||||
#include <boost/json/value.hpp>
|
||||
#include <boost/json/value_from.hpp>
|
||||
#include <boost/json/value_to.hpp>
|
||||
#include <boost/uuid/random_generator.hpp>
|
||||
#include <boost/uuid/uuid.hpp>
|
||||
|
||||
#include <chrono>
|
||||
#include <ctime>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
namespace cluster {
|
||||
|
||||
ClusterCommunicationService::ClusterCommunicationService(
|
||||
std::shared_ptr<data::BackendInterface> backend,
|
||||
std::chrono::steady_clock::duration readInterval,
|
||||
std::chrono::steady_clock::duration writeInterval
|
||||
)
|
||||
: backend_(std::move(backend))
|
||||
, readInterval_(readInterval)
|
||||
, writeInterval_(writeInterval)
|
||||
, selfData_{ClioNode{
|
||||
.uuid = std::make_shared<boost::uuids::uuid>(boost::uuids::random_generator{}()),
|
||||
.updateTime = std::chrono::system_clock::time_point{}
|
||||
}}
|
||||
{
|
||||
nodesInClusterMetric_.set(1); // The node always sees itself
|
||||
isHealthy_ = true;
|
||||
}
|
||||
|
||||
void
|
||||
ClusterCommunicationService::run()
|
||||
{
|
||||
boost::asio::spawn(strand_, [this](boost::asio::yield_context yield) {
|
||||
boost::asio::steady_timer timer(yield.get_executor());
|
||||
while (true) {
|
||||
timer.expires_after(readInterval_);
|
||||
timer.async_wait(yield);
|
||||
doRead(yield);
|
||||
}
|
||||
});
|
||||
|
||||
boost::asio::spawn(strand_, [this](boost::asio::yield_context yield) {
|
||||
boost::asio::steady_timer timer(yield.get_executor());
|
||||
while (true) {
|
||||
doWrite();
|
||||
timer.expires_after(writeInterval_);
|
||||
timer.async_wait(yield);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
ClusterCommunicationService::~ClusterCommunicationService()
|
||||
{
|
||||
stop();
|
||||
}
|
||||
|
||||
void
|
||||
ClusterCommunicationService::stop()
|
||||
{
|
||||
if (stopped_)
|
||||
return;
|
||||
|
||||
ctx_.stop();
|
||||
ctx_.join();
|
||||
stopped_ = true;
|
||||
}
|
||||
|
||||
std::shared_ptr<boost::uuids::uuid>
|
||||
ClusterCommunicationService::selfUuid() const
|
||||
{
|
||||
// Uuid never changes so it is safe to copy it without using strand_
|
||||
return selfData_.uuid;
|
||||
}
|
||||
|
||||
ClioNode
|
||||
ClusterCommunicationService::selfData() const
|
||||
{
|
||||
ClioNode result{};
|
||||
boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) { result = selfData_; });
|
||||
return result;
|
||||
}
|
||||
|
||||
std::expected<std::vector<ClioNode>, std::string>
|
||||
ClusterCommunicationService::clusterData() const
|
||||
{
|
||||
if (not isHealthy_) {
|
||||
return std::unexpected{"Service is not healthy"};
|
||||
}
|
||||
std::vector<ClioNode> result;
|
||||
boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) {
|
||||
result = otherNodesData_;
|
||||
result.push_back(selfData_);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
void
|
||||
ClusterCommunicationService::doRead(boost::asio::yield_context yield)
|
||||
{
|
||||
otherNodesData_.clear();
|
||||
|
||||
BackendInterface::ClioNodesDataFetchResult expectedResult;
|
||||
try {
|
||||
expectedResult = backend_->fetchClioNodesData(yield);
|
||||
} catch (...) {
|
||||
expectedResult = std::unexpected{"Failed to fetch Clio nodes data"};
|
||||
}
|
||||
|
||||
if (!expectedResult.has_value()) {
|
||||
LOG(log_.error()) << "Failed to fetch nodes data";
|
||||
isHealthy_ = false;
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a new vector here to not have partially parsed data in otherNodesData_
|
||||
std::vector<ClioNode> otherNodesData;
|
||||
for (auto const& [uuid, nodeDataStr] : expectedResult.value()) {
|
||||
if (uuid == *selfData_.uuid) {
|
||||
continue;
|
||||
}
|
||||
|
||||
boost::system::error_code errorCode;
|
||||
auto const json = boost::json::parse(nodeDataStr, errorCode);
|
||||
if (errorCode.failed()) {
|
||||
LOG(log_.error()) << "Error parsing json from DB: " << nodeDataStr;
|
||||
isHealthy_ = false;
|
||||
return;
|
||||
}
|
||||
|
||||
auto expectedNodeData = boost::json::try_value_to<ClioNode>(json);
|
||||
if (expectedNodeData.has_error()) {
|
||||
LOG(log_.error()) << "Error converting json to ClioNode: " << json;
|
||||
isHealthy_ = false;
|
||||
return;
|
||||
}
|
||||
*expectedNodeData->uuid = uuid;
|
||||
otherNodesData.push_back(std::move(expectedNodeData).value());
|
||||
}
|
||||
otherNodesData_ = std::move(otherNodesData);
|
||||
nodesInClusterMetric_.set(otherNodesData_.size() + 1);
|
||||
isHealthy_ = true;
|
||||
}
|
||||
|
||||
void
|
||||
ClusterCommunicationService::doWrite()
|
||||
{
|
||||
selfData_.updateTime = std::chrono::system_clock::now();
|
||||
boost::json::value jsonValue{};
|
||||
boost::json::value_from(selfData_, jsonValue);
|
||||
backend_->writeNodeMessage(*selfData_.uuid, boost::json::serialize(jsonValue.as_object()));
|
||||
}
|
||||
|
||||
} // namespace cluster
|
||||
142
src/cluster/ClusterCommunicationService.hpp
Normal file
@@ -0,0 +1,142 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of clio: https://github.com/XRPLF/clio
|
||||
Copyright (c) 2025, the clio developers.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "cluster/ClioNode.hpp"
|
||||
#include "cluster/ClusterCommunicationServiceInterface.hpp"
|
||||
#include "data/BackendInterface.hpp"
|
||||
#include "util/log/Logger.hpp"
|
||||
#include "util/prometheus/Bool.hpp"
|
||||
#include "util/prometheus/Gauge.hpp"
|
||||
#include "util/prometheus/Prometheus.hpp"
|
||||
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/asio/strand.hpp>
|
||||
#include <boost/asio/thread_pool.hpp>
|
||||
#include <boost/uuid/uuid.hpp>
|
||||
|
||||
#include <chrono>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
namespace cluster {
|
||||
|
||||
/**
|
||||
* @brief Service to post and read messages to/from the cluster. It uses a backend to communicate with the cluster.
|
||||
*/
|
||||
class ClusterCommunicationService : public ClusterCommunicationServiceInterface {
|
||||
util::prometheus::GaugeInt& nodesInClusterMetric_ = PrometheusService::gaugeInt(
|
||||
"cluster_nodes_total_number",
|
||||
{},
|
||||
"Total number of nodes this node can detect in the cluster."
|
||||
);
|
||||
util::prometheus::Bool isHealthy_ = PrometheusService::boolMetric(
|
||||
"cluster_communication_is_healthy",
|
||||
{},
|
||||
"Whether cluster communication service is operating healthy (1 - healthy, 0 - we have a problem)"
|
||||
);
|
||||
|
||||
// TODO: Use util::async::CoroExecutionContext after https://github.com/XRPLF/clio/issues/1973 is implemented
|
||||
boost::asio::thread_pool ctx_{1};
|
||||
boost::asio::strand<boost::asio::thread_pool::executor_type> strand_ = boost::asio::make_strand(ctx_);
|
||||
|
||||
util::Logger log_{"ClusterCommunication"};
|
||||
|
||||
std::shared_ptr<data::BackendInterface> backend_;
|
||||
|
||||
std::chrono::steady_clock::duration readInterval_;
|
||||
std::chrono::steady_clock::duration writeInterval_;
|
||||
|
||||
ClioNode selfData_;
|
||||
std::vector<ClioNode> otherNodesData_;
|
||||
|
||||
bool stopped_ = false;
|
||||
|
||||
public:
|
||||
static constexpr std::chrono::milliseconds kDEFAULT_READ_INTERVAL{2100};
|
||||
static constexpr std::chrono::milliseconds kDEFAULT_WRITE_INTERVAL{1200};
|
||||
/**
|
||||
* @brief Construct a new Cluster Communication Service object.
|
||||
*
|
||||
* @param backend The backend to use for communication.
|
||||
* @param readInterval The interval to read messages from the cluster.
|
||||
* @param writeInterval The interval to write messages to the cluster.
|
||||
*/
|
||||
ClusterCommunicationService(
|
||||
std::shared_ptr<data::BackendInterface> backend,
|
||||
std::chrono::steady_clock::duration readInterval = kDEFAULT_READ_INTERVAL,
|
||||
std::chrono::steady_clock::duration writeInterval = kDEFAULT_WRITE_INTERVAL
|
||||
);
|
||||
|
||||
~ClusterCommunicationService() override;
|
||||
|
||||
/**
|
||||
* @brief Start the service.
|
||||
*/
|
||||
void
|
||||
run();
|
||||
|
||||
/**
|
||||
* @brief Stop the service.
|
||||
*/
|
||||
void
|
||||
stop();
|
||||
|
||||
ClusterCommunicationService(ClusterCommunicationService&&) = delete;
|
||||
ClusterCommunicationService(ClusterCommunicationService const&) = delete;
|
||||
ClusterCommunicationService&
|
||||
operator=(ClusterCommunicationService&&) = delete;
|
||||
ClusterCommunicationService&
|
||||
operator=(ClusterCommunicationService const&) = delete;
|
||||
|
||||
/**
|
||||
* @brief Get the UUID of the current node.
|
||||
*
|
||||
* @return The UUID of the current node.
|
||||
*/
|
||||
std::shared_ptr<boost::uuids::uuid>
|
||||
selfUuid() const;
|
||||
|
||||
/**
|
||||
* @brief Get the data of the current node.
|
||||
*
|
||||
* @return The data of the current node.
|
||||
*/
|
||||
ClioNode
|
||||
selfData() const override;
|
||||
|
||||
/**
|
||||
* @brief Get the data of all nodes in the cluster (including self).
|
||||
*
|
||||
* @return The data of all nodes in the cluster or error if the service is not healthy.
|
||||
*/
|
||||
std::expected<std::vector<ClioNode>, std::string>
|
||||
clusterData() const override;
|
||||
|
||||
private:
|
||||
void
|
||||
doRead(boost::asio::yield_context yield);
|
||||
|
||||
void
|
||||
doWrite();
|
||||
};
|
||||
|
||||
} // namespace cluster
|
||||
54
src/cluster/ClusterCommunicationServiceInterface.hpp
Normal file
@@ -0,0 +1,54 @@
//------------------------------------------------------------------------------
/*
    This file is part of clio: https://github.com/XRPLF/clio
    Copyright (c) 2025, the clio developers.

    Permission to use, copy, modify, and distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================

#pragma once

#include "cluster/ClioNode.hpp"

#include <expected>
#include <string>
#include <vector>

namespace cluster {

/**
 * @brief Interface for the cluster communication service.
 */
class ClusterCommunicationServiceInterface {
public:
    virtual ~ClusterCommunicationServiceInterface() = default;

    /**
     * @brief Get the data of the current node.
     *
     * @return The data of the current node.
     */
    [[nodiscard]] virtual ClioNode
    selfData() const = 0;

    /**
     * @brief Get the data of all nodes in the cluster (including self).
     *
     * @return The data of all nodes in the cluster or error if the service is not healthy.
     */
    [[nodiscard]] virtual std::expected<std::vector<ClioNode>, std::string>
    clusterData() const = 0;
};

} // namespace cluster
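Since consumers are expected to depend on this interface rather than the concrete service, a hypothetical test double is enough to exercise such code; the names below are illustrative and not part of the diff:

// Hypothetical test double (not part of this diff) implementing the interface
// with canned data, useful when unit-testing consumers of clusterData().
#include "cluster/ClusterCommunicationServiceInterface.hpp"

#include <expected>
#include <string>
#include <vector>

namespace example {

class FakeClusterService : public cluster::ClusterCommunicationServiceInterface {
public:
    cluster::ClioNode
    selfData() const override
    {
        return self_;
    }

    std::expected<std::vector<cluster::ClioNode>, std::string>
    clusterData() const override
    {
        if (not healthy_)
            return std::unexpected{"service is not healthy"};
        return nodes_;
    }

private:
    cluster::ClioNode self_{};                // assumes ClioNode is default-constructible
    std::vector<cluster::ClioNode> nodes_{};  // canned cluster membership
    bool healthy_ = true;
};

} // namespace example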
@@ -137,6 +137,8 @@ struct Amendments {
    REGISTER(fixInvalidTxFlags);
    REGISTER(fixFrozenLPTokenTransfer);
    REGISTER(DeepFreeze);
    REGISTER(PermissionDelegation);
    REGISTER(fixPayChanCancelAfter);

    // Obsolete but supported by libxrpl
    REGISTER(CryptoConditionsSuite);

@@ -23,8 +23,8 @@
#include "data/CassandraBackend.hpp"
#include "data/LedgerCacheInterface.hpp"
#include "data/cassandra/SettingsProvider.hpp"
#include "util/config/ConfigDefinition.hpp"
#include "util/log/Logger.hpp"
#include "util/newconfig/ConfigDefinition.hpp"

#include <boost/algorithm/string.hpp>
#include <boost/algorithm/string/predicate.hpp>

@@ -61,7 +61,7 @@ BackendInterface::finishWrites(std::uint32_t const ledgerSequence)
    LOG(gLog.debug()) << "Want finish writes for " << ledgerSequence;
    auto commitRes = doFinishWrites();
    if (commitRes) {
        LOG(gLog.debug()) << "Successfully commited. Updating range now to " << ledgerSequence;
        LOG(gLog.debug()) << "Successfully committed. Updating range now to " << ledgerSequence;
        updateRange(ledgerSequence);
    }
    return commitRes;
@@ -246,7 +246,7 @@ BackendInterface::fetchBookOffers(
    auto end = std::chrono::system_clock::now();
    LOG(gLog.debug()) << "Fetching " << std::to_string(keys.size()) << " offers took "
                      << std::to_string(getMillis(mid - begin)) << " milliseconds. Fetching next dir took "
                      << std::to_string(succMillis) << " milliseonds. Fetched next dir " << std::to_string(numSucc)
                      << std::to_string(succMillis) << " milliseconds. Fetched next dir " << std::to_string(numSucc)
                      << " times"
                      << " Fetching next page of dir took " << std::to_string(pageMillis) << " milliseconds"
                      << ". num pages = " << std::to_string(numPages) << ". Fetching all objects took "

@@ -31,6 +31,7 @@
#include <boost/json.hpp>
#include <boost/json/object.hpp>
#include <boost/utility/result_of.hpp>
#include <boost/uuid/uuid.hpp>
#include <xrpl/basics/base_uint.h>
#include <xrpl/protocol/AccountID.h>
#include <xrpl/protocol/Fees.h>
@@ -68,7 +69,7 @@ public:

static constexpr std::size_t kDEFAULT_WAIT_BETWEEN_RETRY = 500;
/**
 * @brief A helper function that catches DatabaseTimout exceptions and retries indefinitely.
 * @brief A helper function that catches DatabaseTimeout exceptions and retries indefinitely.
 *
 * @tparam FnType The type of function object to execute
 * @param func The function object to execute
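The helper documented above can be pictured roughly as follows; this is a simplified sketch, not clio's actual implementation, and the stand-in exception type below merely mirrors the DatabaseTimeout named in the doc comment:

// Simplified sketch of a retry-on-timeout helper: keep invoking func until it
// stops throwing a timeout exception, pausing between attempts. Not clio's exact code.
#include <chrono>
#include <cstddef>
#include <exception>
#include <thread>

struct DatabaseTimeoutLike : std::exception {};  // stand-in for the DatabaseTimeout type mentioned above

template <typename FnType>
auto
retryOnTimeoutSketch(FnType&& func, std::size_t waitMs = 500)
{
    for (;;) {
        try {
            return func();
        } catch (DatabaseTimeoutLike const&) {
            std::this_thread::sleep_for(std::chrono::milliseconds(waitMs));
        }
    }
}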
@@ -154,7 +155,7 @@ public:
    }
    virtual ~BackendInterface() = default;

    // TODO: Remove this hack once old ETL is removed.
    // TODO https://github.com/XRPLF/clio/issues/1956: Remove this hack once old ETL is removed.
    // Cache should not be exposed thru BackendInterface

    /**
@@ -397,7 +398,7 @@
     * @brief Fetches a specific ledger object.
     *
     * Currently the real fetch happens in doFetchLedgerObject and fetchLedgerObject attempts to fetch from Cache first
     * and only calls out to the real DB if a cache miss ocurred.
     * and only calls out to the real DB if a cache miss occurred.
     *
     * @param key The key of the object
     * @param sequence The ledger sequence to fetch for
@@ -511,7 +512,7 @@ public:
     * @param key The key to fetch for
     * @param ledgerSequence The ledger sequence to fetch for
     * @param yield The coroutine context
     * @return The sucessor on success; nullopt otherwise
     * @return The successor on success; nullopt otherwise
     */
    std::optional<LedgerObject>
    fetchSuccessorObject(ripple::uint256 key, std::uint32_t ledgerSequence, boost::asio::yield_context yield) const;
@@ -525,7 +526,7 @@ public:
     * @param key The key to fetch for
     * @param ledgerSequence The ledger sequence to fetch for
     * @param yield The coroutine context
     * @return The sucessor key on success; nullopt otherwise
     * @return The successor key on success; nullopt otherwise
     */
    std::optional<ripple::uint256>
    fetchSuccessorKey(ripple::uint256 key, std::uint32_t ledgerSequence, boost::asio::yield_context yield) const;
@@ -536,7 +537,7 @@ public:
     * @param key The key to fetch for
     * @param ledgerSequence The ledger sequence to fetch for
     * @param yield The coroutine context
     * @return The sucessor on success; nullopt otherwise
     * @return The successor on success; nullopt otherwise
     */
    virtual std::optional<ripple::uint256>
    doFetchSuccessorKey(ripple::uint256 key, std::uint32_t ledgerSequence, boost::asio::yield_context yield) const = 0;
@@ -568,6 +569,19 @@ public:
    virtual std::optional<std::string>
    fetchMigratorStatus(std::string const& migratorName, boost::asio::yield_context yield) const = 0;

    /** @brief Return type for fetchClioNodesData() method */
    using ClioNodesDataFetchResult =
        std::expected<std::vector<std::pair<boost::uuids::uuid, std::string>>, std::string>;

    /**
     * @brief Fetches the data of all nodes in the cluster.
     *
     * @param yield The coroutine context
     * @return The data of all nodes in the cluster.
     */
    [[nodiscard]] virtual ClioNodesDataFetchResult
    fetchClioNodesData(boost::asio::yield_context yield) const = 0;

    /**
     * @brief Synchronously fetches the ledger range from DB.
     *
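A sketch of how a coroutine-based caller might consume fetchClioNodesData (the function name and output handling below are illustrative only, not part of the diff):

// Sketch of a coroutine-based reader dumping every node's stored message.
#include "data/BackendInterface.hpp"

#include <boost/asio/spawn.hpp>
#include <boost/uuid/uuid_io.hpp>

#include <iostream>

void
dumpClusterMessages(data::BackendInterface const& backend, boost::asio::yield_context yield)
{
    auto const nodes = backend.fetchClioNodesData(yield);
    if (not nodes.has_value()) {
        std::cerr << "failed to fetch nodes data: " << nodes.error() << '\n';
        return;
    }

    for (auto const& [uuid, message] : *nodes)
        std::cout << boost::uuids::to_string(uuid) << ": " << message << '\n';
}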
@@ -648,6 +662,14 @@ public:
    virtual void
    writeAccountTransactions(std::vector<AccountTransactionsData> data) = 0;

    /**
     * @brief Write a new account transaction.
     *
     * @param record An object representing the account transaction
     */
    virtual void
    writeAccountTransaction(AccountTransactionsData record) = 0;

    /**
     * @brief Write NFTs transactions.
     *
@@ -674,6 +696,15 @@ public:
    virtual void
    writeSuccessor(std::string&& key, std::uint32_t seq, std::string&& successor) = 0;

    /**
     * @brief Write a node message. Used by ClusterCommunicationService
     *
     * @param uuid The UUID of the node
     * @param message The message to write
     */
    virtual void
    writeNodeMessage(boost::uuids::uuid const& uuid, std::string message) = 0;

    /**
     * @brief Starts a write transaction with the DB. No-op for cassandra.
     *

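The write half that this method enables could look roughly like the sketch below; the JSON field name and helper function are assumptions, not taken from the diff:

// Sketch of a periodic "publish our own node message" step, as the service
// described earlier might perform it. The JSON field name is hypothetical.
#include "data/BackendInterface.hpp"

#include <boost/json/object.hpp>
#include <boost/json/serialize.hpp>
#include <boost/uuid/uuid.hpp>

#include <chrono>

void
publishSelf(data::BackendInterface& backend, boost::uuids::uuid const& selfUuid)
{
    auto const now =
        std::chrono::duration_cast<std::chrono::seconds>(std::chrono::system_clock::now().time_since_epoch());

    boost::json::object message;
    message["update_time"] = now.count();  // hypothetical field name

    backend.writeNodeMessage(selfUuid, boost::json::serialize(message));
}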
@@ -5,6 +5,7 @@ target_sources(
    BackendCounters.cpp
    BackendInterface.cpp
    LedgerCache.cpp
    LedgerHeaderCache.cpp
    cassandra/impl/Future.cpp
    cassandra/impl/Cluster.cpp
    cassandra/impl/Batch.cpp

@@ -22,6 +22,7 @@
#include "data/BackendInterface.hpp"
#include "data/DBHelpers.hpp"
#include "data/LedgerCacheInterface.hpp"
#include "data/LedgerHeaderCache.hpp"
#include "data/Types.hpp"
#include "data/cassandra/Concepts.hpp"
#include "data/cassandra/Handle.hpp"
@@ -36,6 +37,8 @@

#include <boost/asio/spawn.hpp>
#include <boost/json/object.hpp>
#include <boost/uuid/string_generator.hpp>
#include <boost/uuid/uuid.hpp>
#include <cassandra.h>
#include <fmt/core.h>
#include <xrpl/basics/Blob.h>
@@ -46,6 +49,7 @@
#include <xrpl/protocol/LedgerHeader.h>
#include <xrpl/protocol/nft.h>

#include <algorithm>
#include <atomic>
#include <chrono>
#include <cstddef>
@@ -59,6 +63,8 @@
#include <utility>
#include <vector>

class CacheBackendCassandraTest;

namespace data::cassandra {

/**
@@ -68,21 +74,27 @@ namespace data::cassandra {
 *
 * @tparam SettingsProviderType The settings provider type to use
 * @tparam ExecutionStrategyType The execution strategy type to use
 * @tparam FetchLedgerCacheType The ledger header cache type to use
 */
template <SomeSettingsProvider SettingsProviderType, SomeExecutionStrategy ExecutionStrategyType>
template <
    SomeSettingsProvider SettingsProviderType,
    SomeExecutionStrategy ExecutionStrategyType,
    typename FetchLedgerCacheType = FetchLedgerCache>
class BasicCassandraBackend : public BackendInterface {
    util::Logger log_{"Backend"};

    SettingsProviderType settingsProvider_;
    Schema<SettingsProviderType> schema_;

    std::atomic_uint32_t ledgerSequence_ = 0u;
    friend class ::CacheBackendCassandraTest;

protected:
    Handle handle_;

    // have to be mutable because BackendInterface constness :(
    mutable ExecutionStrategyType executor_;
    // TODO: move to interface level
    mutable FetchLedgerCacheType ledgerCache_{};

public:
    /**
@@ -126,7 +138,6 @@ public:
            LOG(log_.error()) << error;
            throw std::runtime_error(error);
        }

        LOG(log_.info()) << "Created (revamped) CassandraBackend";
    }

@@ -260,11 +271,16 @@ public:
    std::optional<ripple::LedgerHeader>
    fetchLedgerBySequence(std::uint32_t const sequence, boost::asio::yield_context yield) const override
    {
        if (auto const lock = ledgerCache_.get(); lock.has_value() && lock->seq == sequence)
            return lock->ledger;

        auto const res = executor_.read(yield, schema_->selectLedgerBySeq, sequence);
        if (res) {
            if (auto const& result = res.value(); result) {
                if (auto const maybeValue = result.template get<std::vector<unsigned char>>(); maybeValue) {
                    return util::deserializeHeader(ripple::makeSlice(*maybeValue));
                    auto const header = util::deserializeHeader(ripple::makeSlice(*maybeValue));
                    ledgerCache_.put(FetchLedgerCache::CacheEntry{header, sequence});
                    return header;
                }

                LOG(log_.error()) << "Could not fetch ledger by sequence - no rows";
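The ledgerCache_ member used above only needs get() and put() over a small header-plus-sequence entry; a minimal stand-in inferred from those calls (not the actual LedgerHeaderCache.hpp contents) might look like this:

// Minimal single-entry cache sketch matching how ledgerCache_ is used above:
// get() returns the last cached header with its sequence, put() replaces it.
#include <xrpl/protocol/LedgerHeader.h>

#include <cstdint>
#include <mutex>
#include <optional>
#include <utility>

struct FetchLedgerCacheSketch {
    struct CacheEntry {
        ripple::LedgerHeader ledger;
        std::uint32_t seq = 0;
    };

    std::optional<CacheEntry>
    get() const
    {
        std::lock_guard const lock{mtx_};
        return entry_;
    }

    void
    put(CacheEntry entry)
    {
        std::lock_guard const lock{mtx_};
        entry_ = std::move(entry);
    }

private:
    mutable std::mutex mtx_;
    std::optional<CacheEntry> entry_;
};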
@@ -777,7 +793,7 @@ public:

        while (liveAccounts.size() < number) {
            Statement const statement = lastItem ? schema_->selectAccountFromToken.bind(*lastItem, Limit{pageSize})
                                                 : schema_->selectAccountFromBegining.bind(Limit{pageSize});
                                                 : schema_->selectAccountFromBeginning.bind(Limit{pageSize});

            auto const res = executor_.read(yield, statement);
            if (res) {
@@ -877,6 +893,22 @@ public:
        return {};
    }

    std::expected<std::vector<std::pair<boost::uuids::uuid, std::string>>, std::string>
    fetchClioNodesData(boost::asio::yield_context yield) const override
    {
        auto const readResult = executor_.read(yield, schema_->selectClioNodesData);
        if (not readResult)
            return std::unexpected{readResult.error().message()};

        std::vector<std::pair<boost::uuids::uuid, std::string>> result;

        for (auto [uuid, message] : extract<boost::uuids::uuid, std::string>(*readResult)) {
            result.emplace_back(uuid, std::move(message));
        }

        return result;
    }

    void
    doWriteLedgerObject(std::string&& key, std::uint32_t const seq, std::string&& blob) override
    {
@@ -906,19 +938,31 @@ public:
        statements.reserve(data.size() * 10); // assume 10 transactions avg

        for (auto& record : data) {
            std::transform(
                std::begin(record.accounts),
                std::end(record.accounts),
                std::back_inserter(statements),
                [this, &record](auto&& account) {
            std::ranges::transform(record.accounts, std::back_inserter(statements), [this, &record](auto&& account) {
                return schema_->insertAccountTx.bind(
                    std::forward<decltype(account)>(account),
                    std::make_tuple(record.ledgerSequence, record.transactionIndex),
                    record.txHash
                );
            });
        }

        executor_.write(std::move(statements));
    }

    void
    writeAccountTransaction(AccountTransactionsData record) override
    {
        std::vector<Statement> statements;
        statements.reserve(record.accounts.size());

        std::ranges::transform(record.accounts, std::back_inserter(statements), [this, &record](auto&& account) {
            return schema_->insertAccountTx.bind(
                std::forward<decltype(account)>(account),
                std::make_tuple(record.ledgerSequence, record.transactionIndex),
                record.txHash
            );
            }
        });

        executor_.write(std::move(statements));
    }
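The refactor above replaces iterator-pair std::transform with std::ranges::transform; a standalone illustration of the equivalence, unrelated to the schema types in this diff:

// Generic illustration of the iterator-pair vs. ranges form of transform.
#include <algorithm>
#include <iterator>
#include <string>
#include <vector>

int
main()
{
    std::vector<int> const numbers{1, 2, 3};
    std::vector<std::string> out;
    out.reserve(numbers.size());

    // Iterator-pair form (what the old code used):
    std::transform(std::cbegin(numbers), std::cend(numbers), std::back_inserter(out), [](int n) {
        return std::to_string(n);
    });

    out.clear();

    // Ranges form (what the new code uses), same result with less boilerplate:
    std::ranges::transform(numbers, std::back_inserter(out), [](int n) { return std::to_string(n); });
}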
@@ -929,7 +973,7 @@ public:
        std::vector<Statement> statements;
        statements.reserve(data.size());

        std::transform(std::cbegin(data), std::cend(data), std::back_inserter(statements), [this](auto const& record) {
        std::ranges::transform(data, std::back_inserter(statements), [this](auto const& record) {
            return schema_->insertNFTTx.bind(
                record.tokenID, std::make_tuple(record.ledgerSequence, record.transactionIndex), record.txHash
            );
@@ -999,7 +1043,7 @@ public:
        std::vector<Statement> statements;
        statements.reserve(data.size());
        for (auto [mptId, holder] : data)
            statements.push_back(schema_->insertMPTHolder.bind(std::move(mptId), std::move(holder)));
            statements.push_back(schema_->insertMPTHolder.bind(mptId, holder));

        executor_.write(std::move(statements));
    }
@@ -1019,6 +1063,12 @@ public:
        );
    }

    void
    writeNodeMessage(boost::uuids::uuid const& uuid, std::string message) override
    {
        executor_.writeSync(schema_->updateClioNodeMessage, data::cassandra::Text{std::move(message)}, uuid);
    }

    bool
    isTooBusy() const override
    {

@@ -54,7 +54,7 @@ struct AccountTransactionsData {
     * @param meta The transaction metadata
     * @param txHash The transaction hash
     */
    AccountTransactionsData(ripple::TxMeta& meta, ripple::uint256 const& txHash)
    AccountTransactionsData(ripple::TxMeta const& meta, ripple::uint256 const& txHash)
        : accounts(meta.getAffectedAccounts())
        , ledgerSequence(meta.getLgrSeq())
        , transactionIndex(meta.getIndex())

@@ -20,6 +20,7 @@
#include "data/LedgerCache.hpp"

#include "data/Types.hpp"
#include "etlng/Models.hpp"
#include "util/Assert.hpp"

#include <xrpl/basics/base_uint.h>
@@ -62,7 +63,7 @@ LedgerCache::update(std::vector<LedgerObject> const& objs, uint32_t seq, bool is
    if (seq > latestSeq_) {
        ASSERT(
            seq == latestSeq_ + 1 || latestSeq_ == 0,
            "New sequense must be either next or first. seq = {}, latestSeq_ = {}",
            "New sequence must be either next or first. seq = {}, latestSeq_ = {}",
            seq,
            latestSeq_
        );
@@ -87,6 +88,42 @@ LedgerCache::update(std::vector<LedgerObject> const& objs, uint32_t seq, bool is
    }
}

void
LedgerCache::update(std::vector<etlng::model::Object> const& objs, uint32_t seq)
{
    if (disabled_)
        return;

    std::scoped_lock const lck{mtx_};
    if (seq > latestSeq_) {
        ASSERT(
            seq == latestSeq_ + 1 || latestSeq_ == 0,
            "New sequence must be either next or first. seq = {}, latestSeq_ = {}",
            seq,
            latestSeq_
        );
        latestSeq_ = seq;
    }

    deleted_.clear(); // previous update's deletes no longer needed

    for (auto const& obj : objs) {
        if (!obj.data.empty()) {
            auto& e = map_[obj.key];
            if (seq > e.seq)
                e = {.seq = seq, .blob = obj.data};
        } else {
            if (map_.contains(obj.key))
                deleted_[obj.key] = map_[obj.key];

            map_.erase(obj.key);
            if (!full_)
                deletes_.insert(obj.key);
        }
    }
    cv_.notify_all();
}

std::optional<LedgerObject>
LedgerCache::getSuccessor(ripple::uint256 const& key, uint32_t seq) const
{
@@ -139,6 +176,29 @@ LedgerCache::get(ripple::uint256 const& key, uint32_t seq) const
    return {e->second.blob};
}

std::optional<Blob>
LedgerCache::getDeleted(ripple::uint256 const& key, uint32_t seq) const
{
    if (disabled_)
        return std::nullopt;

    std::shared_lock const lck{mtx_};
    if (seq > latestSeq_)
        return std::nullopt;

    ++objectReqCounter_.get();

    auto e = deleted_.find(key);
    if (e == deleted_.end())
        return std::nullopt;

    if (seq < e->second.seq)
        return std::nullopt;

    ++objectHitCounter_.get();
    return {e->second.blob};
}

void
LedgerCache::setDisabled()
{
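The new getDeleted() mirrors get() but answers for objects removed in the most recent cache update; a hypothetical caller-side fallback (not from this diff) could look like:

// Illustrative caller-side fallback: prefer the live object, otherwise check
// whether it was deleted by the most recent cache update.
#include "data/LedgerCache.hpp"

#include <xrpl/basics/base_uint.h>

#include <cstdint>
#include <optional>

std::optional<data::Blob>  // data::Blob as returned by LedgerCache::get above
lookupWithDeletedFallback(data::LedgerCache const& cache, ripple::uint256 const& key, std::uint32_t seq)
{
    if (auto live = cache.get(key, seq); live.has_value())
        return live;

    return cache.getDeleted(key, seq);
}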
Some files were not shown because too many files have changed in this diff.