Compare commits


1 commit

Author: godexsoft
SHA1: 782fea3837
Message: [CI] clang-tidy auto fixes
Date: 2025-04-18 09:27:58 +00:00
384 changed files with 4277 additions and 6218 deletions


@@ -1,5 +1,5 @@
---
Language: Cpp
Language: Cpp
AccessModifierOffset: -4
AlignAfterOpenBracket: BlockIndent
AlignConsecutiveAssignments: false
@@ -22,31 +22,31 @@ BreakBeforeBinaryOperators: false
BreakBeforeBraces: WebKit
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: true
ColumnLimit: 120
CommentPragmas: "^ IWYU pragma:"
ColumnLimit: 120
CommentPragmas: '^ IWYU pragma:'
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 4
ContinuationIndentWidth: 4
Cpp11BracedListStyle: true
DerivePointerAlignment: false
DisableFormat: false
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
ForEachMacros: [Q_FOREACH, BOOST_FOREACH]
ForEachMacros: [ Q_FOREACH, BOOST_FOREACH ]
IncludeBlocks: Regroup
IncludeCategories:
- Regex: '^".*"$'
Priority: 1
- Regex: '^<.*\.(h|hpp)>$'
Priority: 2
- Regex: "^<.*>$"
Priority: 3
- Regex: ".*"
Priority: 4
IncludeIsMainRegex: "$"
- Regex: '^".*"$'
Priority: 1
- Regex: '^<.*\.(h|hpp)>$'
Priority: 2
- Regex: '^<.*>$'
Priority: 3
- Regex: '.*'
Priority: 4
IncludeIsMainRegex: '$'
IndentCaseLabels: true
IndentFunctionDeclarationAfterType: false
IndentWidth: 4
IndentWidth: 4
IndentWrappedFunctionNames: false
IndentRequiresClause: true
RequiresClausePosition: OwnLine
@@ -63,18 +63,18 @@ PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 200
PointerAlignment: Left
QualifierAlignment: Right
ReflowComments: true
SortIncludes: true
ReflowComments: true
SortIncludes: true
SpaceAfterCStyleCast: false
SpaceBeforeAssignmentOperators: true
SpaceBeforeParens: ControlStatements
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: false
SpacesInAngles: false
SpacesInContainerLiterals: true
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Cpp11
TabWidth: 8
UseTab: Never
Standard: Cpp11
TabWidth: 8
UseTab: Never
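
The block above is the project's clang-format configuration (Language: Cpp, column limit 120, WebKit-style braces). A minimal sketch of exercising it locally; the source path is hypothetical:

```bash
# Format in place using the nearest .clang-format file (--style=file).
clang-format -i --style=file src/main/Main.cpp

# Or verify formatting without touching the file; --Werror makes
# violations exit non-zero, which is what CI-style checks rely on.
clang-format --dry-run --Werror src/main/Main.cpp
```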


@@ -1,5 +1,5 @@
---
Checks: "-*,
Checks: '-*,
bugprone-argument-comment,
bugprone-assert-side-effect,
bugprone-bad-signal-to-kill-thread,
@@ -146,7 +146,7 @@ Checks: "-*,
readability-static-definition-in-anonymous-namespace,
readability-suspicious-call-argument,
readability-use-std-min-max
"
'
CheckOptions:
readability-braces-around-statements.ShortStatementLines: 2
@@ -158,21 +158,21 @@ CheckOptions:
readability-identifier-naming.EnumConstantCase: CamelCase
readability-identifier-naming.ScopedEnumConstantCase: CamelCase
readability-identifier-naming.GlobalConstantCase: UPPER_CASE
readability-identifier-naming.GlobalConstantPrefix: "k"
readability-identifier-naming.GlobalConstantPrefix: 'k'
readability-identifier-naming.GlobalVariableCase: CamelCase
readability-identifier-naming.GlobalVariablePrefix: "g"
readability-identifier-naming.GlobalVariablePrefix: 'g'
readability-identifier-naming.ConstexprFunctionCase: camelBack
readability-identifier-naming.ConstexprMethodCase: camelBack
readability-identifier-naming.ClassMethodCase: camelBack
readability-identifier-naming.ClassMemberCase: camelBack
readability-identifier-naming.ClassConstantCase: UPPER_CASE
readability-identifier-naming.ClassConstantPrefix: "k"
readability-identifier-naming.ClassConstantPrefix: 'k'
readability-identifier-naming.StaticConstantCase: UPPER_CASE
readability-identifier-naming.StaticConstantPrefix: "k"
readability-identifier-naming.StaticConstantPrefix: 'k'
readability-identifier-naming.StaticVariableCase: UPPER_CASE
readability-identifier-naming.StaticVariablePrefix: "k"
readability-identifier-naming.StaticVariablePrefix: 'k'
readability-identifier-naming.ConstexprVariableCase: UPPER_CASE
readability-identifier-naming.ConstexprVariablePrefix: "k"
readability-identifier-naming.ConstexprVariablePrefix: 'k'
readability-identifier-naming.LocalConstantCase: camelBack
readability-identifier-naming.LocalVariableCase: camelBack
readability-identifier-naming.TemplateParameterCase: CamelCase
@@ -181,11 +181,11 @@ CheckOptions:
readability-identifier-naming.MemberCase: camelBack
readability-identifier-naming.PrivateMemberSuffix: _
readability-identifier-naming.ProtectedMemberSuffix: _
readability-identifier-naming.PublicMemberSuffix: ""
readability-identifier-naming.FunctionIgnoredRegexp: ".*tag_invoke.*"
readability-identifier-naming.PublicMemberSuffix: ''
readability-identifier-naming.FunctionIgnoredRegexp: '.*tag_invoke.*'
bugprone-unsafe-functions.ReportMoreUnsafeFunctions: true
bugprone-unused-return-value.CheckedReturnTypes: ::std::error_code;::std::error_condition;::std::errc
misc-include-cleaner.IgnoreHeaders: '.*/(detail|impl)/.*;.*(expected|unexpected).*;.*ranges_lower_bound\.h;time.h;stdlib.h;__chrono/.*;fmt/chrono.h;boost/uuid/uuid_hash.hpp'
HeaderFilterRegex: '^.*/(src|tests)/.*\.(h|hpp)$'
WarningsAsErrors: "*"
WarningsAsErrors: '*'
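
This is the clang-tidy configuration; with WarningsAsErrors: '*', every enabled check is treated as an error. A hedged sketch of running it over a CMake compilation database (the build directory name and file regex are assumptions):

```bash
# Produce compile_commands.json, then run the configured checks in parallel.
# run-clang-tidy picks up the .clang-tidy file automatically.
cmake -S . -B build -DCMAKE_EXPORT_COMPILE_COMMANDS=ON
run-clang-tidy -p build -j "$(nproc)" 'src/.*\.(cpp|hpp)'
```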


@@ -1,222 +1,222 @@
_help_parse: Options affecting listfile parsing
parse:
_help_additional_commands:
- Specify structure for custom cmake functions
- Specify structure for custom cmake functions
additional_commands:
foo:
flags:
- BAR
- BAZ
- BAR
- BAZ
kwargs:
HEADERS: "*"
SOURCES: "*"
DEPENDS: "*"
HEADERS: '*'
SOURCES: '*'
DEPENDS: '*'
_help_override_spec:
- Override configurations per-command where available
- Override configurations per-command where available
override_spec: {}
_help_vartags:
- Specify variable tags.
- Specify variable tags.
vartags: []
_help_proptags:
- Specify property tags.
- Specify property tags.
proptags: []
_help_format: Options affecting formatting.
format:
_help_disable:
- Disable formatting entirely, making cmake-format a no-op
- Disable formatting entirely, making cmake-format a no-op
disable: false
_help_line_width:
- How wide to allow formatted cmake files
- How wide to allow formatted cmake files
line_width: 120
_help_tab_size:
- How many spaces to tab for indent
- How many spaces to tab for indent
tab_size: 2
_help_use_tabchars:
- If true, lines are indented using tab characters (utf-8
- 0x09) instead of <tab_size> space characters (utf-8 0x20).
- In cases where the layout would require a fractional tab
- character, the behavior of the fractional indentation is
- governed by <fractional_tab_policy>
- If true, lines are indented using tab characters (utf-8
- 0x09) instead of <tab_size> space characters (utf-8 0x20).
- In cases where the layout would require a fractional tab
- character, the behavior of the fractional indentation is
- governed by <fractional_tab_policy>
use_tabchars: false
_help_fractional_tab_policy:
- If <use_tabchars> is True, then the value of this variable
- indicates how fractional indentions are handled during
- whitespace replacement. If set to 'use-space', fractional
- indentation is left as spaces (utf-8 0x20). If set to
- "`round-up` fractional indentation is replaced with a single"
- tab character (utf-8 0x09) effectively shifting the column
- to the next tabstop
- If <use_tabchars> is True, then the value of this variable
- indicates how fractional indentions are handled during
- whitespace replacement. If set to 'use-space', fractional
- indentation is left as spaces (utf-8 0x20). If set to
- '`round-up` fractional indentation is replaced with a single'
- tab character (utf-8 0x09) effectively shifting the column
- to the next tabstop
fractional_tab_policy: use-space
_help_max_subgroups_hwrap:
- If an argument group contains more than this many sub-groups
- (parg or kwarg groups) then force it to a vertical layout.
- If an argument group contains more than this many sub-groups
- (parg or kwarg groups) then force it to a vertical layout.
max_subgroups_hwrap: 4
_help_max_pargs_hwrap:
- If a positional argument group contains more than this many
- arguments, then force it to a vertical layout.
- If a positional argument group contains more than this many
- arguments, then force it to a vertical layout.
max_pargs_hwrap: 6
_help_max_rows_cmdline:
- If a cmdline positional group consumes more than this many
- lines without nesting, then invalidate the layout (and nest)
- If a cmdline positional group consumes more than this many
- lines without nesting, then invalidate the layout (and nest)
max_rows_cmdline: 2
_help_separate_ctrl_name_with_space:
- If true, separate flow control names from their parentheses
- with a space
- If true, separate flow control names from their parentheses
- with a space
separate_ctrl_name_with_space: true
_help_separate_fn_name_with_space:
- If true, separate function names from parentheses with a
- space
- If true, separate function names from parentheses with a
- space
separate_fn_name_with_space: false
_help_dangle_parens:
- If a statement is wrapped to more than one line, than dangle
- the closing parenthesis on its own line.
- If a statement is wrapped to more than one line, than dangle
- the closing parenthesis on its own line.
dangle_parens: true
_help_dangle_align:
- If the trailing parenthesis must be 'dangled' on its on
- "line, then align it to this reference: `prefix`: the start"
- "of the statement, `prefix-indent`: the start of the"
- "statement, plus one indentation level, `child`: align to"
- the column of the arguments
- If the trailing parenthesis must be 'dangled' on its on
- 'line, then align it to this reference: `prefix`: the start'
- 'of the statement, `prefix-indent`: the start of the'
- 'statement, plus one indentation level, `child`: align to'
- the column of the arguments
dangle_align: prefix
_help_min_prefix_chars:
- If the statement spelling length (including space and
- parenthesis) is smaller than this amount, then force reject
- nested layouts.
- If the statement spelling length (including space and
- parenthesis) is smaller than this amount, then force reject
- nested layouts.
min_prefix_chars: 4
_help_max_prefix_chars:
- If the statement spelling length (including space and
- parenthesis) is larger than the tab width by more than this
- amount, then force reject un-nested layouts.
- If the statement spelling length (including space and
- parenthesis) is larger than the tab width by more than this
- amount, then force reject un-nested layouts.
max_prefix_chars: 10
_help_max_lines_hwrap:
- If a candidate layout is wrapped horizontally but it exceeds
- this many lines, then reject the layout.
- If a candidate layout is wrapped horizontally but it exceeds
- this many lines, then reject the layout.
max_lines_hwrap: 2
_help_line_ending:
- What style line endings to use in the output.
- What style line endings to use in the output.
line_ending: unix
_help_command_case:
- Format command names consistently as 'lower' or 'upper' case
- Format command names consistently as 'lower' or 'upper' case
command_case: canonical
_help_keyword_case:
- Format keywords consistently as 'lower' or 'upper' case
- Format keywords consistently as 'lower' or 'upper' case
keyword_case: unchanged
_help_always_wrap:
- A list of command names which should always be wrapped
- A list of command names which should always be wrapped
always_wrap: []
_help_enable_sort:
- If true, the argument lists which are known to be sortable
- will be sorted lexicographicall
- If true, the argument lists which are known to be sortable
- will be sorted lexicographicall
enable_sort: true
_help_autosort:
- If true, the parsers may infer whether or not an argument
- list is sortable (without annotation).
- If true, the parsers may infer whether or not an argument
- list is sortable (without annotation).
autosort: true
_help_require_valid_layout:
- By default, if cmake-format cannot successfully fit
- everything into the desired linewidth it will apply the
- last, most aggressive attempt that it made. If this flag is
- True, however, cmake-format will print error, exit with non-
- zero status code, and write-out nothing
- By default, if cmake-format cannot successfully fit
- everything into the desired linewidth it will apply the
- last, most agressive attempt that it made. If this flag is
- True, however, cmake-format will print error, exit with non-
- zero status code, and write-out nothing
require_valid_layout: false
_help_layout_passes:
- A dictionary mapping layout nodes to a list of wrap
- decisions. See the documentation for more information.
- A dictionary mapping layout nodes to a list of wrap
- decisions. See the documentation for more information.
layout_passes: {}
_help_markup: Options affecting comment reflow and formatting.
markup:
_help_bullet_char:
- What character to use for bulleted lists
bullet_char: "*"
- What character to use for bulleted lists
bullet_char: '*'
_help_enum_char:
- What character to use as punctuation after numerals in an
- enumerated list
- What character to use as punctuation after numerals in an
- enumerated list
enum_char: .
_help_first_comment_is_literal:
- If comment markup is enabled, don't reflow the first comment
- block in each listfile. Use this to preserve formatting of
- your copyright/license statements.
- If comment markup is enabled, don't reflow the first comment
- block in each listfile. Use this to preserve formatting of
- your copyright/license statements.
first_comment_is_literal: false
_help_literal_comment_pattern:
- If comment markup is enabled, don't reflow any comment block
- which matches this (regex) pattern. Default is `None`
- (disabled).
- If comment markup is enabled, don't reflow any comment block
- which matches this (regex) pattern. Default is `None`
- (disabled).
literal_comment_pattern: null
_help_fence_pattern:
- Regular expression to match preformat fences in comments
- default= ``r'^\s*([`~]{3}[`~]*)(.*)$'``
- Regular expression to match preformat fences in comments
- default= ``r'^\s*([`~]{3}[`~]*)(.*)$'``
fence_pattern: ^\s*([`~]{3}[`~]*)(.*)$
_help_ruler_pattern:
- Regular expression to match rulers in comments default=
- '``r''^\s*[^\w\s]{3}.*[^\w\s]{3}$''``'
- Regular expression to match rulers in comments default=
- '``r''^\s*[^\w\s]{3}.*[^\w\s]{3}$''``'
ruler_pattern: ^\s*[^\w\s]{3}.*[^\w\s]{3}$
_help_explicit_trailing_pattern:
- If a comment line matches starts with this pattern then it
- is explicitly a trailing comment for the preceding
- argument. Default is '#<'
explicit_trailing_pattern: "#<"
- If a comment line matches starts with this pattern then it
- is explicitly a trailing comment for the preceeding
- argument. Default is '#<'
explicit_trailing_pattern: '#<'
_help_hashruler_min_length:
- If a comment line starts with at least this many consecutive
- hash characters, then don't lstrip() them off. This allows
- for lazy hash rulers where the first hash char is not
- separated by space
- If a comment line starts with at least this many consecutive
- hash characters, then don't lstrip() them off. This allows
- for lazy hash rulers where the first hash char is not
- separated by space
hashruler_min_length: 10
_help_canonicalize_hashrulers:
- If true, then insert a space between the first hash char and
- remaining hash chars in a hash ruler, and normalize its
- length to fill the column
- If true, then insert a space between the first hash char and
- remaining hash chars in a hash ruler, and normalize its
- length to fill the column
canonicalize_hashrulers: true
_help_enable_markup:
- enable comment markup parsing and reflow
- enable comment markup parsing and reflow
enable_markup: true
_help_lint: Options affecting the linter
lint:
_help_disabled_codes:
- a list of lint codes to disable
- a list of lint codes to disable
disabled_codes: []
_help_function_pattern:
- regular expression pattern describing valid function names
function_pattern: "[0-9a-z_]+"
- regular expression pattern describing valid function names
function_pattern: '[0-9a-z_]+'
_help_macro_pattern:
- regular expression pattern describing valid macro names
macro_pattern: "[0-9A-Z_]+"
- regular expression pattern describing valid macro names
macro_pattern: '[0-9A-Z_]+'
_help_global_var_pattern:
- regular expression pattern describing valid names for
- variables with global (cache) scope
global_var_pattern: "[A-Z][0-9A-Z_]+"
- regular expression pattern describing valid names for
- variables with global (cache) scope
global_var_pattern: '[A-Z][0-9A-Z_]+'
_help_internal_var_pattern:
- regular expression pattern describing valid names for
- variables with global scope (but internal semantic)
- regular expression pattern describing valid names for
- variables with global scope (but internal semantic)
internal_var_pattern: _[A-Z][0-9A-Z_]+
_help_local_var_pattern:
- regular expression pattern describing valid names for
- variables with local scope
local_var_pattern: "[a-z][a-z0-9_]+"
- regular expression pattern describing valid names for
- variables with local scope
local_var_pattern: '[a-z][a-z0-9_]+'
_help_private_var_pattern:
- regular expression pattern describing valid names for
- privatedirectory variables
- regular expression pattern describing valid names for
- privatedirectory variables
private_var_pattern: _[0-9a-z_]+
_help_public_var_pattern:
- regular expression pattern describing valid names for public
- directory variables
public_var_pattern: "[A-Z][0-9A-Z_]+"
- regular expression pattern describing valid names for public
- directory variables
public_var_pattern: '[A-Z][0-9A-Z_]+'
_help_argument_var_pattern:
- regular expression pattern describing valid names for
- function/macro arguments and loop variables.
argument_var_pattern: "[a-z][a-z0-9_]+"
- regular expression pattern describing valid names for
- function/macro arguments and loop variables.
argument_var_pattern: '[a-z][a-z0-9_]+'
_help_keyword_pattern:
- regular expression pattern describing valid names for
- keywords used in functions or macros
keyword_pattern: "[A-Z][0-9A-Z_]+"
- regular expression pattern describing valid names for
- keywords used in functions or macros
keyword_pattern: '[A-Z][0-9A-Z_]+'
_help_max_conditionals_custom_parser:
- In the heuristic for C0201, how many conditionals to match
- within a loop in before considering the loop a parser.
- In the heuristic for C0201, how many conditionals to match
- within a loop in before considering the loop a parser.
max_conditionals_custom_parser: 2
_help_min_statement_spacing:
- Require at least this many newlines between statements
- Require at least this many newlines between statements
min_statement_spacing: 1
_help_max_statement_spacing:
- Require no more than this many newlines between statements
- Require no more than this many newlines between statements
max_statement_spacing: 2
max_returns: 6
max_branches: 12
@@ -226,20 +226,20 @@ lint:
_help_encode: Options affecting file encoding
encode:
_help_emit_byteorder_mark:
- If true, emit the unicode byte-order mark (BOM) at the start
- of the file
- If true, emit the unicode byte-order mark (BOM) at the start
- of the file
emit_byteorder_mark: false
_help_input_encoding:
- Specify the encoding of the input file. Defaults to utf-8
- Specify the encoding of the input file. Defaults to utf-8
input_encoding: utf-8
_help_output_encoding:
- Specify the encoding of the output file. Defaults to utf-8.
- Note that cmake only claims to support utf-8 so be careful
- when using anything else
- Specify the encoding of the output file. Defaults to utf-8.
- Note that cmake only claims to support utf-8 so be careful
- when using anything else
output_encoding: utf-8
_help_misc: Miscellaneous configurations options.
misc:
_help_per_command:
- A dictionary containing any per-command configuration
- overrides. Currently only `command_case` is supported.
- A dictionary containing any per-command configuration
- overrides. Currently only `command_case` is supported.
per_command: {}
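
Everything above is cmake-format configuration (the _help_* keys are the tool's self-documenting dump). A sketch of applying it, assuming the conventional .cmake-format.yaml file name:

```bash
# Re-format listfiles in place with the project configuration.
cmake-format -c .cmake-format.yaml -i CMakeLists.txt cmake/*.cmake

# The lint: section is consumed by the companion linter from the same package.
cmake-lint -c .cmake-format.yaml CMakeLists.txt
```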


@@ -9,14 +9,3 @@ coverage:
default:
target: 20% # Need to bump this number https://docs.codecov.com/docs/commit-status#patch-status
threshold: 2%
# `codecov/codecov-action` reruns `gcovr` if build files present
# That's why we run it in a separate workflow
# This ignore list is not currently used
#
# More info: https://github.com/XRPLF/clio/pull/2066
ignore:
- "tests"
- "src/data/cassandra/"
- "src/data/CassandraBackend.hpp"
- "src/data/BackendFactory.*"


@@ -2,7 +2,7 @@
# Note: This script is intended to be run from the root of the repository.
#
# Not really a hook but should be used to check the completeness of documentation for added code, otherwise CI will come for you.
# Not really a hook but should be used to check the completness of documentation for added code, otherwise CI will come for you.
# It's good to have /tmp as the output so that consecutive runs are fast but no clutter in the repository.
echo "+ Checking documentation..."
@@ -15,18 +15,12 @@ DOCDIR=${TMPDIR}/out
# Check doxygen is at all installed
if [ -z "$DOXYGEN" ]; then
if [[ "${CI}" == "true" ]]; then
# If we are in CI, we should fail the check
echo "doxygen not found in CI, please install it"
exit 1
fi
# No hard error if doxygen is not installed yet
cat <<EOF
WARNING
-----------------------------------------------------------------------------
'doxygen' is required to check documentation.
'doxygen' is required to check documentation.
Please install it for next time.
Your changes may fail to pass CI once pushed.

.githooks/check-format (Executable file, +119 lines)

@@ -0,0 +1,119 @@
#!/bin/bash
# Note: This script is intended to be run from the root of the repository.
#
# This script checks the format of the code and cmake files.
# In many cases it will automatically fix the issues and abort the commit.
no_formatted_directories_staged() {
staged_directories=$(git diff-index --cached --name-only HEAD | awk -F/ '{print $1}')
for sd in $staged_directories; do
if [[ "$sd" =~ ^(benchmark|cmake|src|tests)$ ]]; then
return 1
fi
done
return 0
}
if no_formatted_directories_staged ; then
exit 0
fi
echo "+ Checking code format..."
# paths to check and re-format
sources="src tests"
formatter="clang-format -i"
version=$($formatter --version | grep -o '[0-9\.]*')
if [[ "19.0.0" > "$version" ]]; then
cat <<EOF
ERROR
-----------------------------------------------------------------------------
A minimum of version 19 of `which clang-format` is required.
Your version is $version.
Please fix paths and run again.
-----------------------------------------------------------------------------
EOF
exit 3
fi
# check there is no .h headers, only .hpp
wrong_headers=$(find $sources -name "*.h" | sed 's/^/ - /')
if [[ ! -z "$wrong_headers" ]]; then
cat <<EOF
ERROR
-----------------------------------------------------------------------------
Found .h headers in the source code. Please rename them to .hpp:
$wrong_headers
-----------------------------------------------------------------------------
EOF
exit 2
fi
if ! command -v cmake-format &> /dev/null; then
cat <<EOF
ERROR
-----------------------------------------------------------------------------
'cmake-format' is required to run this script.
Please install it and run again.
-----------------------------------------------------------------------------
EOF
exit 3
fi
function grep_code {
grep -l "${1}" ${sources} -r --include \*.hpp --include \*.cpp
}
GNU_SED=$(sed --version 2>&1 | grep -q 'GNU' && echo true || echo false)
if [[ "$GNU_SED" == "false" ]]; then # macOS sed
# make all includes to be <...> style
grep_code '#include ".*"' | xargs sed -i '' -E 's|#include "(.*)"|#include <\1>|g'
# make local includes to be "..." style
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | tr '\n' '|' | sed 's/|$//' | sed 's/|/\\|/g')
grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i '' -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
else
# make all includes to be <...> style
grep_code '#include ".*"' | xargs sed -i -E 's|#include "(.*)"|#include <\1>|g'
# make local includes to be "..." style
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | paste -sd '|' | sed 's/|/\\|/g')
grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
fi
cmake_dirs=$(echo cmake $sources)
cmake_files=$(find $cmake_dirs -type f \( -name "CMakeLists.txt" -o -name "*.cmake" \))
cmake_files=$(echo $cmake_files ./CMakeLists.txt)
first=$(git diff $sources $cmake_files)
find $sources -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.ipp' \) -print0 | xargs -0 $formatter
cmake-format -i $cmake_files
second=$(git diff $sources $cmake_files)
changes=$(diff <(echo "$first") <(echo "$second"))
changes_number=$(echo -n "$changes" | wc -l | sed -e 's/^[[:space:]]*//')
if [ "$changes_number" != "0" ]; then
cat <<\EOF
WARNING
-----------------------------------------------------------------------------
Automatically re-formatted code with 'clang-format' - commit was aborted.
Please manually add any updated files and commit again.
-----------------------------------------------------------------------------
EOF
if [[ "$1" == "--diff" ]]; then
echo "$changes"
fi
exit 1
fi
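
The sed pipeline in the middle of this script normalizes include style in two passes: every quoted include first becomes angle-bracketed, then includes under the top-level src/ directories are flipped back to quotes. A minimal sketch of the effect, assuming GNU sed and a hypothetical util/ source directory:

```bash
printf '%s\n' '#include "util/Assert.hpp"' '#include "boost/json.hpp"' > /tmp/demo.cpp

# Pass 1: all quoted includes become <...>.
sed -i -E 's|#include "(.*)"|#include <\1>|g' /tmp/demo.cpp
# Pass 2: includes under local src/ dirs (here just util) return to "..." form.
sed -i -E 's|#include <((util)/.*)>|#include "\1"|g' /tmp/demo.cpp

cat /tmp/demo.cpp
# #include "util/Assert.hpp"
# #include <boost/json.hpp>
```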

.githooks/post-checkout (Executable file, +3 lines)

@@ -0,0 +1,3 @@
#!/bin/sh
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-checkout' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs post-checkout "$@"

.githooks/post-commit (Executable file, +3 lines)

@@ -0,0 +1,3 @@
#!/bin/sh
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-commit' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs post-commit "$@"

.githooks/post-merge (Executable file, +3 lines)

@@ -0,0 +1,3 @@
#!/bin/sh
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-merge' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs post-merge "$@"
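
The post-checkout, post-commit, and post-merge files above match the stock wrappers that git lfs install writes. A quick local sanity check that the hooks can find the binary they delegate to:

```bash
# Both commands should succeed on a machine prepared for this repository.
command -v git-lfs
git lfs version
```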

.githooks/pre-commit (Executable file, +7 lines)

@@ -0,0 +1,7 @@
#!/bin/bash
# This script is intended to be run from the root of the repository.
source .githooks/check-format
source .githooks/check-docs
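
Since these hooks live in .githooks rather than .git/hooks, they only fire once core.hooksPath points at that directory (the hook messages above reference the same setting):

```bash
# Run once per clone; afterwards pre-commit sources check-format and check-docs.
git config core.hooksPath .githooks
```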


@@ -39,14 +39,20 @@ verify_tag_signed() {
fi
}
# Check some things if we're pushing a branch called "release/"
if echo "$PRE_COMMIT_REMOTE_BRANCH" | grep ^refs\/heads\/release\/ &> /dev/null ; then
version=$(git tag --points-at HEAD)
echo "Looks like you're trying to push a $version release..."
echo "Making sure you've signed and tagged it."
if verify_commit_signed && verify_tag && verify_tag_signed ; then
: # Ok, I guess you can push
else
exit 1
while read local_ref local_oid remote_ref remote_oid; do
# Check some things if we're pushing a branch called "release/"
if echo "$remote_ref" | grep ^refs\/heads\/release\/ &> /dev/null ; then
version=$(git tag --points-at HEAD)
echo "Looks like you're trying to push a $version release..."
echo "Making sure you've signed and tagged it."
if verify_commit_signed && verify_tag && verify_tag_signed ; then
: # Ok, I guess you can push
else
exit 1
fi
fi
fi
done
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'pre-push' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
git lfs pre-push "$@"
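
The while read variant reflects how Git actually invokes pre-push hooks: one line per ref being pushed, on stdin. A sketch of the input the loop consumes (values are hypothetical):

```bash
# Git writes lines of the form:
#   <local ref> <local oid> <remote ref> <remote oid>
# e.g. refs/heads/release/2.5.0 1a2b3c4d... refs/heads/release/2.5.0 00000000...
while read -r local_ref local_oid remote_ref remote_oid; do
    echo "pushing $local_ref ($local_oid) -> $remote_ref"
done
```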


@@ -3,34 +3,29 @@ name: Bug report
about: Create a report to help us improve
title: "[Title with short description] (Version: [Clio version])"
labels: bug
assignees: ""
assignees: ''
---
<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Kindly refrain from posting any credentials or sensitive information in this issue -->
## Issue Description
<!-- Provide a summary for your issue/bug. -->
## Steps to Reproduce
<!-- List in detail the exact steps to reproduce the unexpected behavior of the software. -->
## Expected Result
<!-- Explain in detail what behavior you expected to happen. -->
## Actual Result
<!-- Explain in detail what behavior actually happened. -->
## Environment
<!-- Please describe your environment setup (such as Ubuntu 20.04.2 with Boost 1.82). -->
<!-- Please use the version returned by './clio_server --version' as the version number -->
## Supporting Files
<!-- If you have supporting files such as a log, feel free to post a link here using Github Gist. -->
<!-- Consider adding configuration files with private information removed via Github Gist. -->


@@ -3,24 +3,21 @@ name: Feature request
about: Suggest an idea for this project
title: "[Title with short description] (Version: [Clio version])"
labels: enhancement
assignees: ""
assignees: ''
---
<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Kindly refrain from posting any credentials or sensitive information in this issue -->
## Summary
<!-- Provide a summary to the feature request -->
## Motivation
<!-- Why do we need this feature? -->
## Solution
<!-- What is the solution? -->
## Paths Not Taken
<!-- What other alternatives have been considered? -->


@@ -3,7 +3,8 @@ name: Question
about: A question in form of an issue
title: "[Title with short description] (Version: Clio version)"
labels: question
assignees: ""
assignees: ''
---
<!-- Please search existing issues to avoid creating duplicates. -->
@@ -11,9 +12,7 @@ assignees: ""
<!-- Kindly refrain from posting any credentials or sensitive information in this issue -->
## Question
<!-- Your question -->
## Paths Not Taken
<!-- If applicable, what other alternatives have been considered? -->


@@ -1,15 +1,13 @@
name: Build clio
description: Build clio in build directory
inputs:
targets:
description: Space-separated build target names
target:
description: Build target name
default: all
subtract_threads:
description: An option for the action get_number_of_threads. See get_number_of_threads
substract_threads:
description: An option for the action get_number_of_threads. See get_number_of_threads
required: true
default: "0"
default: '0'
runs:
using: composite
steps:
@@ -17,13 +15,10 @@ runs:
uses: ./.github/actions/get_number_of_threads
id: number_of_threads
with:
subtract_threads: ${{ inputs.subtract_threads }}
substract_threads: ${{ inputs.substract_threads }}
- name: Build targets
- name: Build Clio
shell: bash
run: |
cd build
cmake \
--build . \
--parallel "${{ steps.number_of_threads.outputs.threads_number }}" \
--target ${{ inputs.targets }}
cmake --build . --parallel ${{ steps.number_of_threads.outputs.threads_number }} --target ${{ inputs.target }}
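
One variant of this action forwards a space-separated targets input straight to CMake, which accepts several names after --target (CMake 3.15+). A hedged example of the resulting call:

```bash
# Build two targets in one invocation with 8 parallel jobs.
cmake --build build --parallel 8 --target clio_server clio_tests
```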


@@ -1,12 +1,8 @@
name: Build and push Docker image
description: Build and push Docker image to DockerHub and GitHub Container Registry
inputs:
images:
description: Name of the images to use as a base name
required: true
dockerhub_repo:
description: DockerHub repository name
image_name:
description: Name of the image to build
required: true
push_image:
description: Whether to push the image to the registry (true/false)
@@ -23,50 +19,48 @@ inputs:
description:
description: Short description of the image
required: true
runs:
using: composite
steps:
- name: Login to DockerHub
if: ${{ inputs.push_image == 'true' }}
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_PW }}
- name: Login to DockerHub
if: ${{ inputs.push_image == 'true' }}
uses: docker/login-action@v3
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_PW }}
- name: Login to GitHub Container Registry
if: ${{ inputs.push_image == 'true' }}
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ env.GITHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: ${{ inputs.push_image == 'true' }}
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ env.GITHUB_TOKEN }}
- uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
with:
cache-image: false
- uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- uses: docker/setup-qemu-action@v3
- uses: docker/setup-buildx-action@v3
- uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
id: meta
with:
images: ${{ inputs.images }}
tags: ${{ inputs.tags }}
- uses: docker/metadata-action@v5
id: meta
with:
images: ${{ inputs.image_name }}
tags: ${{ inputs.tags }}
- name: Build and push
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
with:
context: ${{ inputs.directory }}
platforms: ${{ inputs.platforms }}
push: ${{ inputs.push_image == 'true' }}
tags: ${{ steps.meta.outputs.tags }}
- name: Build and push
uses: docker/build-push-action@v5
with:
context: ${{ inputs.directory }}
platforms: ${{ inputs.platforms }}
push: ${{ inputs.push_image == 'true' }}
tags: ${{ steps.meta.outputs.tags }}
- name: Update DockerHub description
if: ${{ inputs.push_image == 'true' }}
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_PW }}
repository: ${{ inputs.image_name }}
short-description: ${{ inputs.description }}
readme-filepath: ${{ inputs.directory }}/README.md
- name: Update DockerHub description
if: ${{ inputs.push_image == 'true' }}
uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_PW }}
repository: ${{ inputs.dockerhub_repo }}
short-description: ${{ inputs.description }}
readme-filepath: ${{ inputs.directory }}/README.md


@@ -1,26 +1,21 @@
name: Generate code coverage report
description: Run tests, generate code coverage report and upload it to codecov.io
runs:
using: composite
steps:
- name: Run tests
shell: bash
run: |
build/clio_tests
# Please keep exclude list in sync with .codecov.yml
- name: Run gcovr
shell: bash
run: |
gcovr \
-e tests \
gcovr -e tests \
-e src/data/cassandra \
-e src/data/CassandraBackend.hpp \
-e 'src/data/BackendFactory.*' \
--xml build/coverage_report.xml \
-j8 --exclude-throw-branches
--xml build/coverage_report.xml -j8 --exclude-throw-branches
- name: Archive coverage report
uses: actions/upload-artifact@v4


@@ -1,6 +1,5 @@
name: Create an issue
description: Create an issue
inputs:
title:
description: Issue title
@@ -11,31 +10,24 @@ inputs:
labels:
description: Comma-separated list of labels
required: true
default: "bug"
default: 'bug'
assignees:
description: Comma-separated list of assignees
required: true
default: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
default: 'godexsoft,kuznetsss,PeterChen13579'
outputs:
created_issue_id:
description: Created issue id
value: ${{ steps.create_issue.outputs.created_issue }}
runs:
using: composite
steps:
- name: Create an issue
id: create_issue
shell: bash
run: |
echo -e '${{ inputs.body }}' > issue.md
gh issue create \
--assignee '${{ inputs.assignees }}' \
--label '${{ inputs.labels }}' \
--title '${{ inputs.title }}' \
--body-file ./issue.md \
> create_issue.log
created_issue="$(sed 's|.*/||' create_issue.log)"
echo "created_issue=$created_issue" >> $GITHUB_OUTPUT
rm create_issue.log issue.md
- name: Create an issue
id: create_issue
shell: bash
run: |
echo -e '${{ inputs.body }}' > issue.md
gh issue create --assignee '${{ inputs.assignees }}' --label '${{ inputs.labels }}' --title '${{ inputs.title }}' --body-file ./issue.md > create_issue.log
created_issue=$(cat create_issue.log | sed 's|.*/||')
echo "created_issue=$created_issue" >> $GITHUB_OUTPUT
rm create_issue.log issue.md


@@ -1,6 +1,5 @@
name: Run conan and cmake
description: Run conan and cmake
inputs:
conan_profile:
description: Conan profile name
@@ -8,37 +7,27 @@ inputs:
conan_cache_hit:
description: Whether conan cache has been downloaded
required: true
default: "false"
default: 'false'
build_type:
description: Build type for third-party libraries and clio. Could be 'Release', 'Debug'
required: true
default: "Release"
default: 'Release'
build_integration_tests:
description: Whether to build integration tests
required: true
default: "true"
default: 'true'
code_coverage:
description: Whether conan's coverage option should be on or not
required: true
default: "false"
default: 'false'
static:
description: Whether Clio is to be statically linked
required: true
default: "false"
default: 'false'
sanitizer:
description: Sanitizer to use
required: true
default: "false"
choices:
- "false"
- "tsan"
- "asan"
- "ubsan"
time_trace:
description: Whether to enable compiler trace reports
required: true
default: "false"
default: 'false' # false, tsan, asan or ubsan
runs:
using: composite
steps:
@@ -53,36 +42,19 @@ runs:
CODE_COVERAGE: "${{ inputs.code_coverage == 'true' && 'True' || 'False' }}"
STATIC_OPTION: "${{ inputs.static == 'true' && 'True' || 'False' }}"
INTEGRATION_TESTS_OPTION: "${{ inputs.build_integration_tests == 'true' && 'True' || 'False' }}"
TIME_TRACE: "${{ inputs.time_trace == 'true' && 'True' || 'False' }}"
run: |
cd build
conan \
install .. \
-of . \
-b $BUILD_OPTION \
-s build_type="${{ inputs.build_type }}" \
-o clio:static="${STATIC_OPTION}" \
-o clio:tests=True \
-o clio:integration_tests="${INTEGRATION_TESTS_OPTION}" \
-o clio:lint=False \
-o clio:coverage="${CODE_COVERAGE}" \
-o clio:time_trace="${TIME_TRACE}" \
--profile "${{ inputs.conan_profile }}"
conan install .. -of . -b $BUILD_OPTION -s build_type=${{ inputs.build_type }} -o clio:static="${STATIC_OPTION}" -o clio:tests=True -o clio:integration_tests="${INTEGRATION_TESTS_OPTION}" -o clio:lint=False -o clio:coverage="${CODE_COVERAGE}" --profile ${{ inputs.conan_profile }}
- name: Run cmake
shell: bash
env:
BUILD_TYPE: "${{ inputs.build_type }}"
SANITIZER_OPTION: |-
SANITIZER_OPTION: |
${{ inputs.sanitizer == 'tsan' && '-Dsan=thread' ||
inputs.sanitizer == 'ubsan' && '-Dsan=undefined' ||
inputs.sanitizer == 'asan' && '-Dsan=address' ||
'' }}
run: |
cd build
cmake \
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
"${SANITIZER_OPTION}" \
.. \
-G Ninja
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" ${SANITIZER_OPTION} .. -G Ninja
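
For a concrete reading of the ternary chain above: with sanitizer set to tsan and a Release build, SANITIZER_OPTION resolves to -Dsan=thread, so the configure step effectively runs:

```bash
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
      -DCMAKE_BUILD_TYPE=Release \
      -Dsan=thread \
      .. -G Ninja
```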


@@ -1,16 +1,14 @@
name: Get number of threads
description: Determines number of threads to use on macOS and Linux
inputs:
subtract_threads:
description: How many threads to subtract from the calculated number
substract_threads:
description: How many threads to substract from the calculated number
required: true
default: "0"
default: '0'
outputs:
threads_number:
description: Number of threads to use
value: ${{ steps.number_of_threads_export.outputs.num }}
runs:
using: composite
steps:
@@ -30,7 +28,7 @@ runs:
id: number_of_threads_export
shell: bash
run: |
num_of_threads="${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}"
shift_by="${{ inputs.subtract_threads }}"
shifted="$((num_of_threads - shift_by))"
num_of_threads=${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}
shift_by=${{ inputs.substract_threads }}
shifted=$((num_of_threads - shift_by))
echo "num=$(( shifted > 1 ? shifted : 1 ))" >> $GITHUB_OUTPUT


@@ -1,11 +1,9 @@
name: Git common ancestor
description: Find the closest common commit
outputs:
commit:
description: Hash of commit
value: ${{ steps.find_common_ancestor.outputs.commit }}
runs:
using: composite
steps:
@@ -13,4 +11,4 @@ runs:
id: find_common_ancestor
shell: bash
run: |
echo "commit=\"$(git merge-base --fork-point origin/develop)\"" >> $GITHUB_OUTPUT
echo "commit=$(git merge-base --fork-point origin/develop)" >> $GITHUB_OUTPUT


@@ -1,11 +1,9 @@
name: Prepare runner
description: Install packages, set environment variables, create directories
inputs:
disable_ccache:
description: Whether ccache should be disabled
required: true
runs:
using: composite
steps:
@@ -13,42 +11,39 @@ runs:
if: ${{ runner.os == 'macOS' }}
shell: bash
run: |
brew install \
bison \
ca-certificates \
ccache \
clang-build-analyzer \
conan@1 \
gh \
jq \
llvm@14 \
ninja \
pkg-config
echo "/opt/homebrew/opt/conan@1/bin" >> $GITHUB_PATH
brew install llvm@14 pkg-config ninja bison ccache jq gh conan@1 ca-certificates
echo "/opt/homebrew/opt/conan@1/bin" >> $GITHUB_PATH
- name: Install CMake 3.31.6 on mac
if: ${{ runner.os == 'macOS' }}
shell: bash
run: |
# Uninstall any existing cmake
brew uninstall cmake --ignore-dependencies || true
# Download specific cmake formula
FORMULA_URL="https://raw.githubusercontent.com/Homebrew/homebrew-core/b4e46db74e74a8c1650b38b1da222284ce1ec5ce/Formula/c/cmake.rb"
FORMULA_EXPECTED_SHA256="c7ec95d86f0657638835441871e77541165e0a2581b53b3dd657cf13ad4228d4"
mkdir -p /tmp/homebrew-formula
curl -s -L "$FORMULA_URL" -o /tmp/homebrew-formula/cmake.rb
echo "$FORMULA_EXPECTED_SHA256 /tmp/homebrew-formula/cmake.rb" | shasum -a 256 -c
# Install cmake from the specific formula with force flag
brew install --formula --force /tmp/homebrew-formula/cmake.rb
# Uninstall any existing cmake
brew uninstall cmake --ignore-dependencies || true
# Download specific cmake formula
FORMULA_URL="https://raw.githubusercontent.com/Homebrew/homebrew-core/b4e46db74e74a8c1650b38b1da222284ce1ec5ce/Formula/c/cmake.rb"
FORMULA_EXPECTED_SHA256="c7ec95d86f0657638835441871e77541165e0a2581b53b3dd657cf13ad4228d4"
mkdir -p /tmp/homebrew-formula
curl -s -L $FORMULA_URL -o /tmp/homebrew-formula/cmake.rb
# Verify the downloaded formula
ACTUAL_SHA256=$(shasum -a 256 /tmp/homebrew-formula/cmake.rb | cut -d ' ' -f 1)
if [ "$ACTUAL_SHA256" != "$FORMULA_EXPECTED_SHA256" ]; then
echo "Error: Formula checksum mismatch"
echo "Expected: $FORMULA_EXPECTED_SHA256"
echo "Actual: $ACTUAL_SHA256"
exit 1
fi
# Install cmake from the specific formula with force flag
brew install --force /tmp/homebrew-formula/cmake.rb
- name: Fix git permissions on Linux
if: ${{ runner.os == 'Linux' }}
shell: bash
run: git config --global --add safe.directory "$PWD"
run: git config --global --add safe.directory $PWD
- name: Set env variables for macOS
if: ${{ runner.os == 'macOS' }}
@@ -73,5 +68,7 @@ runs:
- name: Create directories
shell: bash
run: |
mkdir -p "$CCACHE_DIR"
mkdir -p "$CONAN_USER_HOME/.conan"
mkdir -p $CCACHE_DIR
mkdir -p $CONAN_USER_HOME/.conan
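
Both variants of the CMake step pin the Homebrew formula by SHA-256; one delegates verification to shasum -c, which reads hash/filename pairs in the same format the tool emits and exits non-zero on mismatch:

```bash
expected="c7ec95d86f0657638835441871e77541165e0a2581b53b3dd657cf13ad4228d4"
# Note the two spaces between hash and path, as in shasum's own output.
echo "$expected  /tmp/homebrew-formula/cmake.rb" | shasum -a 256 -c
```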


@@ -1,6 +1,5 @@
name: Restore cache
description: Find and restores conan and ccache cache
inputs:
conan_dir:
description: Path to .conan directory
@@ -18,7 +17,7 @@ inputs:
code_coverage:
description: Whether code coverage is on
required: true
default: "false"
default: 'false'
outputs:
conan_hash:
description: Hash to use as a part of conan cache key
@@ -29,7 +28,6 @@ outputs:
ccache_cache_hit:
description: True if ccache cache has been downloaded
value: ${{ steps.ccache_cache.outputs.cache-hit }}
runs:
using: composite
steps:
@@ -42,9 +40,9 @@ runs:
shell: bash
run: |
conan info . -j info.json -o clio:tests=True
packages_info="$(cat info.json | jq '.[] | "\(.display_name): \(.id)"' | grep -v 'clio')"
packages_info=$(cat info.json | jq '.[] | "\(.display_name): \(.id)"' | grep -v 'clio')
echo "$packages_info"
hash="$(echo "$packages_info" | shasum -a 256 | cut -d ' ' -f 1)"
hash=$(echo "$packages_info" | shasum -a 256 | cut -d ' ' -f 1)
rm info.json
echo "hash=$hash" >> $GITHUB_OUTPUT


@@ -1,6 +1,5 @@
name: Save cache
description: Save conan and ccache cache for develop branch
inputs:
conan_dir:
description: Path to .conan directory
@@ -29,8 +28,7 @@ inputs:
code_coverage:
description: Whether code coverage is on
required: true
default: "false"
default: 'false'
runs:
using: composite
steps:
@@ -57,3 +55,5 @@ runs:
with:
path: ${{ inputs.ccache_dir }}
key: clio-ccache-${{ runner.os }}-${{ inputs.build_type }}${{ inputs.code_coverage == 'true' && '-code_coverage' || '' }}-${{ inputs.conan_profile }}-develop-${{ steps.git_common_ancestor.outputs.commit }}


@@ -1,33 +1,52 @@
name: Setup conan
description: Setup conan profile and artifactory
inputs:
conan_profile:
description: Conan profile name
required: true
outputs:
conan_profile:
description: Created conan profile name
value: ${{ steps.conan_export_output.outputs.conan_profile }}
runs:
using: composite
steps:
- name: Create conan profile on macOS
- name: On mac
if: ${{ runner.os == 'macOS' }}
shell: bash
env:
CONAN_PROFILE: ${{ inputs.conan_profile }}
CONAN_PROFILE: apple_clang_16
id: conan_setup_mac
run: |
echo "Creating \"$CONAN_PROFILE\" conan profile"
conan profile new "$CONAN_PROFILE" --detect --force
conan profile update settings.compiler.libcxx=libc++ "$CONAN_PROFILE"
conan profile update settings.compiler.cppstd=20 "$CONAN_PROFILE"
conan profile update env.CXXFLAGS=-DBOOST_ASIO_DISABLE_CONCEPTS "$CONAN_PROFILE"
conan profile update "conf.tools.build:cxxflags+=[\"-DBOOST_ASIO_DISABLE_CONCEPTS\"]" "$CONAN_PROFILE"
echo "Creating $CONAN_PROFILE conan profile"
conan profile new $CONAN_PROFILE --detect --force
conan profile update settings.compiler.libcxx=libc++ $CONAN_PROFILE
conan profile update settings.compiler.cppstd=20 $CONAN_PROFILE
conan profile update env.CXXFLAGS=-DBOOST_ASIO_DISABLE_CONCEPTS $CONAN_PROFILE
conan profile update "conf.tools.build:cxxflags+=[\"-DBOOST_ASIO_DISABLE_CONCEPTS\"]" $CONAN_PROFILE
echo "created_conan_profile=$CONAN_PROFILE" >> $GITHUB_OUTPUT
- name: On linux
if: ${{ runner.os == 'Linux' }}
shell: bash
id: conan_setup_linux
run: |
echo "created_conan_profile=${{ inputs.conan_profile }}" >> $GITHUB_OUTPUT
- name: Export output variable
shell: bash
id: conan_export_output
run: |
echo "conan_profile=${{ steps.conan_setup_mac.outputs.created_conan_profile || steps.conan_setup_linux.outputs.created_conan_profile }}" >> $GITHUB_OUTPUT
- name: Add conan-non-prod artifactory
shell: bash
run: |
if [[ -z "$(conan remote list | grep conan-non-prod)" ]]; then
if [[ -z $(conan remote list | grep conan-non-prod) ]]; then
echo "Adding conan-non-prod"
conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
else
echo "Conan-non-prod is available"
fi
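
To confirm what the macOS profile step produced, Conan 1.x (matching the conan@1 installed elsewhere in this diff) can dump the profile back:

```bash
# Shows the detected settings plus the libc++/C++20/ASIO tweaks applied above.
conan profile show apple_clang_16
```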

.github/actions/test/Dockerfile (vendored, new file, +6 lines)

@@ -0,0 +1,6 @@
FROM cassandra:4.0.4
RUN apt-get update && apt-get install -y postgresql
COPY entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

.github/actions/test/entrypoint.sh (vendored, Executable file, +8 lines)

@@ -0,0 +1,8 @@
#!/bin/bash
pg_ctlcluster 12 main start
su postgres -c"psql -c\"alter user postgres with password 'postgres'\""
su cassandra -c "/opt/cassandra/bin/cassandra -R"
sleep 90
chmod +x ./clio_tests
./clio_tests
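
A hedged sketch of how this test image might be used; the image tag and mount path are assumptions, and the entrypoint expects clio_tests in the working directory:

```bash
docker build -t clio-test-env .github/actions/test
# Mount a directory containing the clio_tests binary and run it against
# the Cassandra/PostgreSQL services the entrypoint starts.
docker run --rm -v "$PWD/build:/work" -w /work clio-test-env
```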

.github/dependabot.yml (vendored, 161 lines changed)

@@ -1,157 +1,16 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: /
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: weekly
day: monday
interval: "weekly"
day: "monday"
time: "04:00"
timezone: Etc/GMT
timezone: "Etc/GMT"
reviewers:
- XRPLF/clio-dev-team
- "godexsoft"
- "kuznetsss"
- "PeterChen13579"
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/build_clio/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/build_docker_image/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/code_coverage/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/create_issue/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/generate/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/get_number_of_threads/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/git_common_ancestor/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/prepare_runner/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/restore_cache/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/save_cache/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
- package-ecosystem: github-actions
directory: .github/actions/setup_conan/
schedule:
interval: weekly
day: monday
time: "04:00"
timezone: Etc/GMT
reviewers:
- XRPLF/clio-dev-team
commit-message:
prefix: "ci: [DEPENDABOT] "
target-branch: develop
prefix: "[CI] "
target-branch: "develop"


@@ -1,120 +1,170 @@
name: Build
on:
push:
branches: [master, release/*, develop]
pull_request:
branches: [master, release/*, develop]
paths:
- .github/workflows/build.yml
- .github/workflows/build_and_test.yml
- .github/workflows/build_impl.yml
- .github/workflows/test_impl.yml
- .github/workflows/upload_coverage_report.yml
- ".github/actions/**"
- "!.github/actions/build_docker_image/**"
- "!.github/actions/create_issue/**"
- CMakeLists.txt
- "cmake/**"
- "src/**"
- "tests/**"
- docs/config-description.md
workflow_dispatch:
concurrency:
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build-and-test:
name: Build and Test
check_format:
name: Check format
runs-on: ubuntu-latest
container:
image: rippleci/clio_ci:latest
steps:
- name: Fix git permissions on Linux
shell: bash
run: git config --global --add safe.directory $PWD
- uses: actions/checkout@v4
- name: Run formatters
id: run_formatters
run: |
./.githooks/check-format --diff
shell: bash
check_docs:
name: Check documentation
runs-on: ubuntu-latest
container:
image: rippleci/clio_ci:latest
steps:
- uses: actions/checkout@v4
- name: Run linter
id: run_linter
run: |
./.githooks/check-docs
shell: bash
build:
name: Build
needs:
- check_format
- check_docs
strategy:
fail-fast: false
matrix:
os: [heavy]
conan_profile: [gcc, clang]
build_type: [Release, Debug]
container: ['{ "image": "ghcr.io/xrplf/clio-ci:latest" }']
static: [true]
include:
- os: macos15
conan_profile: default_apple_clang
- os: heavy
conan_profile: gcc
build_type: Release
container: ""
container: '{ "image": "rippleci/clio_ci:latest" }'
code_coverage: false
static: true
- os: heavy
conan_profile: gcc
build_type: Debug
container: '{ "image": "rippleci/clio_ci:latest" }'
code_coverage: true
static: true
- os: heavy
conan_profile: clang
build_type: Release
container: '{ "image": "rippleci/clio_ci:latest" }'
code_coverage: false
static: true
- os: heavy
conan_profile: clang
build_type: Debug
container: '{ "image": "rippleci/clio_ci:latest" }'
code_coverage: false
static: true
- os: macos15
build_type: Release
code_coverage: false
static: false
uses: ./.github/workflows/build_and_test.yml
uses: ./.github/workflows/build_impl.yml
with:
runs_on: ${{ matrix.os }}
container: ${{ matrix.container }}
conan_profile: ${{ matrix.conan_profile }}
build_type: ${{ matrix.build_type }}
code_coverage: ${{ matrix.code_coverage }}
static: ${{ matrix.static }}
run_unit_tests: true
run_integration_tests: false
upload_clio_server: true
unit_tests: true
integration_tests: true
clio_server: true
code_coverage:
name: Run Code Coverage
uses: ./.github/workflows/build_impl.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
conan_profile: gcc
build_type: Debug
disable_cache: false
code_coverage: true
static: true
upload_clio_server: false
targets: all
sanitizer: "false"
analyze_build_time: false
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
check_config:
name: Check Config Description
needs: build-and-test
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:latest
test:
name: Run Tests
needs: build
strategy:
fail-fast: false
matrix:
include:
- os: heavy
conan_profile: gcc
build_type: Release
container:
image: rippleci/clio_ci:latest
- os: heavy
conan_profile: clang
build_type: Release
container:
image: rippleci/clio_ci:latest
- os: heavy
conan_profile: clang
build_type: Debug
container:
image: rippleci/clio_ci:latest
- os: macos15
conan_profile: apple_clang_16
build_type: Release
runs-on: ${{ matrix.os }}
container: ${{ matrix.container }}
steps:
- uses: actions/checkout@v4
- name: Clean workdir
if: ${{ runner.os == 'macOS' }}
uses: kuznetsss/workspace-cleanup@1.0
- uses: actions/download-artifact@v4
with:
name: clio_server_Linux_Release_gcc
name: clio_tests_${{ runner.os }}_${{ matrix.build_type }}_${{ matrix.conan_profile }}
- name: Run clio_tests
run: |
chmod +x ./clio_tests
./clio_tests
check_config:
name: Check Config Description
needs: build
runs-on: heavy
container:
image: rippleci/clio_ci:latest
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
name: clio_server_Linux_Release_gcc
- name: Compare Config Description
shell: bash
run: |
repoConfigFile=docs/config-description.md
if ! [ -f "${repoConfigFile}" ]; then
if ! [ -f ${repoConfigFile} ]; then
echo "Config Description markdown file is missing in docs folder"
exit 1
fi
chmod +x ./clio_server
configDescriptionFile=config_description_new.md
./clio_server -d "${configDescriptionFile}"
./clio_server -d ${configDescriptionFile}
configDescriptionHash=$(sha256sum "${configDescriptionFile}" | cut -d' ' -f1)
repoConfigHash=$(sha256sum "${repoConfigFile}" | cut -d' ' -f1)
configDescriptionHash=$(sha256sum ${configDescriptionFile} | cut -d' ' -f1)
repoConfigHash=$(sha256sum ${repoConfigFile} | cut -d' ' -f1)
if [ "${configDescriptionHash}" != "${repoConfigHash}" ]; then
if [ ${configDescriptionHash} != ${repoConfigHash} ]; then
echo "Markdown file is not up to date"
diff -u "${repoConfigFile}" "${configDescriptionFile}"
rm -f "${configDescriptionFile}"
rm -f ${configDescriptionFile}
exit 1
fi
rm -f "${configDescriptionFile}"
rm -f ${configDescriptionFile}
exit 0


@@ -1,92 +0,0 @@
name: Reusable build and test
on:
workflow_call:
inputs:
runs_on:
description: Runner to run the job on
required: true
type: string
container:
description: "The container object as a JSON string (leave empty to run natively)"
required: true
type: string
conan_profile:
description: Conan profile to use
required: true
type: string
build_type:
description: Build type
required: true
type: string
disable_cache:
description: Whether ccache and conan cache should be disabled
required: false
type: boolean
default: false
static:
description: Whether to build static binaries
required: true
type: boolean
default: true
run_unit_tests:
description: Whether to run unit tests
required: true
type: boolean
run_integration_tests:
description: Whether to run integration tests
required: true
type: boolean
default: false
upload_clio_server:
description: Whether to upload clio_server
required: true
type: boolean
targets:
description: Space-separated build target names
required: false
type: string
default: all
sanitizer:
description: Sanitizer to use
required: false
type: string
default: "false"
jobs:
build:
uses: ./.github/workflows/build_impl.yml
with:
runs_on: ${{ inputs.runs_on }}
container: ${{ inputs.container }}
conan_profile: ${{ inputs.conan_profile }}
build_type: ${{ inputs.build_type }}
disable_cache: ${{ inputs.disable_cache }}
code_coverage: false
static: ${{ inputs.static }}
upload_clio_server: ${{ inputs.upload_clio_server }}
targets: ${{ inputs.targets }}
sanitizer: ${{ inputs.sanitizer }}
analyze_build_time: false
test:
needs: build
uses: ./.github/workflows/test_impl.yml
with:
runs_on: ${{ inputs.runs_on }}
container: ${{ inputs.container }}
conan_profile: ${{ inputs.conan_profile }}
build_type: ${{ inputs.build_type }}
run_unit_tests: ${{ inputs.run_unit_tests }}
run_integration_tests: ${{ inputs.run_integration_tests }}
sanitizer: ${{ inputs.sanitizer }}
View File
@@ -1,5 +1,4 @@
name: Build and publish Clio docker image
on:
workflow_call:
inputs:
@@ -42,7 +41,6 @@ jobs:
build_and_publish_image:
name: Build and publish image
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -57,7 +55,7 @@ jobs:
if: ${{ inputs.clio_server_binary_url != null }}
shell: bash
run: |
wget "${{inputs.clio_server_binary_url}}" -P ./docker/clio/artifact/
wget ${{inputs.clio_server_binary_url}} -P ./docker/clio/artifact/
if [ "$(sha256sum ./docker/clio/clio_server | awk '{print $1}')" != "${{inputs.binary_sha256}}" ]; then
echo "Binary sha256 sum doesn't match"
exit 1
@@ -89,10 +87,7 @@ jobs:
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
images: |
rippleci/clio
ghcr.io/xrplf/clio
dockerhub_repo: rippleci/clio
image_name: rippleci/clio
push_image: ${{ inputs.publish_image }}
directory: docker/clio
tags: ${{ inputs.tags }}
View File
@@ -1,5 +1,4 @@
name: Reusable build
on:
workflow_call:
inputs:
@@ -7,11 +6,13 @@ on:
description: Runner to run the job on
required: true
type: string
default: heavy
container:
description: "The container object as a JSON string (leave empty to run natively)"
required: true
type: string
default: ""
conan_profile:
description: Conan profile to use
@@ -27,51 +28,60 @@ on:
description: Whether ccache and conan cache should be disabled
required: false
type: boolean
default: false
code_coverage:
description: Whether to enable code coverage
required: true
type: boolean
default: false
static:
description: Whether to build static binaries
required: true
type: boolean
default: true
upload_clio_server:
description: Whether to upload clio_server
unit_tests:
description: Whether to run unit tests
required: true
type: boolean
default: false
targets:
description: Space-separated build target names
integration_tests:
description: Whether to run integration tests
required: true
type: boolean
default: false
clio_server:
description: Whether to build clio_server
required: true
type: boolean
default: true
target:
description: Build target name
required: false
type: string
default: all
sanitizer:
description: Sanitizer to use
required: true
type: string
analyze_build_time:
description: Whether to enable build time analysis
required: true
type: boolean
secrets:
CODECOV_TOKEN:
required: false
type: string
default: 'false'
jobs:
build:
name: Build ${{ inputs.container != '' && 'in container' || 'natively' }}
runs-on: ${{ inputs.runs_on }}
runs-on: ${{ inputs.runs_on }}
container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
steps:
- name: Clean workdir
if: ${{ runner.os == 'macOS' }}
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
uses: kuznetsss/workspace-cleanup@1.0
- uses: actions/checkout@v4
with:
@@ -84,6 +94,7 @@ jobs:
- name: Setup conan
uses: ./.github/actions/setup_conan
id: conan
with:
conan_profile: ${{ inputs.conan_profile }}
@@ -93,7 +104,7 @@ jobs:
id: restore_cache
with:
conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
conan_profile: ${{ inputs.conan_profile }}
conan_profile: ${{ steps.conan.outputs.conan_profile }}
ccache_dir: ${{ env.CCACHE_DIR }}
build_type: ${{ inputs.build_type }}
code_coverage: ${{ inputs.code_coverage }}
@@ -101,33 +112,17 @@ jobs:
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ inputs.conan_profile }}
conan_profile: ${{ steps.conan.outputs.conan_profile }}
conan_cache_hit: ${{ !inputs.disable_cache && steps.restore_cache.outputs.conan_cache_hit }}
build_type: ${{ inputs.build_type }}
code_coverage: ${{ inputs.code_coverage }}
static: ${{ inputs.static }}
sanitizer: ${{ inputs.sanitizer }}
time_trace: ${{ inputs.analyze_build_time }}
- name: Build Clio
uses: ./.github/actions/build_clio
with:
targets: ${{ inputs.targets }}
- name: Show build time analyze report
if: ${{ inputs.analyze_build_time }}
run: |
ClangBuildAnalyzer --all build/ build_time_report.bin
ClangBuildAnalyzer --analyze build_time_report.bin > build_time_report.txt
cat build_time_report.txt
shell: bash
- name: Upload build time analyze report
if: ${{ inputs.analyze_build_time }}
uses: actions/upload-artifact@v4
with:
name: build_time_report_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
path: build_time_report.txt
target: ${{ inputs.target }}
- name: Show ccache's statistics
if: ${{ !inputs.disable_cache }}
@@ -140,32 +135,32 @@ jobs:
cat /tmp/ccache.stats
- name: Strip unit_tests
if: inputs.sanitizer == 'false' && !inputs.code_coverage && !inputs.analyze_build_time
if: ${{ inputs.unit_tests && !inputs.code_coverage && inputs.sanitizer == 'false' }}
run: strip build/clio_tests
- name: Strip integration_tests
if: inputs.sanitizer == 'false' && !inputs.code_coverage && !inputs.analyze_build_time
if: ${{ inputs.integration_tests && !inputs.code_coverage }}
run: strip build/clio_integration_tests
- name: Upload clio_server
if: inputs.upload_clio_server && !inputs.code_coverage && !inputs.analyze_build_time
if: ${{ inputs.clio_server }}
uses: actions/upload-artifact@v4
with:
name: clio_server_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
name: clio_server_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
path: build/clio_server
- name: Upload clio_tests
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time }}
if: ${{ inputs.unit_tests && !inputs.code_coverage }}
uses: actions/upload-artifact@v4
with:
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
path: build/clio_tests
- name: Upload clio_integration_tests
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time }}
if: ${{ inputs.integration_tests && !inputs.code_coverage }}
uses: actions/upload-artifact@v4
with:
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
path: build/clio_integration_tests
- name: Save cache
@@ -180,25 +175,16 @@ jobs:
ccache_cache_miss_rate: ${{ steps.ccache_stats.outputs.miss_rate }}
build_type: ${{ inputs.build_type }}
code_coverage: ${{ inputs.code_coverage }}
conan_profile: ${{ inputs.conan_profile }}
conan_profile: ${{ steps.conan.outputs.conan_profile }}
# This is run as part of the build job, because it requires the following:
# - source code
# - generated source code (Build.cpp)
# - conan packages
# - .gcno files in build directory
#
# It's all available in the build job, but not in the test job
# TODO: This is not part of the build process, but it is the easiest way to do it here.
# It will be refactored in https://github.com/XRPLF/clio/issues/1075
- name: Run code coverage
if: ${{ inputs.code_coverage }}
uses: ./.github/actions/code_coverage
# `codecov/codecov-action` will rerun `gcov` if it's available and build directory is present
# To prevent this from happening, we run this action in a separate workflow
#
# More info: https://github.com/XRPLF/clio/pull/2066
upload_coverage_report:
if: ${{ inputs.code_coverage }}
if: ${{ inputs.code_coverage }}
name: Codecov
needs: build
uses: ./.github/workflows/upload_coverage_report.yml
View File
@@ -1,28 +1,19 @@
name: Check new libXRPL
on:
repository_dispatch:
types: [check_libxrpl]
concurrency:
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
CONAN_PROFILE: gcc
jobs:
build:
name: Build Clio / `libXRPL ${{ github.event.client_payload.version }}`
runs-on: [self-hosted, heavy]
container:
image: ghcr.io/xrplf/clio-ci:latest
image: rippleci/clio_ci:latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
fetch-depth: 0
- name: Update libXRPL version requirement
shell: bash
@@ -32,17 +23,18 @@ jobs:
- name: Prepare runner
uses: ./.github/actions/prepare_runner
with:
disable_ccache: true
disable_ccache: true
- name: Setup conan
uses: ./.github/actions/setup_conan
id: conan
with:
conan_profile: ${{ env.CONAN_PROFILE }}
conan_profile: gcc
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ env.CONAN_PROFILE }}
conan_profile: ${{ steps.conan.outputs.conan_profile }}
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
build_type: Release
@@ -63,7 +55,7 @@ jobs:
needs: build
runs-on: [self-hosted, heavy]
container:
image: ghcr.io/xrplf/clio-ci:latest
image: rippleci/clio_ci:latest
steps:
- uses: actions/download-artifact@v4
@@ -80,11 +72,9 @@ jobs:
needs: [build, run_tests]
if: ${{ always() && contains(needs.*.result, 'failure') }}
runs-on: ubuntu-latest
permissions:
contents: write
issues: write
steps:
- uses: actions/checkout@v4
@@ -93,8 +83,8 @@ jobs:
env:
GH_TOKEN: ${{ github.token }}
with:
labels: "compatibility,bug"
title: "Proposed libXRPL check failed"
labels: 'compatibility,bug'
title: 'Proposed libXRPL check failed'
body: >
Clio build or tests failed against `libXRPL ${{ github.event.client_payload.version }}`.
View File
@@ -1,5 +1,4 @@
name: Check PR title
on:
pull_request:
types: [opened, edited, reopened, synchronize]
@@ -8,10 +7,12 @@ on:
jobs:
check_title:
runs-on: ubuntu-latest
# permissions:
# pull-requests: write
steps:
- uses: ytanikin/pr-conventional-commits@8267db1bacc237419f9ed0228bb9d94e94271a1d # v1.4.1
- uses: ytanikin/PRConventionalCommits@1.3.0
with:
task_types: '["build","feat","fix","docs","test","ci","style","refactor","perf","chore"]'
add_label: false
custom_labels: '{"build":"build", "feat":"enhancement", "fix":"bug", "docs":"documentation", "test":"testability", "ci":"ci", "style":"refactoring", "refactor":"refactoring", "perf":"performance", "chore":"tooling"}'
# Turned off labelling because it leads to an error, see https://github.com/ytanikin/PRConventionalCommits/issues/19
# custom_labels: '{"build":"build", "feat":"enhancement", "fix":"bug", "docs":"documentation", "test":"testability", "ci":"ci", "style":"refactoring", "refactor":"refactoring", "perf":"performance", "chore":"tooling"}'
View File
@@ -1,5 +1,4 @@
name: Clang-tidy check
on:
schedule:
- cron: "0 9 * * 1-5"
@@ -7,24 +6,15 @@ on:
pull_request:
branches: [develop]
paths:
- .github/workflows/clang-tidy.yml
- .clang_tidy
concurrency:
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
CONAN_PROFILE: clang
- .github/workflows/clang-tidy.yml
workflow_call:
jobs:
clang_tidy:
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:latest
image: rippleci/clio_ci:latest
permissions:
contents: write
issues: write
@@ -42,8 +32,9 @@ jobs:
- name: Setup conan
uses: ./.github/actions/setup_conan
id: conan
with:
conan_profile: ${{ env.CONAN_PROFILE }}
conan_profile: clang
- name: Restore cache
uses: ./.github/actions/restore_cache
@@ -51,12 +42,12 @@ jobs:
with:
conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
ccache_dir: ${{ env.CCACHE_DIR }}
conan_profile: ${{ env.CONAN_PROFILE }}
conan_profile: ${{ steps.conan.outputs.conan_profile }}
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ env.CONAN_PROFILE }}
conan_profile: ${{ steps.conan.outputs.conan_profile }}
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
build_type: Release
@@ -69,14 +60,13 @@ jobs:
shell: bash
id: run_clang_tidy
run: |
run-clang-tidy-19 -p build -j "${{ steps.number_of_threads.outputs.threads_number }}" -fix -quiet 1>output.txt
run-clang-tidy-19 -p build -j ${{ steps.number_of_threads.outputs.threads_number }} -fix -quiet 1>output.txt
- name: Fix local includes and clang-format style
- name: Check format
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
continue-on-error: true
shell: bash
run: |
pre-commit run --all-files fix-local-includes || true
pre-commit run --all-files clang-format || true
run: ./.githooks/check-format
- name: Print issues found
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
@@ -87,20 +77,20 @@ jobs:
rm output.txt
- name: Create an issue
if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
id: create_issue
uses: ./.github/actions/create_issue
env:
GH_TOKEN: ${{ github.token }}
with:
title: "Clang-tidy found bugs in code 🐛"
title: 'Clang-tidy found bugs in code 🐛'
body: >
Clang-tidy found issues in the code:
List of the issues found: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
- uses: crazy-max/ghaction-import-gpg@e89d40939c28e39f97cf32126055eeae86ba74ec # v6.3.0
if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
- uses: crazy-max/ghaction-import-gpg@v6
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
with:
gpg_private_key: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.ACTIONS_GPG_PASSPHRASE }}
@@ -108,8 +98,8 @@ jobs:
git_commit_gpgsign: true
- name: Create PR with fixes
if: ${{ steps.run_clang_tidy.outcome != 'success' && github.event_name != 'pull_request' }}
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
uses: peter-evans/create-pull-request@v7
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
@@ -121,7 +111,7 @@ jobs:
delete-branch: true
title: "style: clang-tidy auto fixes"
body: "Fixes #${{ steps.create_issue.outputs.created_issue_id }}. Please review and commit clang-tidy fixes."
reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
reviewers: "godexsoft,kuznetsss,PeterChen13579"
- name: Fail the job
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
View File
@@ -1,5 +1,4 @@
name: Restart clang-tidy workflow
on:
push:
branches: [develop]
@@ -18,8 +17,8 @@ jobs:
id: check
shell: bash
run: |
passed=$(if [[ "$(git log -1 --pretty=format:%s | grep 'style: clang-tidy auto fixes')" ]]; then echo 'true' ; else echo 'false' ; fi)
echo "passed=\"$passed\"" >> $GITHUB_OUTPUT
passed=$(if [[ $(git log -1 --pretty=format:%s | grep 'style: clang-tidy auto fixes') ]]; then echo 'true' ; else echo 'false' ; fi)
echo "passed=$passed" >> $GITHUB_OUTPUT
- name: Run clang-tidy workflow
if: ${{ contains(steps.check.outputs.passed, 'true') }}
View File
@@ -1,5 +1,4 @@
name: Documentation
on:
push:
branches: [develop]
@@ -11,8 +10,7 @@ permissions:
id-token: write
concurrency:
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }}
group: "pages"
cancel-in-progress: true
jobs:
@@ -23,19 +21,18 @@ jobs:
runs-on: ubuntu-latest
continue-on-error: true
container:
image: ghcr.io/xrplf/clio-ci:latest
image: rippleci/clio_ci:latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
with:
lfs: true
- name: Build docs
run: |
mkdir -p build_docs && cd build_docs
cmake ../docs && cmake --build . --target docs
- name: Setup Pages
uses: actions/configure-pages@v5
@@ -44,7 +41,7 @@ jobs:
with:
path: build_docs/html
name: docs-develop
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4
View File
@@ -1,123 +1,167 @@
name: Nightly release
on:
schedule:
- cron: "0 8 * * 1-5"
- cron: '0 8 * * 1-5'
workflow_dispatch:
pull_request:
paths:
- .github/workflows/nightly.yml
- .github/workflows/release_impl.yml
- .github/workflows/build_and_test.yml
- .github/workflows/build_impl.yml
- .github/workflows/build_clio_docker_image.yml
- ".github/actions/**"
- "!.github/actions/code_coverage/**"
concurrency:
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
- '.github/workflows/nightly.yml'
- '.github/workflows/build_clio_docker_image.yml'
jobs:
build-and-test:
name: Build and Test
build:
name: Build clio
strategy:
fail-fast: false
matrix:
include:
- os: macos15
conan_profile: default_apple_clang
build_type: Release
static: false
- os: heavy
conan_profile: gcc
build_type: Release
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
container: '{ "image": "rippleci/clio_ci:latest" }'
- os: heavy
conan_profile: gcc
build_type: Debug
static: true
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
uses: ./.github/workflows/build_and_test.yml
with:
runs_on: ${{ matrix.os }}
container: ${{ matrix.container }}
conan_profile: ${{ matrix.conan_profile }}
build_type: ${{ matrix.build_type }}
static: ${{ matrix.static }}
run_unit_tests: true
run_integration_tests: true
upload_clio_server: true
disable_cache: true
analyze_build_time:
name: Analyze Build Time
strategy:
fail-fast: false
matrix:
include:
# TODO: Enable when we have at least ubuntu 22.04
# as ClangBuildAnalyzer requires relatively modern glibc
#
# - os: heavy
# conan_profile: clang
# container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
# static: true
- os: macos15
conan_profile: default_apple_clang
container: ""
static: false
container: '{ "image": "rippleci/clio_ci:latest" }'
uses: ./.github/workflows/build_impl.yml
with:
runs_on: ${{ matrix.os }}
container: ${{ matrix.container }}
conan_profile: ${{ matrix.conan_profile }}
build_type: Release
disable_cache: true
conan_profile: gcc
build_type: ${{ matrix.build_type }}
code_coverage: false
static: ${{ matrix.static }}
upload_clio_server: false
targets: all
sanitizer: "false"
analyze_build_time: true
unit_tests: true
integration_tests: true
clio_server: true
disable_cache: true
run_tests:
needs: build
strategy:
fail-fast: false
matrix:
include:
- os: macos15
conan_profile: apple_clang_16
build_type: Release
integration_tests: false
- os: heavy
conan_profile: gcc
build_type: Release
container:
image: rippleci/clio_ci:latest
integration_tests: true
- os: heavy
conan_profile: gcc
build_type: Debug
container:
image: rippleci/clio_ci:latest
integration_tests: true
runs-on: [self-hosted, "${{ matrix.os }}"]
container: ${{ matrix.container }}
services:
scylladb:
image: ${{ (matrix.integration_tests) && 'scylladb/scylla' || '' }}
options: >-
--health-cmd "cqlsh -e 'describe cluster'"
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Clean workdir
if: ${{ runner.os == 'macOS' }}
uses: kuznetsss/workspace-cleanup@1.0
- uses: actions/download-artifact@v4
with:
name: clio_tests_${{ runner.os }}_${{ matrix.build_type }}_${{ matrix.conan_profile }}
- name: Run clio_tests
run: |
chmod +x ./clio_tests
./clio_tests
- uses: actions/download-artifact@v4
with:
name: clio_integration_tests_${{ runner.os }}_${{ matrix.build_type }}_${{ matrix.conan_profile }}
# To be enabled again once Docker on macOS runners is available
# https://github.com/XRPLF/clio/issues/1400
- name: Run clio_integration_tests
if: matrix.integration_tests
run: |
chmod +x ./clio_integration_tests
./clio_integration_tests --backend_host=scylladb
nightly_release:
needs: build-and-test
uses: ./.github/workflows/release_impl.yml
with:
overwrite_release: true
title: "Clio development (nightly) build"
version: nightly
notes_header_file: nightly_notes.md
if: ${{ github.event_name != 'pull_request' }}
needs: run_tests
runs-on: ubuntu-latest
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
path: nightly_release
pattern: clio_server_*
- name: Prepare files
shell: bash
run: |
cp ${{ github.workspace }}/.github/workflows/nightly_notes.md "${RUNNER_TEMP}/nightly_notes.md"
cd nightly_release
for d in $(ls); do
archive_name=$(ls $d)
mv ${d}/${archive_name} ./
rm -r $d
sha256sum ./$archive_name > ./${archive_name}.sha256sum
cat ./$archive_name.sha256sum >> "${RUNNER_TEMP}/nightly_notes.md"
done
echo '```' >> "${RUNNER_TEMP}/nightly_notes.md"
- name: Remove current nightly release and nightly tag
shell: bash
run: |
gh release delete nightly --yes || true
git push origin :nightly || true
- name: Publish nightly release
shell: bash
run: |
gh release create nightly --prerelease --title "Clio development (nightly) build" \
--target $GITHUB_SHA --notes-file "${RUNNER_TEMP}/nightly_notes.md" \
./nightly_release/clio_server*
build_and_publish_docker_image:
uses: ./.github/workflows/build_clio_docker_image.yml
needs: build-and-test
needs: run_tests
secrets: inherit
with:
tags: |
type=raw,value=nightly
type=raw,value=${{ github.sha }}
type=raw,value=nightly
type=raw,value=${{ github.sha }}
artifact_name: clio_server_Linux_Release_gcc
strip_binary: true
publish_image: ${{ github.event_name != 'pull_request' }}
create_issue_on_failure:
needs: [build-and-test, nightly_release, build_and_publish_docker_image]
needs: [build, run_tests, nightly_release, build_and_publish_docker_image]
if: ${{ always() && contains(needs.*.result, 'failure') && github.event_name != 'pull_request' }}
runs-on: ubuntu-latest
permissions:
contents: write
issues: write
steps:
- uses: actions/checkout@v4
@@ -126,7 +170,7 @@ jobs:
env:
GH_TOKEN: ${{ github.token }}
with:
title: "Nightly release failed 🌙"
title: 'Nightly release failed 🌙'
body: >
Nightly release failed:
View File
@@ -1,7 +1,6 @@
# Release notes
> **Note:** Please remember that this is a development release and it is not recommended for production use.
Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly>
Changelog (including previous releases): https://github.com/XRPLF/clio/commits/nightly
## SHA256 checksums
```
View File
@@ -1,39 +0,0 @@
name: Pre-commit auto-update
on:
# every first day of the month
schedule:
- cron: "0 0 1 * *"
# on demand
workflow_dispatch:
jobs:
auto-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.x
- run: pip install pre-commit
- run: pre-commit autoupdate --freeze
- run: pre-commit run --all-files || true
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
if: always()
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
with:
branch: update/pre-commit-hooks
title: Update pre-commit hooks
commit-message: "style: update pre-commit hooks"
body: Update versions of pre-commit hooks to latest version.
reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
View File
@@ -1,28 +0,0 @@
name: Run pre-commit hooks
on:
pull_request:
push:
branches:
- develop
workflow_dispatch:
jobs:
run-hooks:
runs-on: heavy
container:
image: ghcr.io/xrplf/clio-ci:latest
steps:
- name: Checkout Repo ⚡️
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prepare runner
uses: ./.github/actions/prepare_runner
with:
disable_ccache: true
- name: Run pre-commit ✅
run: pre-commit run --all-files
View File
@@ -1,78 +0,0 @@
name: Make release
on:
workflow_call:
inputs:
overwrite_release:
description: "Overwrite the current release and tag"
required: true
type: boolean
title:
description: "Release title"
required: true
type: string
version:
description: "Release version"
required: true
type: string
notes_header_file:
description: "Release notes header file"
required: true
type: string
jobs:
release:
runs-on: ubuntu-latest
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
path: release_artifacts
pattern: clio_server_*
- name: Prepare files
shell: bash
working-directory: release_artifacts
run: |
cp ${{ github.workspace }}/.github/workflows/${{ inputs.notes_header_file }} "${RUNNER_TEMP}/release_notes.md"
echo '' >> "${RUNNER_TEMP}/release_notes.md"
echo '```' >> "${RUNNER_TEMP}/release_notes.md"
for d in $(ls); do
archive_name=$(ls $d)
mv ${d}/${archive_name} ./
rm -r $d
sha256sum ./$archive_name > ./${archive_name}.sha256sum
cat ./$archive_name.sha256sum >> "${RUNNER_TEMP}/release_notes.md"
done
echo '```' >> "${RUNNER_TEMP}/release_notes.md"
- name: Remove current release and tag
if: ${{ github.event_name != 'pull_request' && inputs.overwrite_release }}
shell: bash
run: |
gh release delete ${{ inputs.version }} --yes || true
git push origin :${{ inputs.version }} || true
- name: Publish release
if: ${{ github.event_name != 'pull_request' }}
shell: bash
run: |
gh release create ${{ inputs.version }} \
${{ inputs.overwrite_release && '--prerelease' || '' }} \
--title "${{ inputs.title }}" \
--target $GITHUB_SHA \
--notes-file "${RUNNER_TEMP}/release_notes.md" \
./release_artifacts/clio_server*
View File
@@ -1,37 +1,15 @@
name: Run tests with sanitizers
on:
schedule:
- cron: "0 4 * * 1-5"
workflow_dispatch:
pull_request:
paths:
- .github/workflows/sanitizers.yml
- .github/workflows/build_and_test.yml
- .github/workflows/build_impl.yml
- .github/workflows/test_impl.yml
- ".github/actions/**"
- "!.github/actions/build_docker_image/**"
- "!.github/actions/create_issue/**"
- .github/scripts/execute-tests-under-sanitizer
- CMakeLists.txt
- "cmake/**"
# We don't run sanitizers on code changes, because they take too long
# - "src/**"
# - "tests/**"
concurrency:
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
- '.github/workflows/sanitizers.yml'
jobs:
build-and-test:
name: Build and Test
build:
name: Build clio tests
strategy:
fail-fast: false
matrix:
@@ -40,19 +18,89 @@ jobs:
compiler: gcc
- sanitizer: asan
compiler: gcc
- sanitizer: ubsan
compiler: gcc
uses: ./.github/workflows/build_and_test.yml
# - sanitizer: ubsan # todo: enable when heavy runners are available
# compiler: gcc
uses: ./.github/workflows/build_impl.yml
with:
runs_on: heavy
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
runs_on: ubuntu-latest # todo: change to heavy
container: '{ "image": "rippleci/clio_ci:latest" }'
disable_cache: true
conan_profile: ${{ matrix.compiler }}.${{ matrix.sanitizer }}
build_type: Release
code_coverage: false
static: false
run_unit_tests: true
run_integration_tests: false
upload_clio_server: false
targets: clio_tests clio_integration_tests
unit_tests: true
integration_tests: false
clio_server: false
target: clio_tests
sanitizer: ${{ matrix.sanitizer }}
# consider combining this with the previous matrix instead
run_tests:
needs: build
strategy:
fail-fast: false
matrix:
include:
- sanitizer: tsan
compiler: gcc
- sanitizer: asan
compiler: gcc
# - sanitizer: ubsan # todo: enable when heavy runners are available
# compiler: gcc
runs-on: ubuntu-latest # todo: change to heavy
container:
image: rippleci/clio_ci:latest
permissions:
contents: write
issues: write
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/download-artifact@v4
with:
name: clio_tests_${{ runner.os }}_Release_${{ matrix.compiler }}.${{ matrix.sanitizer }}
- name: Run clio_tests [${{ matrix.compiler }} / ${{ matrix.sanitizer }}]
shell: bash
run: |
chmod +x ./clio_tests
./.github/scripts/execute-tests-under-sanitizer ./clio_tests
- name: Check for sanitizer report
shell: bash
id: check_report
run: |
if ls .sanitizer-report/* 1> /dev/null 2>&1; then
echo "found_report=true" >> $GITHUB_OUTPUT
else
echo "found_report=false" >> $GITHUB_OUTPUT
fi
- name: Upload report
if: ${{ steps.check_report.outputs.found_report == 'true' }}
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.compiler }}_${{ matrix.sanitizer }}_report
path: .sanitizer-report/*
include-hidden-files: true
#
# todo: enable when we have fixed all currently existing issues from sanitizers
#
# - name: Create an issue
# if: ${{ steps.check_report.outputs.found_report == 'true' }}
# uses: ./.github/actions/create_issue
# env:
# GH_TOKEN: ${{ github.token }}
# with:
# labels: 'bug'
# title: '[${{ matrix.sanitizer }}/${{ matrix.compiler }}] reported issues'
# body: >
#     Clio tests failed one or more sanitizer checks when built with `${{ matrix.compiler }}`.
# Workflow: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
# Reports are available as artifacts.
View File
@@ -1,165 +0,0 @@
name: Reusable test
on:
workflow_call:
inputs:
runs_on:
description: Runner to run the job on
required: true
type: string
container:
description: "The container object as a JSON string (leave empty to run natively)"
required: true
type: string
conan_profile:
description: Conan profile to use
required: true
type: string
build_type:
description: Build type
required: true
type: string
run_unit_tests:
description: Whether to run unit tests
required: true
type: boolean
run_integration_tests:
description: Whether to run integration tests
required: true
type: boolean
sanitizer:
description: Sanitizer to use
required: true
type: string
jobs:
unit_tests:
name: Unit testing ${{ inputs.container != '' && 'in container' || 'natively' }}
runs-on: ${{ inputs.runs_on }}
container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
if: inputs.run_unit_tests
env:
# TODO: remove when we have fixed all currently existing issues from sanitizers
SANITIZER_IGNORE_ERRORS: ${{ inputs.sanitizer != 'false' && inputs.sanitizer != 'ubsan' }}
steps:
- name: Clean workdir
if: ${{ runner.os == 'macOS' }}
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/download-artifact@v4
with:
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
- name: Make clio_tests executable
shell: bash
run: chmod +x ./clio_tests
- name: Run clio_tests (regular)
if: env.SANITIZER_IGNORE_ERRORS == 'false'
run: ./clio_tests
- name: Run clio_tests (sanitizer errors ignored)
if: env.SANITIZER_IGNORE_ERRORS == 'true'
run: ./.github/scripts/execute-tests-under-sanitizer ./clio_tests
- name: Check for sanitizer report
if: env.SANITIZER_IGNORE_ERRORS == 'true'
shell: bash
id: check_report
run: |
if ls .sanitizer-report/* 1> /dev/null 2>&1; then
echo "found_report=true" >> $GITHUB_OUTPUT
else
echo "found_report=false" >> $GITHUB_OUTPUT
fi
- name: Upload sanitizer report
if: env.SANITIZER_IGNORE_ERRORS == 'true' && steps.check_report.outputs.found_report == 'true'
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.conan_profile }}_report
path: .sanitizer-report/*
include-hidden-files: true
- name: Create an issue
if: false && env.SANITIZER_IGNORE_ERRORS == 'true' && steps.check_report.outputs.found_report == 'true'
uses: ./.github/actions/create_issue
env:
GH_TOKEN: ${{ github.token }}
with:
labels: "bug"
title: "[${{ inputs.conan_profile }}] reported issues"
body: >
Clio tests failed one or more sanitizer checks when built with `${{ inputs.conan_profile }}`.
Workflow: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
Reports are available as artifacts.
integration_tests:
name: Integration testing ${{ inputs.container != '' && 'in container' || 'natively' }}
runs-on: ${{ inputs.runs_on }}
container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
if: inputs.run_integration_tests
services:
scylladb:
image: ${{ inputs.container != '' && 'scylladb/scylla' || '' }}
options: >-
--health-cmd "cqlsh -e 'describe cluster'"
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Clean workdir
if: ${{ runner.os == 'macOS' }}
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
- name: Spin up scylladb
if: ${{ runner.os == 'macOS' }}
timeout-minutes: 3
run: |
docker rm --force scylladb || true
docker run \
--detach \
--name scylladb \
--health-cmd "cqlsh -e 'describe cluster'" \
--health-interval 10s \
--health-timeout 5s \
--health-retries 5 \
--publish 9042:9042 \
--memory 16G \
scylladb/scylla
until [ "$(docker inspect -f '{{.State.Health.Status}}' scylladb)" == "healthy" ]; do
sleep 5
done
- uses: actions/download-artifact@v4
with:
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
- name: Run clio_integration_tests
run: |
chmod +x ./clio_integration_tests
./clio_integration_tests ${{ runner.os != 'macOS' && '--backend_host=scylladb' || '' }}
- name: Show docker logs and stop scylladb
if: ${{ always() && runner.os == 'macOS' }}
run: |
docker logs scylladb
docker rm --force scylladb || true
View File
@@ -1,37 +1,22 @@
name: Update CI docker image
on:
pull_request:
paths:
- 'docker/ci/**'
- 'docker/compilers/**'
- .github/workflows/update_docker_ci.yml
- ".github/actions/build_docker_image/**"
- "docker/ci/**"
- "docker/compilers/**"
push:
branches: [develop]
paths:
- 'docker/ci/**' # CI image must update when either its dockerfile changes
- 'docker/compilers/**' # or any compilers changed and were pushed by hand
- .github/workflows/update_docker_ci.yml
- ".github/actions/build_docker_image/**"
# CI image must update when either its Dockerfile changes
# or any compilers changed and were pushed by hand
- "docker/ci/**"
- "docker/compilers/**"
workflow_dispatch:
concurrency:
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build_and_push:
name: Build and push docker image
runs-on: [self-hosted, heavy]
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/build_docker_image
@@ -40,10 +25,7 @@ jobs:
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
images: |
rippleci/clio_ci
ghcr.io/xrplf/clio-ci
dockerhub_repo: rippleci/clio_ci
image_name: rippleci/clio_ci
push_image: ${{ github.event_name != 'pull_request' }}
directory: docker/ci
tags: |
View File
@@ -1,5 +1,4 @@
name: Upload report
on:
workflow_dispatch:
workflow_call:
@@ -11,7 +10,6 @@ jobs:
upload_report:
name: Upload report
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
@@ -25,9 +23,13 @@ jobs:
- name: Upload coverage report
if: ${{ hashFiles('build/coverage_report.xml') != '' }}
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
uses: wandalen/wretry.action@v3.7.3
with:
files: build/coverage_report.xml
fail_ci_if_error: true
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
action: codecov/codecov-action@v4
with: |
files: build/coverage_report.xml
fail_ci_if_error: false
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 10000
View File
@@ -1,8 +0,0 @@
---
ignored:
- DL3003
- DL3008
- DL3013
- DL3015
- DL3027
- DL3047
View File
@@ -1,6 +0,0 @@
# Default state for all rules
default: true
# MD013/line-length - Line length
MD013:
line_length: 1000
View File
@@ -1,109 +0,0 @@
---
# pre-commit is a tool to perform a predefined set of tasks manually and/or
# automatically before git commits are made.
#
# Config reference: https://pre-commit.com/#pre-commit-configyaml---top-level
#
# Common tasks
#
# - Run on all files: pre-commit run --all-files
# - Register git hooks: pre-commit install --hook-type pre-commit --hook-type pre-push
#
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
# `pre-commit sample-config` default hooks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: cef0300fd0fc4d2a87a85fa2093c6b283ea36f4b # frozen: v5.0.0
hooks:
- id: check-added-large-files
- id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable
- id: end-of-file-fixer
exclude: ^docs/doxygen-awesome-theme/
- id: trailing-whitespace
exclude: ^docs/doxygen-awesome-theme/
# Autoformat: YAML, JSON, Markdown, etc.
- repo: https://github.com/rbubley/mirrors-prettier
rev: 787fb9f542b140ba0b2aced38e6a3e68021647a3 # frozen: v3.5.3
hooks:
- id: prettier
exclude: ^docs/doxygen-awesome-theme/
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: 586c3ea3f51230da42bab657c6a32e9e66c364f0 # frozen: v0.44.0
hooks:
- id: markdownlint-fix
exclude: LICENSE.md
- repo: https://github.com/hadolint/hadolint
rev: c3dc18df7a501f02a560a2cc7ba3c69a85ca01d3 # frozen: v2.13.1-beta
hooks:
- id: hadolint-docker
# hadolint-docker is a special hook that runs hadolint in a Docker container
# Docker is not installed in the environment where pre-commit is run
stages: [manual]
entry: hadolint/hadolint:v2.12.1-beta hadolint
- repo: https://github.com/codespell-project/codespell
rev: 63c8f8312b7559622c0d82815639671ae42132ac # frozen: v2.4.1
hooks:
- id: codespell
args:
[
--write-changes,
--ignore-words=pre-commit-hooks/codespell_ignore.txt,
]
# Running fix-local-includes before clang-format
# to ensure that the include order is correct.
- repo: local
hooks:
- id: fix-local-includes
name: Fix Local Includes
entry: pre-commit-hooks/fix-local-includes.sh
types: [c++]
language: script
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: f9a52e87b6cdcb01b0a62b8611d9ba9f2dad0067 # frozen: v19.1.7
hooks:
- id: clang-format
args: [--style=file]
types: [c++]
- repo: https://github.com/cheshirekow/cmake-format-precommit
rev: e2c2116d86a80e72e7146a06e68b7c228afc6319 # frozen: v0.6.13
hooks:
- id: cmake-format
additional_dependencies: [PyYAML]
- repo: local
hooks:
- id: check-no-h-files
name: No .h files
entry: There should be no .h files in this repository
language: fail
files: \.h$
- repo: local
hooks:
- id: gofmt
name: Go Format
entry: pre-commit-hooks/run-go-fmt.sh
types: [go]
language: golang
description: "Runs `gofmt`, requires golang"
- id: check-docs
name: Check Doxygen Documentation
entry: pre-commit-hooks/check-doxygen-docs.sh
types: [text]
language: script
pass_filenames: false
- id: verify-commits
name: Verify Commits
entry: pre-commit-hooks/verify-commits.sh
always_run: true
stages: [pre-push]
language: script
pass_filenames: false
View File
@@ -17,7 +17,6 @@ option(packaging "Create distribution packages" FALSE)
option(lint "Run clang-tidy checks during compilation" FALSE)
option(static "Statically linked Clio" FALSE)
option(snapshot "Build snapshot tool" FALSE)
option(time_trace "Build using -ftime-trace to create compiler trace reports" FALSE)
# ========================================================================== #
set(san "" CACHE STRING "Add sanitizer instrumentation")
View File
@@ -1,57 +1,39 @@
# Contributing
Thank you for your interest in contributing to the `clio` project 🙏
## Workflow
To contribute, please:
1. Fork the repository under your own user.
2. Create a new branch on which to commit/push your changes.
3. Write and test your code.
4. Ensure that your code compiles with the provided build engine, and update the build engine as part of your PR where needed.
5. Where applicable, write test cases for your code and include those in the relevant subfolder under `tests`.
6. Ensure your code passes [automated checks](#pre-commit-hooks)
6. Ensure your code passes automated checks (e.g. clang-format)
7. Squash your commits (i.e. rebase) into as few commits as is reasonable to describe your changes at a high level (typically a single commit for a small change). See below for more details.
8. Open a PR to the main repository onto the _develop_ branch, and follow the provided template.
> **Note:** Please read the [Style guide](#style-guide).
### `git lfs` hooks
## Install git hooks
Please run the following command in order to use git hooks that are helpful for `clio` development.
Install `git lfs` hooks using the following command:
```bash
git lfs install
``` bash
git config --local core.hooksPath .githooks
```
> **Note:** You need to install Git LFS hooks before installing `pre-commit` hooks.
### `pre-commit` hooks
To ensure code quality and style, we use [`pre-commit`](https://pre-commit.com/).
Run the following command to enable `pre-commit` hooks that help with Clio development:
```bash
pip3 install pre-commit
pre-commit install --hook-type pre-commit --hook-type pre-push
```
`pre-commit` takes care of running each tool in [`.pre-commit-config.yaml`](https://github.com/XRPLF/clio/blob/develop/.pre-commit-config.yaml) in a separate environment.
`pre-commit` also attempts to automatically use Doxygen to verify that everything public in the codebase has doc comments.
If Doxygen is not installed, the hook issues a warning and recommends installing Doxygen for future commits.
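To try the hooks manually, you can invoke a single hook by its id from [`.pre-commit-config.yaml`](https://github.com/XRPLF/clio/blob/develop/.pre-commit-config.yaml). A minimal sketch, assuming `pre-commit` is installed as above (`src/main.cpp` is a placeholder path):

```bash
# Run one hook across the whole repository
pre-commit run check-docs --all-files

# Or limit a hook to the files you changed
pre-commit run clang-format --files src/main.cpp
```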
### Git commands
## Git hooks dependencies
The pre-commit hook requires `clang-format >= 19.0.0` and `cmake-format` to be installed on your machine.
`clang-format` can be installed using `brew` on macOS and default package manager on Linux.
`cmake-format` can be installed using `pip`.
The hook will also attempt to automatically use `doxygen` to verify that everything public in the codebase is covered by doc comments. If `doxygen` is not installed, the hook will raise a warning suggesting to install `doxygen` for future commits.
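A minimal sketch of installing these dependencies, assuming macOS with Homebrew or a Debian-based Linux (package names may differ on your system):

```bash
# macOS
brew install clang-format

# Debian/Ubuntu (a versioned package such as clang-format-19 may be required)
sudo apt install clang-format

# cmake-format is distributed via pip
pip3 install cmake-format
```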
## Git commands
This section offers a detailed look at the git commands you will need to use to get your PR submitted.
Please note that there is more than one way to do this; these commands are provided for your convenience.
At this point it's assumed that you have already finished working on your feature/bug.
> **Important:** Before you issue any of the commands below, please hit the `Sync fork` button and make sure your fork's `develop` branch is up-to-date with the main `clio` repository.
```bash
``` bash
# Create a backup of your branch
git branch <your feature branch>_bk
@@ -61,20 +43,18 @@ git pull origin develop
git checkout <your feature branch>
git rebase -i develop
```
For each commit in the list other than the first one, enter `s` to squash.
After this is done, you will have the opportunity to write a message for the squashed commit.
> **Hint:** Please use **imperative mood** in the commit message, and capitalize the first word.
```bash
``` bash
# You should now have a single commit on top of a commit in `develop`
git log
```
> **Note:** If there are merge conflicts, please resolve them now.
```bash
``` bash
# Use the same commit message as you did above
git commit -m 'Your message'
git rebase --continue
@@ -82,30 +62,27 @@ git rebase --continue
> **Important:** If you have no GPG keys set up, please follow [this tutorial](https://docs.github.com/en/authentication/managing-commit-signature-verification/adding-a-gpg-key-to-your-github-account)
```bash
``` bash
# Sign the commit with your GPG key, and push your changes
git commit --amend -S
git push --force
```
### Use ccache (optional)
## Use ccache (optional)
Clio uses `ccache` to speed up compilation. If you want to use it, please make sure it is installed on your machine.
CMake will automatically detect it and use it if it is available.
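As a quick sketch, you can verify that `ccache` is installed and actually being hit by checking its statistics after a build (the install command assumes macOS with Homebrew; use your distribution's package manager on Linux):

```bash
# Install ccache
brew install ccache

# After building Clio, confirm cache hits are being recorded
ccache --show-stats
```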
### Opening a pull request
## Opening a pull request
When a pull request is open CI will perform checks on the new code.
Title of the pull request and squashed commit should follow [conventional commits specification](https://www.conventionalcommits.org/en/v1.0.0/).
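For example, a title accepted by the CI check uses one of the types `build`, `feat`, `fix`, `docs`, `test`, `ci`, `style`, `refactor`, `perf` or `chore`; the messages below are purely illustrative:

```bash
# type(optional scope): description in imperative mood
git commit -m "fix: Handle empty response from rippled"
git commit -m "feat(rpc): Forward ledger_index current requests"
```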
### Fixing issues found during code review
## Fixing issues found during code review
While your code is in review, it's possible that some changes will be requested by reviewer(s).
This section describes the process of adding your fixes.
We assume that you already made the required changes on your feature branch.
```bash
``` bash
# Add the changed code
git add <paths to add>
@@ -117,72 +94,63 @@ git commit -S -m "[FOLD] Your commit message"
git push
```
### After code review
## After code review
When your PR is approved and ready to merge, use `Squash and merge`.
The button for that is near the bottom of the PR's page on GitHub.
> **Important:** Please leave the automatically-generated mention/link to the PR in the subject line **and** in the description field add `"Fix #ISSUE_ID"` (replacing `ISSUE_ID` with yours) if the PR fixes an issue.
> **Note:** See [issues](https://github.com/XRPLF/clio/issues) to find the `ISSUE_ID` for the feature/bug you were working on.
## Style guide
# Style guide
This is a non-exhaustive list of recommended style guidelines. These are not always strictly enforced and serve as a way to keep the codebase coherent.
### Formatting
Code must conform to `clang-format`, unless the result is unreasonably difficult to read or maintain.
In most cases the `pre-commit` hook takes care of formatting and fixes any issues automatically.
To manually format your code, run `pre-commit run clang-format --files <your changed files>` for C++ files, and `pre-commit run cmake-format --files <your changed files>` for CMake files.
### Documentation
## Formatting
Code must conform to `clang-format` version 19, unless the result would be unreasonably difficult to read or maintain.
In most cases the pre-commit hook will take care of formatting and will fix any issues automatically.
To manually format your code, use `clang-format -i <your changed files>` for C++ files and `cmake-format -i <your changed files>` for CMake files.
## Documentation
All public namespaces, classes and functions must be covered by doc (`doxygen`) comments. Everything that is not within a nested `impl` namespace is considered public.
> **Note:** Keep in mind that this is enforced by Clio's CI and your build will fail if newly added public code lacks documentation.
### Avoid
## Avoid
* Proliferation of nearly identical code.
* Proliferation of new files and classes unless it improves readability or/and compilation time.
* Unmanaged memory allocation and raw pointers.
* Macros (unless they add significant value.)
* Lambda patterns (unless these add significant value.)
* CPU or architecture-specific code unless there is a good reason to include it, and where it is used guard it with macros and provide explanatory comments.
* Importing new libraries unless there is a very good reason to do so.
- Proliferation of nearly identical code.
- Proliferation of new files and classes unless it improves readability or/and compilation time.
- Unmanaged memory allocation and raw pointers.
- Macros (unless they add significant value.)
- Lambda patterns (unless these add significant value.)
- CPU or architecture-specific code unless there is a good reason to include it, and where it is used guard it with macros and provide explanatory comments.
- Importing new libraries unless there is a very good reason to do so.
### Seek to
- Extend functionality of existing code rather than creating new code.
- Prefer readability over terseness where important logic is concerned.
- Inline functions that are not used or are not likely to be used elsewhere in the codebase.
- Use clear and self-explanatory names for functions, variables, structs and classes.
- Use TitleCase for classes, structs and filenames, camelCase for function and variable names, lower case for namespaces and folders.
- Provide as many comments as you feel that a competent programmer would need to understand what your code does.
## Maintainers
## Seek to
* Extend functionality of existing code rather than creating new code.
* Prefer readability over terseness where important logic is concerned.
* Inline functions that are not used or are not likely to be used elsewhere in the codebase.
* Use clear and self-explanatory names for functions, variables, structs and classes.
* Use TitleCase for classes, structs and filenames, camelCase for function and variable names, lower case for namespaces and folders.
* Provide as many comments as you feel that a competent programmer would need to understand what your code does.
# Maintainers
Maintainers are ecosystem participants with elevated access to the repository. They are able to push new code, make decisions on when a release should be made, etc.
### Code Review
## Code Review
A PR must be reviewed and approved by at least one of the maintainers before it can be merged.
### Adding and Removing
## Adding and Removing
New maintainers can be proposed by two existing maintainers, subject to a vote by a quorum of the existing maintainers. A minimum of 50% support and 50% participation are required. In the event of a tie vote, the addition of the new maintainer will be rejected.
Existing maintainers can resign, or be subject to a vote for removal at the behest of two existing maintainers. A minimum of 60% agreement and 50% participation are required. The XRP Ledger Foundation will have the ability, for cause, to remove an existing maintainer without a vote.
### Existing Maintainers
## Existing Maintainers
- [godexsoft](https://github.com/godexsoft) (Ripple)
- [kuznetsss](https://github.com/kuznetsss) (Ripple)
- [legleux](https://github.com/legleux) (Ripple)
- [PeterChen13579](https://github.com/PeterChen13579) (Ripple)
* [godexsoft](https://github.com/godexsoft) (Ripple)
* [kuznetsss](https://github.com/kuznetsss) (Ripple)
* [legleux](https://github.com/legleux) (Ripple)
* [PeterChen13579](https://github.com/PeterChen13579) (Ripple)
### Honorable ex-Maintainers
## Honorable ex-Maintainers
- [cindyyan317](https://github.com/cindyyan317) (ex-Ripple)
- [cjcobb23](https://github.com/cjcobb23) (ex-Ripple)
- [natenichols](https://github.com/natenichols) (ex-Ripple)
* [cindyyan317](https://github.com/cindyyan317) (ex-Ripple)
* [cjcobb23](https://github.com/cjcobb23) (ex-Ripple)
* [natenichols](https://github.com/natenichols) (ex-Ripple)
View File
@@ -1,7 +1,8 @@
ISC License
Copyright (c) 2022, the clio developers
Copyright (c) 2022, the clio developers
Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
View File
@@ -1,4 +1,4 @@
# <img src='./docs/img/xrpl-logo.svg' width='40' valign="top" /> Clio <!-- markdownlint-disable-line MD033 MD045 -->
# <img src='./docs/img/xrpl-logo.svg' width='40' valign="top" /> Clio
[![Build status](https://github.com/XRPLF/clio/actions/workflows/build.yml/badge.svg?branch=develop)](https://github.com/XRPLF/clio/actions/workflows/build.yml?query=branch%3Adevelop)
[![Nightly release status](https://github.com/XRPLF/clio/actions/workflows/nightly.yml/badge.svg?branch=develop)](https://github.com/XRPLF/clio/actions/workflows/nightly.yml?query=branch%3Adevelop)
@@ -16,9 +16,9 @@ Multiple Clio nodes can share access to the same dataset, which allows for a hig
Clio offers the full `rippled` API, with the caveat that Clio by default only returns validated data. This means that `ledger_index` defaults to `validated` instead of `current` for all requests. Other non-validated data, such as information about queued transactions, is also not returned.
Clio retrieves data from a designated group of `rippled` nodes instead of connecting to the peer-to-peer network.
For requests that require access to the peer-to-peer network, such as `fee` or `submit`, Clio automatically forwards the request to a `rippled` node and propagates the response back to the client. To access non-validated data for _any_ request, simply add `ledger_index: "current"` to the request, and Clio will forward the request to `rippled`.
For requests that require access to the peer-to-peer network, such as `fee` or `submit`, Clio automatically forwards the request to a `rippled` node and propagates the response back to the client. To access non-validated data for *any* request, simply add `ledger_index: "current"` to the request, and Clio will forward the request to `rippled`.
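As an illustrative sketch, such a request could look like the following; the port (51233) and account are placeholder values for a hypothetical local Clio instance:

```bash
# Request non-validated data; Clio forwards this to rippled
curl -s -X POST http://localhost:51233 \
  -H 'Content-Type: application/json' \
  -d '{"method": "account_info", "params": [{"account": "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh", "ledger_index": "current"}]}'
```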
> [!NOTE]
> [!NOTE]
> Clio requires access to at least one `rippled` node, which can run on the same machine as Clio or separately.
## 📚 Learn more about Clio
View File
@@ -26,7 +26,7 @@ set(COMPILER_FLAGS
# TODO: Address these and others in https://github.com/XRPLF/clio/issues/1273
)
# TODO: re-enable when we change CI #884 if (is_gcc AND NOT lint) list(APPEND COMPILER_FLAGS -Wduplicated-branches
# TODO: reenable when we change CI #884 if (is_gcc AND NOT lint) list(APPEND COMPILER_FLAGS -Wduplicated-branches
# -Wduplicated-cond -Wlogical-op -Wuseless-cast ) endif ()
if (is_clang)
@@ -70,12 +70,4 @@ endif ()
# See https://github.com/cpp-best-practices/cppbestpractices/blob/master/02-Use_the_Tools_Available.md#gcc--clang for
# the flags description
if (time_trace)
if (is_clang OR is_appleclang)
list(APPEND COMPILER_FLAGS -ftime-trace)
else ()
message(FATAL_ERROR "Clang or AppleClang is required to use `-ftime-trace`")
endif ()
endif ()
target_compile_options(clio_options INTERFACE ${COMPILER_FLAGS})
View File
@@ -1,7 +1,6 @@
from conan import ConanFile
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
class Clio(ConanFile):
name = 'clio'
license = 'ISC'
@@ -21,7 +20,6 @@ class Clio(ConanFile):
'coverage': [True, False], # build for test coverage report; create custom target `clio_tests-ccov`
'lint': [True, False], # run clang-tidy checks during compilation
'snapshot': [True, False], # build export/import snapshot tool
'time_trace': [True, False] # build using -ftime-trace to create compiler trace reports
}
requires = [
@@ -31,7 +29,7 @@ class Clio(ConanFile):
'protobuf/3.21.9',
'grpc/1.50.1',
'openssl/1.1.1v',
'xrpl/2.5.0-b1',
'xrpl/2.4.0',
'zlib/1.3.1',
'libbacktrace/cci.20210118'
]
@@ -48,8 +46,7 @@ class Clio(ConanFile):
'lint': False,
'docs': False,
'snapshot': False,
'time_trace': False,
'xrpl/*:tests': False,
'xrpl/*:rocksdb': False,
'cassandra-cpp-driver/*:shared': False,
@@ -81,12 +78,11 @@ class Clio(ConanFile):
def layout(self):
cmake_layout(self)
# Fix this setting to follow the default introduced in Conan 1.48
# Fix this setting to follow the default introduced in Conan 1.48
# to align with our build instructions.
self.folders.generators = 'build/generators'
generators = 'CMakeDeps'
def generate(self):
tc = CMakeToolchain(self)
tc.variables['verbose'] = self.options.verbose
@@ -99,7 +95,6 @@ class Clio(ConanFile):
tc.variables['packaging'] = self.options.packaging
tc.variables['benchmark'] = self.options.benchmark
tc.variables['snapshot'] = self.options.snapshot
tc.variables['time_trace'] = self.options.time_trace
tc.generate()
def build(self):
View File
@@ -4,7 +4,6 @@ This image contains an environment to build [Clio](https://github.com/XRPLF/clio
It is used in [Clio Github Actions](https://github.com/XRPLF/clio/actions) but can also be used to compile Clio locally.
The image is based on Ubuntu 20.04 and contains:
- clang 16.0.6
- gcc 12.3
- doxygen 1.12
@@ -14,4 +13,4 @@ The image is based on Ubuntu 20.04 and contains:
- and some other useful tools
Conan is set up to build Clio without any additional steps. There are two preset conan profiles, `clang` and `gcc`, to use the corresponding compiler. By default conan is set up to use `gcc`.
Sanitizer builds for `ASAN`, `TSAN` and `UBSAN` are enabled via conan profiles for each of the supported compilers. These can be selected using the following pattern (all lowercase): `[compiler].[sanitizer]` (e.g. `--profile gcc.tsan`).
Sanitizer builds for `ASAN`, `TSAN` and `UBSAN` are enabled via conan profiles for each of the supported compilers. These can be selected using the following pattern (all lowercase): `[compiler].[sanitizer]` (e.g. `--profile gcc.tsan`).
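For example, selecting the TSAN build of gcc when installing dependencies might look like this sketch (Conan 1.x syntax, run from a build directory inside the image; the `--build missing` policy is an assumption):

```bash
conan install .. --profile gcc.tsan --build missing
```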
View File
@@ -2,36 +2,27 @@ FROM rippleci/clio_clang:16
ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Using root by default is not very secure, but the GitHub checkout action doesn't work with any other user
# https://github.com/actions/checkout/issues/956
# And Github Actions doc recommends using root
# https://docs.github.com/en/actions/sharing-automations/creating-actions/dockerfile-support-for-github-actions#user
# hadolint ignore=DL3002
SHELL ["/bin/bash", "-c"]
USER root
WORKDIR /root
ENV CCACHE_VERSION=4.10.2 \
LLVM_TOOLS_VERSION=19 \
GH_VERSION=2.40.0 \
DOXYGEN_VERSION=1.12.0 \
CLANG_BUILD_ANALYZER_VERSION=1.6.0 \
GIT_CLIFF_VERSION=2.8.0
DOXYGEN_VERSION=1.12.0
# Add repositories
RUN apt-get -qq update \
&& apt-get -qq install -y --no-install-recommends --no-install-suggests gnupg wget curl software-properties-common \
&& echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-${LLVM_TOOLS_VERSION} main" >> /etc/apt/sources.list \
&& wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - && \
apt-get clean && rm -rf /var/lib/apt/lists/*
&& wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
# Install packages
RUN apt update -qq \
&& apt install -y --no-install-recommends --no-install-suggests python3 python3-pip git git-lfs make ninja-build flex bison jq graphviz \
clang-tidy-${LLVM_TOOLS_VERSION} clang-tools-${LLVM_TOOLS_VERSION} \
&& pip3 install -q --upgrade --no-cache-dir pip && pip3 install -q --no-cache-dir conan==1.62 gcovr cmake==3.31.6 pre-commit \
clang-format-${LLVM_TOOLS_VERSION} clang-tidy-${LLVM_TOOLS_VERSION} clang-tools-${LLVM_TOOLS_VERSION} \
&& update-alternatives --install /usr/bin/clang-format clang-format /usr/bin/clang-format-${LLVM_TOOLS_VERSION} 100 \
&& pip3 install -q --upgrade --no-cache-dir pip && pip3 install -q --no-cache-dir conan==1.62 gcovr cmake cmake-format \
&& apt-get clean && apt remove -y software-properties-common
# Install gcc-12 and make ldconfig aware of the new libstdc++ location (for gcc)
@@ -71,25 +62,17 @@ RUN wget "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN
&& cmake --build . --target install \
&& rm -rf /tmp/* /var/tmp/*
# Install ClangBuildAnalyzer
RUN wget "https://github.com/aras-p/ClangBuildAnalyzer/releases/download/v${CLANG_BUILD_ANALYZER_VERSION}/ClangBuildAnalyzer-linux" \
&& chmod +x ClangBuildAnalyzer-linux \
&& mv ClangBuildAnalyzer-linux /usr/bin/ClangBuildAnalyzer \
&& rm -rf /tmp/* /var/tmp/*
# Install git-cliff
RUN wget "https://github.com/orhun/git-cliff/releases/download/v${GIT_CLIFF_VERSION}/git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz" \
&& tar xf git-cliff-${GIT_CLIFF_VERSION}-x86_64-unknown-linux-musl.tar.gz \
&& mv git-cliff-${GIT_CLIFF_VERSION}/git-cliff /usr/bin/git-cliff \
&& rm -rf /tmp/* /var/tmp/*
# Install gh
RUN wget "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz" \
RUN wget https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
&& tar xf gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
&& mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/bin/gh \
&& rm -rf /tmp/* /var/tmp/*
WORKDIR /root
# Using root by default is not very secure, but the GitHub checkout action doesn't work with any other user
# https://github.com/actions/checkout/issues/956
# And the GitHub Actions doc recommends using root
# https://docs.github.com/en/actions/creating-actions/dockerfile-support-for-github-actions#user
# Setup conan
RUN conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
@@ -112,7 +95,7 @@ RUN conan profile new clang --detect \
&& conan profile update env.CC=/usr/bin/clang-16 clang \
&& conan profile update env.CXX=/usr/bin/clang++-16 clang \
&& conan profile update env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS" clang \
&& conan profile update "conf.tools.build:compiler_executables={\"c\": \"/usr/bin/clang-16\", \"cpp\": \"/usr/bin/clang++-16\"}" clang
RUN echo "include(gcc)" >> .conan/profiles/default
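The last line above makes `gcc` the image default; switching the default to clang would be the mirror image (a sketch, not part of the actual Dockerfile):

```sh
# Sketch: point the default profile at the clang profile created above.
echo "include(clang)" > ~/.conan/profiles/default
```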

View File

@@ -12,14 +12,12 @@ Your configuration file should be mounted under the path `/opt/clio/etc/config.j
Clio repository provides an [example](https://github.com/XRPLF/clio/blob/develop/docs/examples/config/example-config.json) of the configuration file.
Config file recommendations:
- Set `log_to_console` to `false` if you want to avoid logs being written to `stdout`.
- Set `log_directory` to `/opt/clio/log` to store logs in a volume.
## Usage
The following command can be used to run Clio in docker (assuming server's port is `51233` in your config):
```bash
docker run -d -v <path to your config.json>:/opt/clio/etc/config.json -v <path to store logs>:/opt/clio/log -p 51233:51233 rippleci/clio
```

View File

@@ -1,10 +1,8 @@
FROM ubuntu:focal
ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH
SHELL ["/bin/bash", "-c"]
# hadolint ignore=DL3002
USER root
WORKDIR /root

View File

@@ -43,7 +43,7 @@ RUN /gcc-$GCC_VERSION/configure \
--disable-multilib \
--without-cuda-driver \
--enable-checking=release \
&& make -j "$(nproc)" \
&& make -j`nproc` \
&& make install-strip DESTDIR=/gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION \
&& mkdir -p /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/share/gdb/auto-load/usr/lib64 \
&& mv /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/lib64/libstdc++.so.6.0.30-gdb.py /gcc-$GCC_VERSION-$BUILD_VERSION-ubuntu-$UBUNTU_VERSION/usr/share/gdb/auto-load/usr/lib64/libstdc++.so.6.0.30-gdb.py
@@ -63,7 +63,7 @@ COPY --from=build /gcc12.deb /
# Make gcc-12 available but also leave gcc12.deb for others to copy if needed
RUN apt update && apt-get install -y binutils libc6-dev \
&& dpkg -i /gcc12.deb
RUN update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 100 \
&& update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++-12 100 \

View File

@@ -1,6 +1,6 @@
services:
clio_develop:
image: ghcr.io/xrplf/clio-ci:latest
image: rippleci/clio_ci:latest
volumes:
- clio_develop_conan_data:/root/.conan/data
- clio_develop_ccache:/root/.ccache

View File

@@ -59,3 +59,4 @@ case $1 in
esac
popd > /dev/null

View File

@@ -3,8 +3,6 @@ project(docs)
include(${CMAKE_CURRENT_SOURCE_DIR}/../cmake/ClioVersion.cmake)
# cmake-format: off
# Generate `docs` target for doxygen documentation
# Note: use `cmake --build . --target docs` from your `build` directory to generate the documentation
# cmake-format: on
include(${CMAKE_CURRENT_SOURCE_DIR}/../cmake/Docs.cmake)
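As the note above says, the target is invoked from the build directory; a minimal sketch:

```sh
# Sketch: generate the Doxygen documentation via the target declared above.
cd build && cmake --build . --target docs
```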

View File

@@ -34,7 +34,7 @@ FULL_SIDEBAR = NO
HTML_HEADER = ${SOURCE}/docs/doxygen-awesome-theme/header.html
HTML_EXTRA_STYLESHEET = ${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome.css \
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-sidebar-only.css \
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-sidebar-only-darkmode-toggle.css
HTML_EXTRA_FILES = ${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-darkmode-toggle.js \
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-interactive-toc.js

View File

@@ -1,32 +1,31 @@
# How to build Clio
`Clio` is built with [CMake](https://cmake.org/) and uses [Conan](https://conan.io/) for managing dependencies.
`Clio` is written in C++23 and therefore requires a modern compiler.
Clio is built with [CMake](https://cmake.org/) and uses [Conan](https://conan.io/) for managing dependencies. It is written in C++20 and therefore requires a modern compiler.
## Minimum Requirements
- [Python 3.7](https://www.python.org/downloads/)
- [Conan 1.55, <2.0](https://conan.io/downloads.html)
- [CMake 3.20, <4.0](https://cmake.org/download/)
- [Conan 1.55](https://conan.io/downloads.html)
- [CMake 3.20](https://cmake.org/download/)
- [**Optional**] [GCovr](https://gcc.gnu.org/onlinedocs/gcc/Gcov.html): needed for code coverage generation
- [**Optional**] [CCache](https://ccache.dev/): speeds up compilation if you are going to compile Clio often
| Compiler | Version |
| ----------- | ------- |
| GCC | 12.3 |
| Clang | 16 |
| Apple Clang | 15 |
### Conan Configuration
Clio requires `compiler.cppstd=20` in your Conan profile (`~/.conan/profiles/default`).
Clio does not require anything other than `compiler.cppstd=20` in your Conan profile (`~/.conan/profiles/default`).
> [!NOTE]
> Although Clio is built using C++23, it's required to set `compiler.cppstd=20` for the time being as some of Clio's dependencies are not yet capable of building under C++23.
**Mac example**:
> Mac example:
```text
```
[settings]
os=Macos
os_build=Macos
@@ -41,9 +40,9 @@ compiler.cppstd=20
tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]
```
**Linux example**:
> Linux example:
```text
```
[settings]
os=Linux
os_build=Linux
@@ -81,8 +80,7 @@ Navigate to Clio's root directory and run:
```sh
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True
# You can also add -GNinja to use Ninja build system instead of Make
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
```
@@ -95,23 +93,21 @@ If successful, `conan install` will find the required packages and `cmake` will
> [!TIP]
> To generate a Code Coverage report, include `-o coverage=True` in the `conan install` command above, along with `-o tests=True` to enable tests. After running the `cmake` commands, execute `make clio_tests-ccov`. The coverage report will be found at `clio_tests-llvm-cov/index.html`.
<!-- markdownlint-disable-line MD028 -->
> [!NOTE]
> If you've built Clio before and the build is now failing, it's likely due to updated dependencies. Try deleting the build folder and then rerunning the Conan and CMake commands mentioned above.
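Putting the tip above together, an end-to-end coverage sketch (target name and report path exactly as stated in the tip):

```sh
# Sketch of the coverage flow described above.
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o coverage=True
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
make clio_tests-ccov
# The report lands at clio_tests-llvm-cov/index.html
```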
### Generating API docs for Clio
The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen 1.12.0 on your system.
The API documentation for Clio is generated by [Doxygen](https://www.doxygen.nl/index.html). If you want to generate the API documentation when building Clio, make sure to install Doxygen on your system.
To generate the API docs:
1. First, include `-o docs=True` in the conan install command. For example:
```sh
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o docs=True
```
```sh
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False -o docs=True
```
2. Once that has completed successfully, run the `cmake` command and add the `--target docs` option:
@@ -122,19 +118,19 @@ To generate the API docs:
3. Go to `build/docs/html` to view the generated files.
Open the `index.html` file in your browser to see the documentation pages.
![API index page](./img/doxygen-docs-output.png "API index page")
## Building Clio with Docker
It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.
```sh
docker run -it ghcr.io/xrplf/clio-ci:latest
docker run -it rippleci/clio_ci:latest
git clone https://github.com/XRPLF/clio
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
```
@@ -150,56 +146,54 @@ If you wish to develop against a `rippled` instance running in standalone mode t
Sometimes, during development, you need to build against a custom version of `libxrpl`. (For example, you may be developing compatibility for a proposed amendment that is not yet merged to the main `rippled` codebase.) To build Clio with compatibility for a custom fork or branch of `rippled`, follow these steps:
1. First, pull/clone the appropriate `rippled` fork and switch to the branch you want to build.
The following example uses an in-development build with [XLS-33d Multi-Purpose Tokens](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0033d-multi-purpose-tokens):
1. First, pull/clone the appropriate `rippled` fork and switch to the branch you want to build. For example, the following example uses an in-development build with [XLS-33d Multi-Purpose Tokens](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0033d-multi-purpose-tokens):
```sh
git clone https://github.com/shawnxie999/rippled/
cd rippled
git switch mpt-1.1
```
2. Export a custom package to your local Conan store using a user/channel:
```sh
conan export . my/feature
```
3. Patch your local Clio build to use the right package.
Edit `conanfile.py` (from the Clio repository root). Replace the `xrpl` requirement with the custom package version from the previous step. This must also include the current version number from your `rippled` branch. For example:
```py
# ... (excerpt from conanfile.py)
requires = [
'boost/1.83.0',
'cassandra-cpp-driver/2.17.0',
'fmt/10.1.1',
'protobuf/3.21.9',
'grpc/1.50.1',
'openssl/1.1.1v',
'xrpl/2.3.0-b1@my/feature', # Update this line
'zlib/1.3.1',
'libbacktrace/cci.20210118'
]
```
```py
# ... (excerpt from conanfile.py)
requires = [
'boost/1.82.0',
'cassandra-cpp-driver/2.17.0',
'fmt/10.1.1',
'protobuf/3.21.9',
'grpc/1.50.1',
'openssl/1.1.1u',
'xrpl/2.3.0-b1@my/feature', # Update this line
'libbacktrace/cci.20210118'
]
```
4. Build Clio as you would have before.
See [Building Clio](#building-clio) for details.
## Using `clang-tidy` for static analysis
The minimum [clang-tidy](https://clang.llvm.org/extra/clang-tidy/) version required is 19.0.
Clang-tidy can be run by CMake when building the project. To achieve this, you just need to provide the option `-o lint=True` for the `conan install` command:
```sh
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=True
```
By default CMake will try to find `clang-tidy` automatically in your system.
To force CMake to use your desired binary, set the `CLIO_CLANG_TIDY_BIN` environment variable to the path of the `clang-tidy` binary. For example:
```sh
export CLIO_CLANG_TIDY_BIN=/opt/homebrew/opt/llvm@19/bin/clang-tidy

View File

@@ -381,7 +381,7 @@ This document provides a list of all available Clio configuration properties in
- **Type**: int
- **Default value**: `48`
- **Constraints**: The minimum value is `1`. The maximum value is `65535`.
- **Description**: Specifies how many markers are placed randomly within the cache. These markers define the positions on the ledger that will be loaded concurrently by the workers. The higher the number, the more places within the cache we potentially cover.
### cache.num_cursors_from_diff
@@ -445,7 +445,7 @@ This document provides a list of all available Clio configuration properties in
- **Type**: string
- **Default value**: `%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%`
- **Constraints**: None
- **Description**: The format string for log messages. The format is described here: <https://www.boost.org/doc/libs/1_83_0/libs/log/doc/html/log/tutorial/formatters.html>.
- **Description**: The format string for log messages. The format is described here: https://www.boost.org/doc/libs/1_83_0/libs/log/doc/html/log/tutorial/formatters.html.
### log_to_console

View File

@@ -53,7 +53,7 @@ Here is an example snippet from the config file:
## SSL
The parameters `ssl_cert_file` and `ssl_key_file` can also be added to the top level of precedence of our Clio config. The `ssl_cert_file` field specifies the filepath for your SSL cert, while `ssl_key_file` specifies the filepath for your SSL key. It is up to you how to change ownership of these folders for your designated Clio user.
Your options include:

View File

@@ -2,144 +2,146 @@
* This is an example configuration file. Please do not use without modifying to suit your needs.
*/
{
"database": {
"type": "cassandra",
"cassandra": {
"contact_points": "127.0.0.1",
"port": 9042,
"keyspace": "clio",
"replication_factor": 1,
"table_prefix": "",
"max_write_requests_outstanding": 25000,
"max_read_requests_outstanding": 30000,
"threads": 8,
//
// Advanced options. USE AT OWN RISK:
// ---
"core_connections_per_host": 1, // Defaults to 1
"write_batch_size": 20 // Defaults to 20
//
// Below options will use defaults from cassandra driver if left unspecified.
// See https://docs.datastax.com/en/developer/cpp-driver/2.17/api/struct.CassCluster/ for details.
//
// "queue_size_io": 2
//
// ---
"database": {
"type": "cassandra",
"cassandra": {
"contact_points": "127.0.0.1",
"port": 9042,
"keyspace": "clio",
"replication_factor": 1,
"table_prefix": "",
"max_write_requests_outstanding": 25000,
"max_read_requests_outstanding": 30000,
"threads": 8,
//
// Advanced options. USE AT OWN RISK:
// ---
"core_connections_per_host": 1, // Defaults to 1
"write_batch_size": 20 // Defaults to 20
//
// Below options will use defaults from cassandra driver if left unspecified.
// See https://docs.datastax.com/en/developer/cpp-driver/2.17/api/struct.CassCluster/ for details.
//
// "queue_size_io": 2
//
// ---
}
},
"allow_no_etl": false, // Allow Clio to run without valid ETL source, otherwise Clio will stop if ETL check fails
"etl_sources": [
{
"ip": "127.0.0.1",
"ws_port": "6005",
"grpc_port": "50051"
}
],
"forwarding": {
"cache_timeout": 0.250, // in seconds, could be 0, which means no cache
"request_timeout": 10.0 // time for Clio to wait for rippled to reply on a forwarded request (default is 10 seconds)
},
"rpc": {
"cache_timeout": 0.5 // in seconds, could be 0, which means no cache for rpc
},
"dos_guard": {
// Comma-separated list of IPs to exclude from rate limiting
"whitelist": [
"127.0.0.1"
],
//
// The below values are the default values and are only specified here
// for documentation purposes. The rate limiter currently limits
// connections and bandwidth per IP. The rate limiter looks at the raw
// IP of a client connection, and so requests routed through a load
// balancer will all have the same IP and be treated as a single client.
//
"max_fetches": 1000000, // Max bytes per IP per sweep interval
"max_connections": 20, // Max connections per IP
"max_requests": 20, // Max connections per IP per sweep interval
"sweep_interval": 1 // Time in seconds before resetting max_fetches and max_requests
},
"server": {
"ip": "0.0.0.0",
"port": 51233,
// Max number of requests to queue up before rejecting further requests.
// Defaults to 0, which disables the limit.
"max_queue_size": 500,
// If request contains header with authorization, Clio will check if it matches the prefix 'Password ' + this value's sha256 hash
// If matches, the request will be considered as admin request
"admin_password": "xrp",
// If local_admin is true, Clio will consider requests come from 127.0.0.1 as admin requests
// It's true by default unless admin_password is set,'local_admin' : true and 'admin_password' can not be set at the same time
"local_admin": false,
"processing_policy": "parallel", // Could be "sequent" or "parallel".
// For sequent policy request from one client connection will be processed one by one and the next one will not be read before
// the previous one is processed. For parallel policy Clio will take all requests and process them in parallel and
// send a reply for each request whenever it is ready.
"parallel_requests_limit": 10, // Optional parameter, used only if "processing_strategy" is "parallel". It limits the number of requests for one client connection processed in parallel. Infinite if not specified.
// Max number of responses to queue up before sent successfully. If a client's waiting queue is too long, the server will close the connection.
"ws_max_sending_queue_size": 1500,
"__ng_web_server": false // Use ng web server. This is a temporary setting which will be deleted after switching to ng web server
},
// Time in seconds for graceful shutdown. Defaults to 10 seconds. Not fully implemented yet.
"graceful_period": 10.0,
// Overrides log level on a per logging channel.
// Defaults to global "log_level" for each unspecified channel.
"log_channels": [
{
"channel": "Backend",
"log_level": "fatal"
},
{
"channel": "WebServer",
"log_level": "info"
},
{
"channel": "Subscriptions",
"log_level": "info"
},
{
"channel": "RPC",
"log_level": "error"
},
{
"channel": "ETL",
"log_level": "debug"
},
{
"channel": "Performance",
"log_level": "trace"
}
],
"cache": {
// Configure this to use either "num_diffs", "num_cursors_from_diff", or "num_cursors_from_account". By default, Clio uses "num_diffs".
"num_diffs": 32, // Generate the cursors from the latest ledger diff, then use the cursors to partition the ledger to load concurrently. The cursors number is affected by the busyness of the network.
// "num_cursors_from_diff": 3200, // Read the cursors from the diff table until we have enough cursors to partition the ledger to load concurrently.
// "num_cursors_from_account": 3200, // Read the cursors from the account table until we have enough cursors to partition the ledger to load concurrently.
"num_markers": 48, // The number of markers is the number of coroutines to load the cache concurrently.
"page_fetch_size": 512, // The number of rows to load for each page.
"load": "async" // "sync" to load cache synchronously or "async" to load cache asynchronously or "none"/"no" to turn off the cache.
},
"prometheus": {
"enabled": true,
"compress_reply": true
},
"log_level": "info",
// Log format (this is the default format)
"log_format": "%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%",
"log_to_console": true,
// Clio logs to file in the specified directory only if "log_directory" is set
// "log_directory": "./clio_log",
"log_rotation_size": 2048,
"log_directory_max_size": 51200,
"log_rotation_hour_interval": 12,
"log_tag_style": "uint",
"extractor_threads": 8,
"read_only": false,
// "start_sequence": [integer] the ledger index to start from,
// "finish_sequence": [integer] the ledger index to finish at,
// "ssl_cert_file" : "/full/path/to/cert.file",
// "ssl_key_file" : "/full/path/to/key.file"
"api_version": {
"min": 1, // Minimum API version supported (could be 1 or 2)
"max": 2, // Maximum API version supported (could be 1 or 2, but >= min)
"default": 1 // Clio behaves the same as rippled by default
}
},
"allow_no_etl": false, // Allow Clio to run without valid ETL source, otherwise Clio will stop if ETL check fails
"etl_sources": [
{
"ip": "127.0.0.1",
"ws_port": "6005",
"grpc_port": "50051"
}
],
"forwarding": {
"cache_timeout": 0.25, // in seconds, could be 0, which means no cache
"request_timeout": 10.0 // time for Clio to wait for rippled to reply on a forwarded request (default is 10 seconds)
},
"rpc": {
"cache_timeout": 0.5 // in seconds, could be 0, which means no cache for rpc
},
"dos_guard": {
// Comma-separated list of IPs to exclude from rate limiting
"whitelist": ["127.0.0.1"],
//
// The below values are the default values and are only specified here
// for documentation purposes. The rate limiter currently limits
// connections and bandwidth per IP. The rate limiter looks at the raw
// IP of a client connection, and so requests routed through a load
// balancer will all have the same IP and be treated as a single client.
//
"max_fetches": 1000000, // Max bytes per IP per sweep interval
"max_connections": 20, // Max connections per IP
"max_requests": 20, // Max connections per IP per sweep interval
"sweep_interval": 1 // Time in seconds before resetting max_fetches and max_requests
},
"server": {
"ip": "0.0.0.0",
"port": 51233,
// Max number of requests to queue up before rejecting further requests.
// Defaults to 0, which disables the limit.
"max_queue_size": 500,
// If a request contains an authorization header, Clio will check whether it matches 'Password ' + the sha256 hash of this value
// If it matches, the request will be considered an admin request
"admin_password": "xrp",
// If local_admin is true, Clio will consider requests coming from 127.0.0.1 as admin requests
// It's true by default unless admin_password is set; 'local_admin': true and 'admin_password' cannot be set at the same time
"local_admin": false,
"processing_policy": "parallel", // Could be "sequent" or "parallel".
// For sequent policy request from one client connection will be processed one by one and the next one will not be read before
// the previous one is processed. For parallel policy Clio will take all requests and process them in parallel and
// send a reply for each request whenever it is ready.
"parallel_requests_limit": 10, // Optional parameter, used only if "processing_strategy" is "parallel". It limits the number of requests for one client connection processed in parallel. Infinite if not specified.
// Max number of responses to queue up before sent successfully. If a client's waiting queue is too long, the server will close the connection.
"ws_max_sending_queue_size": 1500,
"__ng_web_server": false // Use ng web server. This is a temporary setting which will be deleted after switching to ng web server
},
// Time in seconds for graceful shutdown. Defaults to 10 seconds. Not fully implemented yet.
"graceful_period": 10.0,
// Overrides log level on a per logging channel.
// Defaults to global "log_level" for each unspecified channel.
"log_channels": [
{
"channel": "Backend",
"log_level": "fatal"
},
{
"channel": "WebServer",
"log_level": "info"
},
{
"channel": "Subscriptions",
"log_level": "info"
},
{
"channel": "RPC",
"log_level": "error"
},
{
"channel": "ETL",
"log_level": "debug"
},
{
"channel": "Performance",
"log_level": "trace"
}
],
"cache": {
// Configure this to use either "num_diffs", "num_cursors_from_diff", or "num_cursors_from_account". By default, Clio uses "num_diffs".
"num_diffs": 32, // Generate the cursors from the latest ledger diff, then use the cursors to partition the ledger to load concurrently. The cursors number is affected by the busyness of the network.
// "num_cursors_from_diff": 3200, // Read the cursors from the diff table until we have enough cursors to partition the ledger to load concurrently.
// "num_cursors_from_account": 3200, // Read the cursors from the account table until we have enough cursors to partition the ledger to load concurrently.
"num_markers": 48, // The number of markers is the number of coroutines to load the cache concurrently.
"page_fetch_size": 512, // The number of rows to load for each page.
"load": "async" // "sync" to load cache synchronously or "async" to load cache asynchronously or "none"/"no" to turn off the cache.
},
"prometheus": {
"enabled": true,
"compress_reply": true
},
"log_level": "info",
// Log format (this is the default format)
"log_format": "%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%",
"log_to_console": true,
// Clio logs to file in the specified directory only if "log_directory" is set
// "log_directory": "./clio_log",
"log_rotation_size": 2048,
"log_directory_max_size": 51200,
"log_rotation_hour_interval": 12,
"log_tag_style": "uint",
"extractor_threads": 8,
"read_only": false,
// "start_sequence": [integer] the ledger index to start from,
// "finish_sequence": [integer] the ledger index to finish at,
// "ssl_cert_file" : "/full/path/to/cert.file",
// "ssl_key_file" : "/full/path/to/key.file"
"api_version": {
"min": 1, // Minimum API version supported (could be 1 or 2)
"max": 2, // Maximum API version supported (could be 1 or 2, but >= min)
"default": 1 // Clio behaves the same as rippled by default
}
}
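A short sketch of pointing a locally built server at this file; the binary name and paths are assumptions for illustration, not taken verbatim from this diff:

```sh
# Sketch: run Clio with the example config (adjust paths to your setup).
./build/clio_server docs/examples/config/example-config.json
```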

View File

@@ -4,17 +4,16 @@
> This is only an example of Grafana dashboard for Clio. It was created for demonstration purposes only and may contain errors.
> Clio team would not recommend to relate on data from this dashboard or use it for monitoring your Clio instances.
This directory contains an example of docker based infrastructure to collect and visualize metrics from clio.
This directory contains an example of docker based infrastructure to collect and visualise metrics from clio.
The structure of the directory:
- `compose.yaml`
Docker Compose file with Prometheus and Grafana set up.
Docker-compose file with Prometheus and Grafana set up.
- `prometheus.yaml`
Defines metrics collection from Clio and Prometheus itself.
Demonstrates how to set up the Clio target and Clio's admin authorization in Prometheus.
Demonstrates how to set up the Clio target and Clio's admin authorisation in Prometheus.
- `grafana/clio_dashboard.json`
JSON file containing a pre-configured dashboard in Grafana format.
- `grafana/dashboard_local.yaml`
Grafana configuration file defining the directory to search for dashboards json files.
- `grafana/datasources.yaml`
@@ -22,9 +21,9 @@ The structure of the directory:
## How to try
1. Make sure you have Docker (with `Docker Compose`) installed.
2. Run `docker compose up -d` from this directory. It will start docker containers with Prometheus and Grafana.
1. Make sure you have `docker` and `docker-compose` installed.
2. Run `docker-compose up -d` from this directory. It will start docker containers with Prometheus and Grafana.
3. Open [http://localhost:3000/dashboards](http://localhost:3000/dashboards). Grafana login `admin`, password `grafana`.
There will be a pre-configured Clio dashboard.
If Clio is not running yet, launch it to see metrics. Some of the metrics may appear only after requests to Clio.

View File

@@ -6,7 +6,7 @@ services:
volumes:
- ./prometheus.yaml:/etc/prometheus/prometheus.yml
command:
- "--config.file=/etc/prometheus/prometheus.yml"
- '--config.file=/etc/prometheus/prometheus.yml'
grafana:
image: grafana/grafana
ports:

View File

@@ -80,7 +80,9 @@
"orientation": "auto",
"percentChangeColorMode": "standard",
"reduceOptions": {
"calcs": ["lastNotNull"],
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
@@ -159,7 +161,9 @@
"orientation": "auto",
"percentChangeColorMode": "standard",
"reduceOptions": {
"calcs": ["lastNotNull"],
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
@@ -242,7 +246,9 @@
"orientation": "auto",
"percentChangeColorMode": "standard",
"reduceOptions": {
"calcs": ["lastNotNull"],
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
@@ -325,7 +331,9 @@
"orientation": "auto",
"percentChangeColorMode": "standard",
"reduceOptions": {
"calcs": ["lastNotNull"],
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
@@ -1398,7 +1406,7 @@
"refId": "B"
}
],
"title": "DB Operations Error Rate",
"title": "DB Opperations Error Rate",
"type": "timeseries"
},
{

View File

@@ -1,13 +1,13 @@
apiVersion: 1
providers:
- name: "Clio dashboard"
- name: 'Clio dashboard'
# <int> Org id. Default to 1
orgId: 1
# <string> name of the dashboard folder.
folder: ""
folder: ''
# <string> folder UID. will be automatically generated if not specified
folderUid: ""
folderUid: ''
# <string> provider type. Default to 'file'
type: file
# <bool> disable dashboard deletion

View File

@@ -1,8 +1,8 @@
apiVersion: 1
datasources:
- name: Prometheus
type: prometheus
url: http://prometheus:9090
isDefault: true
access: proxy

View File

@@ -1,19 +1,19 @@
scrape_configs:
- job_name: clio
scrape_interval: 5s
scrape_timeout: 5s
authorization:
type: Password
# sha256sum from password `xrp`
# use echo -n 'your_password' | shasum -a 256 to get hash
credentials: 0e1dcf1ff020cceabf8f4a60a32e814b5b46ee0bb8cd4af5c814e4071bd86a18
static_configs:
- targets:
- host.docker.internal:51233
- job_name: prometheus
honor_timestamps: true
scrape_interval: 15s
scrape_timeout: 10s
static_configs:
- targets:
- localhost:9090

View File

@@ -8,12 +8,12 @@ The minimum level of severity at which the log message will be outputted by defa
## `log_format`
The format of log lines produced by Clio. Defaults to `"%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%"`.
Each of the variables expands like so:
- `TimeStamp`: The full date and time of the log entry
- `SourceLocation`: A partial path to the c++ file and the line number in said file (`source/file/path:linenumber`)
- `ThreadID`: The ID of the thread the log entry is written from
- `Channel`: The channel that this log entry was sent to
- `Severity`: The severity (aka log level) the entry was sent at
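Purely for illustration, a line in the default format would expand along these lines (all values below are invented for the example, not actual Clio output):

```text
2025-01-01 12:00:00.000000 (BackendInterface.cpp:61) [0x7f0a1c5fe700] Backend:DBG Want finish writes for 1000000
```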
@@ -30,8 +30,8 @@ Each object is of this format:
```json
{
"channel": "Backend",
"log_level": "fatal"
"channel": "Backend",
"log_level": "fatal"
}
```

View File

@@ -3,7 +3,6 @@
## Prerequisites
- Access to a Cassandra cluster or ScyllaDB cluster. Can be local or remote.
> [!IMPORTANT]
> There are some key considerations when using **ScyllaDB**. By default, Scylla reserves all free RAM on a machine for itself. If you are running `rippled` or other services on the same machine, restrict its memory usage using the `--memory` argument.
>
@@ -92,4 +91,4 @@ To completely disable Prometheus metrics add `"prometheus": { "enabled": false }
It is important to know that Clio responds to Prometheus request only if they are admin requests. If you are using the admin password feature, the same password should be provided in the Authorization header of Prometheus requests.
You can find an example Docker Compose file, with Prometheus and Grafana configs, in [examples/infrastructure](../docs/examples/infrastructure/).
You can find an example docker-compose file, with Prometheus and Grafana configs, in [examples/infrastructure](../docs/examples/infrastructure/).

View File

@@ -1,60 +1,47 @@
# Troubleshooting Guide
This guide will help you troubleshoot common issues of Clio.
## Can't connect to DB
If you see the error log message `Could not connect to Cassandra: No hosts available`, this means that Clio can't connect to the database. Check the following:
- Make sure the database is running at the specified address and port.
- Make sure the database is accessible from the machine where Clio is running.
You can use [cqlsh](https://pypi.org/project/cqlsh/) to check the connection to the database.
If you would like to run a local ScyllaDB, you can call:
```sh
docker run --rm -p 9042:9042 --name clio-scylla -d scylladb/scylla
```
## Check the server status of Clio
To check if Clio is syncing with rippled:
```sh
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep seq
```
If Clio is syncing with rippled, the `seq` value will be increasing.
## Clio fails to start
If you see the error log message `Failed to fetch ETL state from...`, this means the configured rippled node is not reachable. Check the following:
- Make sure the rippled node is running at the specified address and port.
- Make sure the rippled node is accessible from the machine where Clio is running.
If you would like to run Clio without an available rippled node, you can add the setting below to Clio's configuration file:
```text
"allow_no_etl": true
```
## Clio is not added to secure_gateway in rippled's config
If you see the warning message `AsyncCallData is_unlimited is false.`, this means that Clio is not added to the `secure_gateway` of the `port_grpc` section in the rippled configuration file. This will slow down the sync process. Please add Clio's IP to the `secure_gateway` in the rippled configuration file for both the gRPC and WS ports.
## Clio is slow
To speed up response time, Clio has an internal cache. However, the cache can take time to warm up. If you see slow response times, first check whether the cache is still loading.
You can check the cache status by calling:
```sh
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep is_full
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep is_enabled
```
If `is_full` is false, it means the cache is still loading. Normally, Clio can respond more quickly once the cache finishes loading. If `is_enabled` is false, it means the cache is disabled in the configuration file or there is data corruption in the database.
## Receive error message `Too many requests`
If a client sees the error message `Too many requests`, this means that the client is blocked by Clio's DosGuard protection. You may want to add the client's IP to the whitelist in the configuration file, or update your other DosGuard settings.
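For reference, a minimal sketch of the relevant `dos_guard` section (the full option set appears in the example config earlier in this diff; the second IP is a placeholder):

```json
"dos_guard": {
    // Comma-separated list of IPs to exclude from rate limiting
    "whitelist": ["127.0.0.1", "203.0.113.7"]
}
```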

View File

@@ -1,9 +0,0 @@
ser
onWs
datas
AtLeast
AtMost
compiletime
tring
trings
strat

View File

@@ -1,44 +0,0 @@
#!/bin/bash
# Note: This script is intended to be run from the root of the repository.
#
# This script will fix local includes in the C++ code for a given file.
# Usage: ./pre-commit-hooks/fix-local-includes.sh <file1> <file2> ...
files="$@"
echo "+ Fixing includes in $files..."
GNU_SED=$(sed --version 2>&1 | grep -q 'GNU' && echo true || echo false)
if [[ "$GNU_SED" == "false" ]]; then # macOS sed
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | tr '\n' '|' | sed 's/|$//' | sed 's/|/\\|/g')
else
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | paste -sd '|' | sed 's/|/\\|/g')
fi
fix_includes() {
file_path="$1"
file_path_all_global="${file_path}.tmp.global"
file_path_fixed="${file_path}.tmp.fixed"
# Make all includes to be <...> style
sed -E 's|#include "(.*)"|#include <\1>|g' "$file_path" > "$file_path_all_global"
# Make local includes to be "..." style
sed -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g" "$file_path_all_global" > "$file_path_fixed"
rm "$file_path_all_global"
# Check if the temporary file is different from the original file
if ! cmp -s "$file_path" "$file_path_fixed"; then
# Replace the original file with the temporary file
mv "$file_path_fixed" "$file_path"
else
# Remove the temporary file if it's the same as the original
rm "$file_path_fixed"
fi
}
for file in $files; do
fix_includes "$file"
done

View File

@@ -1,14 +0,0 @@
#!/usr/bin/env bash
#
# Capture and print stdout, since gofmt doesn't use proper exit codes
#
set -e -o pipefail
if ! command -v gofmt &> /dev/null ; then
echo "gofmt not installed or available in the PATH" >&2
exit 1
fi
output="$(gofmt -l -w "$@")"
echo "$output"
[[ -z "$output" ]]

View File

@@ -21,7 +21,7 @@
#include "migration/MigrationApplication.hpp"
#include "util/build/Build.hpp"
#include "util/config/ConfigDescription.hpp"
#include "util/newconfig/ConfigDescription.hpp"
#include <boost/program_options/options_description.hpp>
#include <boost/program_options/parsers.hpp>

View File

@@ -37,15 +37,14 @@
#include "rpc/WorkQueue.hpp"
#include "rpc/common/impl/HandlerProvider.hpp"
#include "util/build/Build.hpp"
#include "util/config/ConfigDefinition.hpp"
#include "util/log/Logger.hpp"
#include "util/newconfig/ConfigDefinition.hpp"
#include "util/prometheus/Prometheus.hpp"
#include "web/AdminVerificationStrategy.hpp"
#include "web/RPCServerHandler.hpp"
#include "web/Server.hpp"
#include "web/dosguard/DOSGuard.hpp"
#include "web/dosguard/IntervalSweepHandler.hpp"
#include "web/dosguard/Weights.hpp"
#include "web/dosguard/WhitelistHandler.hpp"
#include "web/ng/RPCServerHandler.hpp"
#include "web/ng/Server.hpp"
@@ -105,8 +104,7 @@ ClioApplication::run(bool const useNgWebServer)
// Rate limiter, to prevent abuse
auto whitelistHandler = web::dosguard::WhitelistHandler{config_};
auto const dosguardWeights = web::dosguard::Weights::make(config_);
auto dosGuard = web::dosguard::DOSGuard{config_, whitelistHandler, dosguardWeights};
auto dosGuard = web::dosguard::DOSGuard{config_, whitelistHandler};
auto sweepHandler = web::dosguard::IntervalSweepHandler{config_, ioc, dosGuard};
auto cache = data::LedgerCache{};
@@ -160,7 +158,7 @@ ClioApplication::run(bool const useNgWebServer)
RPCEngineType::makeRPCEngine(config_, backend, balancer, dosGuard, workQueue, counters, handlerProvider);
if (useNgWebServer or config_.get<bool>("server.__ng_web_server")) {
web::ng::RPCServerHandler<RPCEngineType> handler{config_, backend, rpcEngine, etl, dosGuard};
web::ng::RPCServerHandler<RPCEngineType> handler{config_, backend, rpcEngine, etl};
auto expectedAdminVerifier = web::makeAdminVerificationStrategy(config_);
if (not expectedAdminVerifier.has_value()) {
@@ -178,7 +176,7 @@ ClioApplication::run(bool const useNgWebServer)
httpServer->onGet("/metrics", MetricsHandler{adminVerifier});
httpServer->onGet("/health", HealthCheckHandler{});
auto requestHandler = RequestHandler{adminVerifier, handler};
auto requestHandler = RequestHandler{adminVerifier, handler, dosGuard};
httpServer->onPost("/", requestHandler);
httpServer->onWs(std::move(requestHandler));
@@ -201,7 +199,7 @@ ClioApplication::run(bool const useNgWebServer)
}
// Init the web server
auto handler = std::make_shared<web::RPCServerHandler<RPCEngineType>>(config_, backend, rpcEngine, etl, dosGuard);
auto handler = std::make_shared<web::RPCServerHandler<RPCEngineType>>(config_, backend, rpcEngine, etl);
auto const httpServer = web::makeHttpServer(config_, ioc, dosGuard, handler);

View File

@@ -21,7 +21,7 @@
#include "app/Stopper.hpp"
#include "util/SignalsHandler.hpp"
#include "util/config/ConfigDefinition.hpp"
#include "util/newconfig/ConfigDefinition.hpp"
namespace app {

View File

@@ -19,8 +19,8 @@
#pragma once
#include "util/config/ConfigDefinition.hpp"
#include "util/config/ConfigFileJson.hpp"
#include "util/newconfig/ConfigDefinition.hpp"
#include "util/newconfig/ConfigFileJson.hpp"
#include <cstdlib>
#include <iostream>

View File

@@ -147,6 +147,7 @@ class RequestHandler {
util::Logger webServerLog_{"WebServer"};
std::shared_ptr<web::AdminVerificationStrategy> adminVerifier_;
std::reference_wrapper<RpcHandlerType> rpcHandler_;
std::reference_wrapper<web::dosguard::DOSGuardInterface> dosguard_;
public:
/**
@@ -154,9 +155,14 @@ public:
*
* @param adminVerifier The AdminVerificationStrategy to use for verifying the connection for admin access.
* @param rpcHandler The RPC handler to use for handling the request.
* @param dosguard The DOSGuardInterface to use for checking the connection.
*/
RequestHandler(std::shared_ptr<web::AdminVerificationStrategy> adminVerifier, RpcHandlerType& rpcHandler)
: adminVerifier_(std::move(adminVerifier)), rpcHandler_(rpcHandler)
RequestHandler(
std::shared_ptr<web::AdminVerificationStrategy> adminVerifier,
RpcHandlerType& rpcHandler,
web::dosguard::DOSGuardInterface& dosguard
)
: adminVerifier_(std::move(adminVerifier)), rpcHandler_(rpcHandler), dosguard_(dosguard)
{
}
@@ -177,6 +183,21 @@ public:
boost::asio::yield_context yield
)
{
if (not dosguard_.get().request(connectionMetadata.ip())) {
auto error = rpc::makeError(rpc::RippledError::rpcSLOW_DOWN);
if (not request.isHttp()) {
try {
auto requestJson = boost::json::parse(request.message());
if (requestJson.is_object() && requestJson.as_object().contains("id"))
error["id"] = requestJson.as_object().at("id");
error["request"] = request.message();
} catch (std::exception const&) {
error["request"] = request.message();
}
}
return web::ng::Response{boost::beast::http::status::service_unavailable, error, request};
}
LOG(webServerLog_.info()) << connectionMetadata.tag()
<< "Received request from ip = " << connectionMetadata.ip()
<< " - posting to WorkQueue";
@@ -186,7 +207,20 @@ public:
});
try {
return rpcHandler_(request, connectionMetadata, std::move(subscriptionContext), yield);
auto response = rpcHandler_(request, connectionMetadata, std::move(subscriptionContext), yield);
if (not dosguard_.get().add(connectionMetadata.ip(), response.message().size())) {
auto jsonResponse = boost::json::parse(response.message()).as_object();
jsonResponse["warning"] = "load";
if (jsonResponse.contains("warnings") && jsonResponse["warnings"].is_array()) {
jsonResponse["warnings"].as_array().push_back(rpc::makeWarning(rpc::WarnRpcRateLimit));
} else {
jsonResponse["warnings"] = boost::json::array{rpc::makeWarning(rpc::WarnRpcRateLimit)};
}
response.setMessage(jsonResponse);
}
return response;
} catch (std::exception const&) {
return web::ng::Response{
boost::beast::http::status::internal_server_error,

View File

@@ -36,7 +36,6 @@
#include <chrono>
#include <ctime>
#include <memory>
#include <string>
#include <utility>
#include <vector>
@@ -112,12 +111,9 @@ ClusterCommunicationService::selfData() const
return result;
}
std::expected<std::vector<ClioNode>, std::string>
std::vector<ClioNode>
ClusterCommunicationService::clusterData() const
{
if (not isHealthy_) {
return std::unexpected{"Service is not healthy"};
}
std::vector<ClioNode> result;
boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) {
result = otherNodesData_;
@@ -131,13 +127,7 @@ ClusterCommunicationService::doRead(boost::asio::yield_context yield)
{
otherNodesData_.clear();
BackendInterface::ClioNodesDataFetchResult expectedResult;
try {
expectedResult = backend_->fetchClioNodesData(yield);
} catch (...) {
expectedResult = std::unexpected{"Failed to fetch Clio nodes data"};
}
auto const expectedResult = backend_->fetchClioNodesData(yield);
if (!expectedResult.has_value()) {
LOG(log_.error()) << "Failed to fetch nodes data";
isHealthy_ = false;

View File

@@ -34,7 +34,6 @@
#include <chrono>
#include <memory>
#include <string>
#include <vector>
namespace cluster {
@@ -51,7 +50,7 @@ class ClusterCommunicationService : public ClusterCommunicationServiceInterface
util::prometheus::Bool isHealthy_ = PrometheusService::boolMetric(
"cluster_communication_is_healthy",
{},
"Whether cluster communication service is operating healthy (1 - healthy, 0 - we have a problem)"
"Whether cluster communicaton service is operating healthy (1 - healthy, 0 - we have a problem)"
);
// TODO: Use util::async::CoroExecutionContext after https://github.com/XRPLF/clio/issues/1973 is implemented
@@ -126,9 +125,9 @@ public:
/**
* @brief Get the data of all nodes in the cluster (including self).
*
* @return The data of all nodes in the cluster or error if the service is not healthy.
* @return The data of all nodes in the cluster.
*/
std::expected<std::vector<ClioNode>, std::string>
std::vector<ClioNode>
clusterData() const override;
private:

View File

@@ -21,8 +21,6 @@
#include "cluster/ClioNode.hpp"
#include <expected>
#include <string>
#include <vector>
namespace cluster {
@@ -45,9 +43,9 @@ public:
/**
* @brief Get the data of all nodes in the cluster (including self).
*
* @return The data of all nodes in the cluster or error if the service is not healthy.
* @return The data of all nodes in the cluster.
*/
[[nodiscard]] virtual std::expected<std::vector<ClioNode>, std::string>
[[nodiscard]] virtual std::vector<ClioNode>
clusterData() const = 0;
};
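This hunk drops the `std::expected`-based signature; for context, a self-contained sketch (not Clio code) of how a caller consumes such an API under C++23:

```cpp
#include <expected>
#include <iostream>
#include <string>
#include <vector>

struct Node {
    std::string id;
};

// Stand-in for clusterData() above; always unhealthy in this sketch.
std::expected<std::vector<Node>, std::string>
clusterData()
{
    return std::unexpected{"Service is not healthy"};
}

int
main()
{
    if (auto const nodes = clusterData(); nodes.has_value()) {
        for (auto const& node : *nodes)
            std::cout << node.id << '\n';
    } else {
        std::cout << "cluster data unavailable: " << nodes.error() << '\n';
    }
}
```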

View File

@@ -137,8 +137,6 @@ struct Amendments {
REGISTER(fixInvalidTxFlags);
REGISTER(fixFrozenLPTokenTransfer);
REGISTER(DeepFreeze);
REGISTER(PermissionDelegation);
REGISTER(fixPayChanCancelAfter);
// Obsolete but supported by libxrpl
REGISTER(CryptoConditionsSuite);

View File

@@ -23,8 +23,8 @@
#include "data/CassandraBackend.hpp"
#include "data/LedgerCacheInterface.hpp"
#include "data/cassandra/SettingsProvider.hpp"
#include "util/config/ConfigDefinition.hpp"
#include "util/log/Logger.hpp"
#include "util/newconfig/ConfigDefinition.hpp"
#include <boost/algorithm/string.hpp>
#include <boost/algorithm/string/predicate.hpp>

View File

@@ -61,7 +61,7 @@ BackendInterface::finishWrites(std::uint32_t const ledgerSequence)
LOG(gLog.debug()) << "Want finish writes for " << ledgerSequence;
auto commitRes = doFinishWrites();
if (commitRes) {
LOG(gLog.debug()) << "Successfully committed. Updating range now to " << ledgerSequence;
updateRange(ledgerSequence);
}
return commitRes;
@@ -246,7 +246,7 @@ BackendInterface::fetchBookOffers(
auto end = std::chrono::system_clock::now();
LOG(gLog.debug()) << "Fetching " << std::to_string(keys.size()) << " offers took "
<< std::to_string(getMillis(mid - begin)) << " milliseconds. Fetching next dir took "
<< std::to_string(succMillis) << " milliseconds. Fetched next dir " << std::to_string(numSucc)
<< " times"
<< " Fetching next page of dir took " << std::to_string(pageMillis) << " milliseconds"
<< ". num pages = " << std::to_string(numPages) << ". Fetching all objects took "

View File

@@ -69,7 +69,7 @@ public:
static constexpr std::size_t kDEFAULT_WAIT_BETWEEN_RETRY = 500;
/**
* @brief A helper function that catches DatabaseTimeout exceptions and retries indefinitely.
*
* @tparam FnType The type of function object to execute
* @param func The function object to execute
@@ -398,7 +398,7 @@ public:
* @brief Fetches a specific ledger object.
*
* Currently the real fetch happens in doFetchLedgerObject and fetchLedgerObject attempts to fetch from Cache first
* and only calls out to the real DB if a cache miss occurred.
*
* @param key The key of the object
* @param sequence The ledger sequence to fetch for
@@ -512,7 +512,7 @@ public:
* @param key The key to fetch for
* @param ledgerSequence The ledger sequence to fetch for
* @param yield The coroutine context
* @return The successor on success; nullopt otherwise
*/
std::optional<LedgerObject>
fetchSuccessorObject(ripple::uint256 key, std::uint32_t ledgerSequence, boost::asio::yield_context yield) const;
@@ -526,7 +526,7 @@ public:
* @param key The key to fetch for
* @param ledgerSequence The ledger sequence to fetch for
* @param yield The coroutine context
* @return The successor key on success; nullopt otherwise
* @return The sucessor key on success; nullopt otherwise
*/
std::optional<ripple::uint256>
fetchSuccessorKey(ripple::uint256 key, std::uint32_t ledgerSequence, boost::asio::yield_context yield) const;
@@ -537,7 +537,7 @@ public:
* @param key The key to fetch for
* @param ledgerSequence The ledger sequence to fetch for
* @param yield The coroutine context
* @return The successor on success; nullopt otherwise
*/
virtual std::optional<ripple::uint256>
doFetchSuccessorKey(ripple::uint256 key, std::uint32_t ledgerSequence, boost::asio::yield_context yield) const = 0;
@@ -569,17 +569,13 @@ public:
virtual std::optional<std::string>
fetchMigratorStatus(std::string const& migratorName, boost::asio::yield_context yield) const = 0;
/** @brief Return type for fetchClioNodesData() method */
using ClioNodesDataFetchResult =
std::expected<std::vector<std::pair<boost::uuids::uuid, std::string>>, std::string>;
/**
* @brief Fetches the data of all nodes in the cluster.
*
* @param yield The coroutine context
* @return The data of all nodes in the cluster.
*/
[[nodiscard]] virtual ClioNodesDataFetchResult
[[nodiscard]] virtual std::expected<std::vector<std::pair<boost::uuids::uuid, std::string>>, std::string>
fetchClioNodesData(boost::asio::yield_context yield) const = 0;
/**
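Earlier in this header, a doc comment describes a helper that catches DatabaseTimeout exceptions and retries indefinitely; a generic, self-contained sketch of that pattern follows (the 500ms default mirrors `kDEFAULT_WAIT_BETWEEN_RETRY` above; everything else is illustrative, not Clio's actual implementation):

```cpp
#include <chrono>
#include <cstddef>
#include <thread>

struct DatabaseTimeout {};  // stand-in for Clio's exception type

// Illustrative sketch: retry func() indefinitely, sleeping between attempts.
template <typename FnType>
auto
retryOnTimeout(FnType func, std::size_t waitMs = 500)
{
    while (true) {
        try {
            return func();
        } catch (DatabaseTimeout const&) {
            std::this_thread::sleep_for(std::chrono::milliseconds{waitMs});
        }
    }
}
```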

View File

@@ -5,7 +5,6 @@ target_sources(
BackendCounters.cpp
BackendInterface.cpp
LedgerCache.cpp
LedgerHeaderCache.cpp
cassandra/impl/Future.cpp
cassandra/impl/Cluster.cpp
cassandra/impl/Batch.cpp

View File

@@ -22,7 +22,6 @@
#include "data/BackendInterface.hpp"
#include "data/DBHelpers.hpp"
#include "data/LedgerCacheInterface.hpp"
#include "data/LedgerHeaderCache.hpp"
#include "data/Types.hpp"
#include "data/cassandra/Concepts.hpp"
#include "data/cassandra/Handle.hpp"
@@ -63,8 +62,6 @@
#include <utility>
#include <vector>
class CacheBackendCassandraTest;
namespace data::cassandra {
/**
@@ -74,27 +71,21 @@ namespace data::cassandra {
*
* @tparam SettingsProviderType The settings provider type to use
* @tparam ExecutionStrategyType The execution strategy type to use
* @tparam FetchLedgerCacheType The ledger header cache type to use
*/
template <
SomeSettingsProvider SettingsProviderType,
SomeExecutionStrategy ExecutionStrategyType,
typename FetchLedgerCacheType = FetchLedgerCache>
template <SomeSettingsProvider SettingsProviderType, SomeExecutionStrategy ExecutionStrategyType>
class BasicCassandraBackend : public BackendInterface {
util::Logger log_{"Backend"};
SettingsProviderType settingsProvider_;
Schema<SettingsProviderType> schema_;
std::atomic_uint32_t ledgerSequence_ = 0u;
friend class ::CacheBackendCassandraTest;
protected:
Handle handle_;
// have to be mutable because BackendInterface constness :(
mutable ExecutionStrategyType executor_;
// TODO: move to interface level
mutable FetchLedgerCacheType ledgerCache_{};
public:
/**
@@ -138,6 +129,7 @@ public:
LOG(log_.error()) << error;
throw std::runtime_error(error);
}
LOG(log_.info()) << "Created (revamped) CassandraBackend";
}
@@ -271,16 +263,11 @@ public:
std::optional<ripple::LedgerHeader>
fetchLedgerBySequence(std::uint32_t const sequence, boost::asio::yield_context yield) const override
{
if (auto const lock = ledgerCache_.get(); lock.has_value() && lock->seq == sequence)
return lock->ledger;
auto const res = executor_.read(yield, schema_->selectLedgerBySeq, sequence);
if (res) {
if (auto const& result = res.value(); result) {
if (auto const maybeValue = result.template get<std::vector<unsigned char>>(); maybeValue) {
auto const header = util::deserializeHeader(ripple::makeSlice(*maybeValue));
ledgerCache_.put(FetchLedgerCache::CacheEntry{header, sequence});
return header;
return util::deserializeHeader(ripple::makeSlice(*maybeValue));
}
LOG(log_.error()) << "Could not fetch ledger by sequence - no rows";
@@ -793,7 +780,7 @@ public:
while (liveAccounts.size() < number) {
Statement const statement = lastItem ? schema_->selectAccountFromToken.bind(*lastItem, Limit{pageSize})
: schema_->selectAccountFromBeginning.bind(Limit{pageSize});
auto const res = executor_.read(yield, statement);
if (res) {

View File

@@ -63,7 +63,7 @@ LedgerCache::update(std::vector<LedgerObject> const& objs, uint32_t seq, bool is
if (seq > latestSeq_) {
ASSERT(
seq == latestSeq_ + 1 || latestSeq_ == 0,
"New sequence must be either next or first. seq = {}, latestSeq_ = {}",
"New sequense must be either next or first. seq = {}, latestSeq_ = {}",
seq,
latestSeq_
);

View File

@@ -1,46 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2025, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#include "data/LedgerHeaderCache.hpp"
#include "util/Mutex.hpp"
#include <mutex>
#include <optional>
#include <shared_mutex>
namespace data {
FetchLedgerCache::FetchLedgerCache() = default;
void
FetchLedgerCache::put(CacheEntry const& cacheEntry)
{
auto lock = mutex_.lock<std::unique_lock>();
*lock = cacheEntry;
}
std::optional<FetchLedgerCache::CacheEntry>
FetchLedgerCache::get() const
{
auto const lock = mutex_.lock<std::shared_lock>();
return lock.get();
}
} // namespace data

View File

@@ -1,85 +0,0 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2025, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#pragma once
#include "util/Mutex.hpp"
#include <xrpl/protocol/LedgerHeader.h>
#include <cstdint>
#include <optional>
#include <shared_mutex>
namespace data {
/**
* @brief A simple cache holding one `ripple::LedgerHeader` to reduce DB lookups.
*
* Used internally by backend implementations. When a ledger header is
* fetched via `FetchLedgerBySeq` (often triggered by RPC commands),
* the result can be stored here. Subsequent requests for the same ledger
 * sequence can then retrieve the header from this cache, avoiding unnecessary
* database reads and improving performance.
*/
class FetchLedgerCache {
public:
FetchLedgerCache();
/**
* @brief Struct to store ledger header cache entry and the sequence it belongs to
*/
struct CacheEntry {
ripple::LedgerHeader ledger;
uint32_t seq{};
/**
 * @brief Compares two CacheEntry objects. Used in testing for EXPECT_CALL
 *
 * @param other The other CacheEntry to compare against
 * @return true if the two entries are the same, false otherwise
*/
bool
operator==(CacheEntry const& other) const
{
return ledger.hash == other.ledger.hash && seq == other.seq;
}
};
/**
* @brief Put CacheEntry into thread-safe container
*
* @param cacheEntry The Cache to store into thread-safe container.
*/
void
put(CacheEntry const& cacheEntry);
/**
* @brief Read CacheEntry from thread-safe container.
*
* @return Optional CacheEntry, depending on if it exists in thread-safe container or not.
*/
std::optional<CacheEntry>
get() const;
private:
mutable util::Mutex<std::optional<CacheEntry>, std::shared_mutex> mutex_;
};
} // namespace data
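// Usage sketch (illustrative only; this diff removes the class). It mirrors
// how fetchLedgerBySequence used the cache before this change:
//
//     data::FetchLedgerCache cache;
//     cache.put({header, sequence});       // store the freshly fetched header
//     if (auto const entry = cache.get();  // on the next fetch for `sequence`...
//         entry.has_value() && entry->seq == sequence)
//         return entry->ledger;            // ...skip the database read entirely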

View File

@@ -6,7 +6,7 @@ To support additional database types, you can create new classes that implement
## Data Model
The data model used by Clio to read and write ledger data is different from what `rippled` uses. `rippled` uses a novel data structure named [_SHAMap_](https://github.com/ripple/rippled/blob/master/src/ripple/shamap/README.md), which is a combination of a Merkle Tree and a Radix Trie. In a SHAMap, ledger objects are stored in the root vertices of the tree. Thus, looking up a record located at the leaf node of the SHAMap executes a tree search, where the path from the root node to the leaf node is the key of the record.
`rippled` nodes can also generate a proof-tree by forming a subtree with all the path nodes and their neighbors, which can then be used to prove the existence of the leaf node data to other `rippled` nodes. In short, the main purpose of the SHAMap data structure is to facilitate the fast validation of data integrity between different decentralized `rippled` nodes.
@@ -22,19 +22,19 @@ There are three main types of data in each XRP Ledger version:
Due to the structural differences between the supported database types, Clio may represent these data types using a different schema for each unique database type.
### Keywords
**Sequence**: A unique incrementing identification number used to label the different ledger versions.
**Hash**: The SHA512-half (calculate SHA512 and take the first 256 bits) hash of various ledger data like the entire ledger or specific ledger objects.
**Ledger Object**: The [binary-encoded](https://xrpl.org/serialization.html) STObject containing specific data (i.e. metadata, transaction data).
**Metadata**: The data containing [detailed information](https://xrpl.org/transaction-metadata.html#transaction-metadata) of the outcome of a specific transaction, regardless of whether the transaction was successful.
**Transaction data**: The data containing the [full details](https://xrpl.org/transaction-common-fields.html) of a specific transaction.
**Object Index**: The pseudo-random unique identifier of a ledger object, created by hashing the data of the object.
## Cassandra Implementation
@@ -58,11 +58,11 @@ Their schemas and how they work are detailed in the following sections.
### ledger_transactions
```sql
CREATE TABLE clio.ledger_transactions (
    ledger_sequence bigint, # The sequence number of the ledger version
    hash blob,              # Hash of all the transactions on this ledger version
    PRIMARY KEY (ledger_sequence, hash)
) WITH CLUSTERING ORDER BY (hash ASC) ...
```
@@ -70,37 +70,37 @@ This table stores the hashes of all transactions in a given ledger sequence and
### transactions
```sql
CREATE TABLE clio.transactions (
    hash blob PRIMARY KEY,  # The transaction hash
    date bigint,            # Date of the transaction
    ledger_sequence bigint, # The sequence that the transaction was validated
    metadata blob,          # Metadata of the transaction
    transaction blob        # Data of the transaction
) ...
```
This table stores the full transaction and metadata of each ledger version with the transaction hash as the primary key.
To look up all the transactions that were validated in a ledger version with sequence `n`, first get all the transaction hashes in that ledger version by querying `SELECT * FROM ledger_transactions WHERE ledger_sequence = n;`. Then, iterate through the list of hashes and query `SELECT * FROM transactions WHERE hash = one_of_the_hash_from_the_list;` to get the detailed transaction data.
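As a rough, self-contained illustration of this two-step lookup (the `query` helper below is a stand-in for a real CQL driver call, not a Clio or driver API):

```cpp
#include <cstdint>
#include <string>
#include <vector>

using Blob = std::vector<unsigned char>;

// Stand-in for a CQL driver call: a real implementation would bind `param`
// and return one blob column per row. Here it returns no rows at all.
std::vector<Blob>
query(std::string const& /* cql */, Blob const& /* param */ = {})
{
    return {};
}

// Step 1: all transaction hashes validated in ledger n.
// Step 2: one point lookup per hash for the full transaction data.
std::vector<Blob>
fetchTransactionsForLedger(std::uint64_t n)
{
    std::vector<Blob> transactions;
    auto const hashes = query("SELECT hash FROM ledger_transactions WHERE ledger_sequence = " + std::to_string(n));
    for (auto const& hash : hashes) {
        auto const rows = query("SELECT transaction FROM transactions WHERE hash = ?", hash);
        transactions.insert(transactions.end(), rows.begin(), rows.end());
    }
    return transactions;
}
```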
### ledger_hashes
```sql
CREATE TABLE clio.ledger_hashes (
    hash blob PRIMARY KEY, # Hash of entire ledger version's data
    sequence bigint        # The sequence of the ledger version
) ...
```
This table stores the hash of all ledger versions by their sequences.
### ledger_range
```sql
CREATE TABLE clio.ledger_range (
    is_latest boolean PRIMARY KEY, # Whether this sequence is the stopping range
    sequence bigint                # The sequence number of the starting/stopping range
) ...
```
@@ -108,12 +108,12 @@ This table marks the range of ledger versions that is stored on this specific Ca
### objects
```sql
CREATE TABLE clio.objects (
    key blob,        # Object index of the object
    sequence bigint, # The sequence this object was last updated
    object blob,     # Data of the object
    PRIMARY KEY (key, sequence)
) WITH CLUSTERING ORDER BY (sequence DESC) ...
```
@@ -123,10 +123,10 @@ The table is updated when all data for a given ledger sequence has been written
### ledgers
```sql
CREATE TABLE clio.ledgers (
    sequence bigint PRIMARY KEY, # Sequence of the ledger version
    header blob                  # Data of the header
) ...
```
@@ -134,11 +134,11 @@ This table stores the ledger header data of specific ledger versions by their se
### diff
```sql
CREATE TABLE clio.diff (
    seq bigint, # Sequence of the ledger version
    key blob,   # Hash of changes in the ledger version
    PRIMARY KEY (seq, key)
) WITH CLUSTERING ORDER BY (key ASC) ...
```
@@ -146,12 +146,12 @@ This table stores the object index of all the changes in each ledger version.
### account_tx
```sql
CREATE TABLE clio.account_tx (
    account blob,
    seq_idx frozen<tuple<bigint, bigint>>, # Tuple of (ledger_index, transaction_index)
    hash blob,                             # Hash of the transaction
    PRIMARY KEY (account, seq_idx)
) WITH CLUSTERING ORDER BY (seq_idx DESC) ...
```
@@ -159,18 +159,18 @@ This table stores the list of transactions affecting a given account. This inclu
### successor
```sql
CREATE TABLE clio.successor (
    key blob,   # Object index
    seq bigint, # The sequence that this ledger object's predecessor and successor was updated
    next blob,  # Index of the next object that existed in this sequence
    PRIMARY KEY (key, seq)
) WITH CLUSTERING ORDER BY (seq ASC) ...
```
This table is the backbone of how ledger object histories are stored in Cassandra. The `successor` table stores the object index of every ledger object validated on the XRP network, along with the ledger sequence at which the object was updated.
Because each key is ordered by its sequence, Clio can trace through the table at a specific sequence number and recreate a Linked List data structure that represents all the ledger objects existing at that ledger sequence. The special values of `0x00...00` and `0xFF...FF` are used to label the _head_ and _tail_ of the Linked List in the successor table.
The diagram below showcases how tracing through the same table, but filtering with different sequence parameters, can result in different Linked Lists representing the corresponding past states of the ledger objects. A query like `SELECT * FROM successor WHERE key = ? AND seq <= n ORDER BY seq DESC LIMIT 1;` can effectively trace through the successor table and get the Linked List of a specific sequence `n`.
@@ -182,12 +182,12 @@ In each new ledger version with sequence `n`, a ledger object `v` can either be
For all three of these operations, the procedure to update the successor table can be broken down into two steps (see the sketch after this list):
1. Trace through the Linked List of the previous sequence to find the ledger object `e` with the greatest object index smaller than or equal to `v`'s index. Save `e`'s `next` value (the index of the next ledger object) as `w`.
2. If `v` is...
   1. Being **created**, add two new records of `seq=n`: one with `e` pointing to `v`, and one with `v` pointing to `w` (Linked List insertion operation).
   2. Being **modified**, do nothing.
   3. Being **deleted**, add a record of `seq=n` with `e` pointing to `v`'s `next` value (Linked List deletion operation).
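A minimal, self-contained sketch of these rules (illustrative only, not Clio code): the Linked List of the previous sequence is modeled as an in-memory map, and each write that would go to the `successor` table is appended to a log of `(key, seq, next)` records.

```cpp
#include <cstdint>
#include <iterator>
#include <map>
#include <vector>

using Index = std::uint64_t;

constexpr Index kHead = 0;          // stands in for 0x00...00
constexpr Index kTail = ~Index{0};  // stands in for 0xFF...FF

struct Record {
    Index key;
    std::uint32_t seq;
    Index next;
};

// Step 1: in the list as of sequence n-1, find e, the greatest key strictly
// smaller than v. (For creation v is not in the list yet, so this matches
// "greatest index smaller than or equal to v"; kHead guards the front.)
Index
findPredecessor(std::map<Index, Index> const& list, Index v)
{
    auto const it = list.lower_bound(v);  // first key >= v
    return std::prev(it)->first;
}

// Step 2, "created": e -> v and v -> w, where w was e's old next.
void
onCreated(std::map<Index, Index>& list, std::vector<Record>& log, Index v, std::uint32_t n)
{
    Index const e = findPredecessor(list, v);
    Index const w = list[e];
    log.push_back({e, n, v});
    log.push_back({v, n, w});
    list[e] = v;
    list[v] = w;
}

// Step 2, "deleted": e skips over v. ("Modified" writes nothing at all.)
void
onDeleted(std::map<Index, Index>& list, std::vector<Record>& log, Index v, std::uint32_t n)
{
    Index const e = findPredecessor(list, v);
    log.push_back({e, n, list[v]});
    list[e] = list[v];
    list.erase(v);
}

int
main()
{
    std::map<Index, Index> list{{kHead, kTail}};  // empty ledger: head -> tail
    std::vector<Record> log;
    onCreated(list, log, 0x42, 2);  // object 0x42 created in sequence 2
    onDeleted(list, log, 0x42, 3);  // and deleted again in sequence 3
}
```

Replaying the log while filtering on `seq <= n`, exactly as the `SELECT ... LIMIT 1` query above does, reproduces the Linked List as it stood at sequence `n`.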
## NFT data model
@@ -195,13 +195,13 @@ In `rippled` NFTs are stored in `NFTokenPage` ledger objects. This object is imp
### nf_tokens
```sql
CREATE TABLE clio.nf_tokens (
    token_id blob,     # The NFT's ID
    sequence bigint,   # Sequence of ledger version
    owner blob,        # The account ID of the owner of this NFT at this ledger
    is_burned boolean, # True if token was burned in this ledger
    PRIMARY KEY (token_id, sequence)
) WITH CLUSTERING ORDER BY (sequence DESC) ...
```
@@ -209,7 +209,7 @@ This table indexes NFT IDs with their owner at a given ledger.
The example query below shows how you could search for the owner of token `N` at ledger `Y` and see whether the token was burned.
```sql
SELECT * FROM nf_tokens
WHERE token_id = N AND seq <= Y
ORDER BY seq DESC LIMIT 1;
```
@@ -219,12 +219,12 @@ If the token is burned, the owner field indicates the account that owned the tok
### issuer_nf_tokens_v2
```sql
CREATE TABLE clio.issuer_nf_tokens_v2 (
    issuer blob,   # The NFT issuer's account ID
    taxon bigint,  # The NFT's token taxon
    token_id blob, # The NFT's ID
    PRIMARY KEY (issuer, taxon, token_id)
) WITH CLUSTERING ORDER BY (taxon ASC, token_id ASC) ...
```
@@ -233,12 +233,12 @@ combination. This is useful for determining all the NFTs a specific account issu
### nf_token_uris
```sql
CREATE TABLE clio.nf_token_uris (
    token_id blob,   # The NFT's ID
    sequence bigint, # Sequence of ledger version
    uri blob,        # The NFT's URI
    PRIMARY KEY (token_id, sequence)
) WITH CLUSTERING ORDER BY (sequence DESC) ...
```
@@ -252,12 +252,12 @@ A given NFT will have only one entry in this table (see caveat below), and will
### nf_token_transactions
```sql
CREATE TABLE clio.nf_token_transactions (
    token_id blob,                 # The NFT's ID
    seq_idx tuple<bigint, bigint>, # Tuple of (ledger_index, transaction_index)
    hash blob,                     # Hash of the transaction
    PRIMARY KEY (token_id, seq_idx)
) WITH CLUSTERING ORDER BY (seq_idx DESC) ...
```
@@ -265,12 +265,12 @@ The `nf_token_transactions` table serves as the NFT counterpart to `account_tx`,
### migrator_status
```sql
CREATE TABLE clio.migrator_status (
    migrator_name TEXT, # The name of the migrator
    status TEXT,        # The status of the migrator
    PRIMARY KEY (migrator_name)
)
```
The `migrator_status` table stores the status of each migrator in this database. If a migrator's status is `migrated`, it means this database has finished data migration for this migrator.

View File

@@ -89,7 +89,7 @@ public:
asyncConnect() const;
/**
 * @brief Synchronous version of the above.
*
* See @ref asyncConnect() const for how this works.
*
@@ -108,7 +108,7 @@ public:
asyncConnect(std::string_view keyspace) const;
/**
 * @brief Synchronous version of the above.
*
* See @ref asyncConnect(std::string_view) const for how this works.
*
@@ -127,7 +127,7 @@ public:
asyncDisconnect() const;
/**
 * @brief Synchronous version of the above.
*
* See @ref asyncDisconnect() const for how this works.
*
@@ -146,7 +146,7 @@ public:
asyncReconnect(std::string_view keyspace) const;
/**
 * @brief Synchronous version of the above.
*
* See @ref asyncReconnect(std::string_view) const for how this works.
*
@@ -172,7 +172,7 @@ public:
}
/**
 * @brief Synchronous version of the above.
*
* See asyncExecute(std::string_view, Args&&...) const for how this works.
*
@@ -201,7 +201,7 @@ public:
asyncExecuteEach(std::vector<StatementType> const& statements) const;
/**
 * @brief Synchronous version of the above.
*
* See @ref asyncExecuteEach(std::vector<StatementType> const&) const for how this works.
*
@@ -227,7 +227,7 @@ public:
}
/**
 * @brief Synchronous version of the above.
*
* See asyncExecute(std::vector<StatementType> const&, Args&&...) const for how this works.
*
@@ -262,7 +262,7 @@ public:
asyncExecute(StatementType const& statement, std::function<void(ResultOrErrorType)>&& cb) const;
/**
 * @brief Synchronous version of the above.
*
* See @ref asyncExecute(StatementType const&) const for how this works.
*
@@ -282,7 +282,7 @@ public:
asyncExecute(std::vector<StatementType> const& statements) const;
/**
 * @brief Synchronous version of the above.
*
* See @ref asyncExecute(std::vector<StatementType> const&) const for how this works.
*

Some files were not shown because too many files have changed in this diff.