Mirror of https://github.com/XRPLF/clio.git (synced 2025-11-04 11:55:51 +00:00)

Compare commits: 2.4.1-rc2...clang_tidy (70 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 038b2d2f35 |  |
|  | 68f832a832 |  |
|  | 5e43c3b099 |  |
|  | 9af36e5235 |  |
|  | 90103431ea |  |
|  | ed27c41641 |  |
|  | 3daa735b31 |  |
|  | 5d2c2a8bfc |  |
|  | f2384a47f1 |  |
|  | c774a40a59 |  |
|  | 477d8e1f5f |  |
|  | 7fd51d8a60 |  |
|  | b2a1b34ae6 |  |
|  | 5860a90b94 |  |
|  | f2f342f7c2 |  |
|  | cc1da5afa9 |  |
|  | de055934e1 |  |
|  | 0c201ed952 |  |
|  | 9b83eb4033 |  |
|  | fadd60e68a |  |
|  | eef85b520a |  |
|  | 8b99013ff8 |  |
|  | af0a8af287 |  |
|  | de43be887e |  |
|  | b03cd63efd |  |
|  | 76cf2fc57e |  |
|  | f7f619fd1b |  |
|  | 4a0267fbac |  |
|  | b51ed8dd98 |  |
|  | cb01c9f596 |  |
|  | 7c55529c90 |  |
|  | 424af5dfe0 |  |
|  | bebc683ba2 |  |
|  | 1e0a2f5162 |  |
|  | 593d7298b1 |  |
|  | 79e6f07863 |  |
|  | 1cb09a01f6 |  |
|  | 9c92a2b51b |  |
|  | 99580a2602 |  |
|  | ade6289de2 |  |
|  | 3916635037 |  |
|  | f195a30a76 |  |
|  | cc610a0964 |  |
|  | c5012eb854 |  |
|  | 01ae4c4312 |  |
|  | 88d27a7265 |  |
|  | e01d7d12cf |  |
|  | 1e38ad5ec0 |  |
|  | bd9e39ee85 |  |
|  | 46514c8fe9 |  |
|  | 39d1ceace4 |  |
|  | 89eb962d85 |  |
|  | 4a5fee7548 |  |
|  | fcd891148b |  |
|  | 99adb31184 |  |
|  | 2c1a90a20d |  |
|  | 2385bf547b |  |
|  | 1d011cf8d9 |  |
|  | 6896a2545a |  |
|  | 91484c64e4 |  |
|  | bdf7382d44 |  |
|  | 8a3e71e91f |  |
|  | e61ee30180 |  |
|  | d3df6d10e4 |  |
|  | 60df3a1914 |  |
|  | f454076fb6 |  |
|  | 66b3f40268 |  |
|  | b31b7633c9 |  |
|  | a36aa3618f |  |
|  | 7943f47939 |  |
.clang-format

@@ -1,5 +1,5 @@
 ---
 Language: Cpp
 AccessModifierOffset: -4
 AlignAfterOpenBracket: BlockIndent
 AlignConsecutiveAssignments: false
@@ -22,31 +22,31 @@ BreakBeforeBinaryOperators: false
 BreakBeforeBraces: WebKit
 BreakBeforeTernaryOperators: true
 BreakConstructorInitializersBeforeComma: true
 ColumnLimit: 120
-CommentPragmas: '^ IWYU pragma:'
+CommentPragmas: "^ IWYU pragma:"
 ConstructorInitializerAllOnOneLineOrOnePerLine: true
 ConstructorInitializerIndentWidth: 4
 ContinuationIndentWidth: 4
 Cpp11BracedListStyle: true
 DerivePointerAlignment: false
 DisableFormat: false
 ExperimentalAutoDetectBinPacking: false
 FixNamespaceComments: true
-ForEachMacros: [ Q_FOREACH, BOOST_FOREACH ]
+ForEachMacros: [Q_FOREACH, BOOST_FOREACH]
 IncludeBlocks: Regroup
 IncludeCategories:
   - Regex: '^".*"$'
     Priority: 1
   - Regex: '^<.*\.(h|hpp)>$'
     Priority: 2
-  - Regex: '^<.*>$'
+  - Regex: "^<.*>$"
     Priority: 3
-  - Regex: '.*'
+  - Regex: ".*"
     Priority: 4
-IncludeIsMainRegex: '$'
+IncludeIsMainRegex: "$"
 IndentCaseLabels: true
 IndentFunctionDeclarationAfterType: false
 IndentWidth: 4
 IndentWrappedFunctionNames: false
 IndentRequiresClause: true
 RequiresClausePosition: OwnLine
@@ -63,18 +63,18 @@ PenaltyExcessCharacter: 1000000
 PenaltyReturnTypeOnItsOwnLine: 200
 PointerAlignment: Left
 QualifierAlignment: Right
 ReflowComments: true
 SortIncludes: true
 SpaceAfterCStyleCast: false
 SpaceBeforeAssignmentOperators: true
 SpaceBeforeParens: ControlStatements
 SpaceInEmptyParentheses: false
 SpacesBeforeTrailingComments: 2
 SpacesInAngles: false
 SpacesInContainerLiterals: true
 SpacesInCStyleCastParentheses: false
 SpacesInParentheses: false
 SpacesInSquareBrackets: false
 Standard: Cpp11
 TabWidth: 8
 UseTab: Never
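The change above only normalizes YAML quoting; it does not alter formatting behavior. A quick way to confirm that locally is a dry-run check, sketched below. The source directories and the clang-format version requirement are taken from the `check-format` hook that appears later in this diff; treat the exact invocation as an assumption rather than a prescribed command.

```bash
#!/usr/bin/env bash
# Minimal sketch: verify the tree is already formatted according to .clang-format.
# Assumes clang-format >= 19 is on PATH and that src/ and tests/ hold the C++ sources.
set -euo pipefail

find src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.ipp' \) -print0 |
    xargs -0 clang-format --dry-run --Werror   # non-zero exit if anything would be reformatted
```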
24 .clang-tidy

@@ -1,5 +1,5 @@
 ---
-Checks: '-*,
+Checks: "-*,
   bugprone-argument-comment,
   bugprone-assert-side-effect,
   bugprone-bad-signal-to-kill-thread,
@@ -146,7 +146,7 @@ Checks: '-*,
   readability-static-definition-in-anonymous-namespace,
   readability-suspicious-call-argument,
   readability-use-std-min-max
-'
+"

 CheckOptions:
   readability-braces-around-statements.ShortStatementLines: 2
@@ -158,21 +158,21 @@ CheckOptions:
   readability-identifier-naming.EnumConstantCase: CamelCase
   readability-identifier-naming.ScopedEnumConstantCase: CamelCase
   readability-identifier-naming.GlobalConstantCase: UPPER_CASE
-  readability-identifier-naming.GlobalConstantPrefix: 'k'
+  readability-identifier-naming.GlobalConstantPrefix: "k"
   readability-identifier-naming.GlobalVariableCase: CamelCase
-  readability-identifier-naming.GlobalVariablePrefix: 'g'
+  readability-identifier-naming.GlobalVariablePrefix: "g"
   readability-identifier-naming.ConstexprFunctionCase: camelBack
   readability-identifier-naming.ConstexprMethodCase: camelBack
   readability-identifier-naming.ClassMethodCase: camelBack
   readability-identifier-naming.ClassMemberCase: camelBack
   readability-identifier-naming.ClassConstantCase: UPPER_CASE
-  readability-identifier-naming.ClassConstantPrefix: 'k'
+  readability-identifier-naming.ClassConstantPrefix: "k"
   readability-identifier-naming.StaticConstantCase: UPPER_CASE
-  readability-identifier-naming.StaticConstantPrefix: 'k'
+  readability-identifier-naming.StaticConstantPrefix: "k"
   readability-identifier-naming.StaticVariableCase: UPPER_CASE
-  readability-identifier-naming.StaticVariablePrefix: 'k'
+  readability-identifier-naming.StaticVariablePrefix: "k"
   readability-identifier-naming.ConstexprVariableCase: UPPER_CASE
-  readability-identifier-naming.ConstexprVariablePrefix: 'k'
+  readability-identifier-naming.ConstexprVariablePrefix: "k"
   readability-identifier-naming.LocalConstantCase: camelBack
   readability-identifier-naming.LocalVariableCase: camelBack
   readability-identifier-naming.TemplateParameterCase: CamelCase
@@ -181,11 +181,11 @@ CheckOptions:
   readability-identifier-naming.MemberCase: camelBack
   readability-identifier-naming.PrivateMemberSuffix: _
   readability-identifier-naming.ProtectedMemberSuffix: _
-  readability-identifier-naming.PublicMemberSuffix: ''
-  readability-identifier-naming.FunctionIgnoredRegexp: '.*tag_invoke.*'
+  readability-identifier-naming.PublicMemberSuffix: ""
+  readability-identifier-naming.FunctionIgnoredRegexp: ".*tag_invoke.*"
   bugprone-unsafe-functions.ReportMoreUnsafeFunctions: true
   bugprone-unused-return-value.CheckedReturnTypes: ::std::error_code;::std::error_condition;::std::errc
-  misc-include-cleaner.IgnoreHeaders: '.*/(detail|impl)/.*;.*(expected|unexpected).*;.*ranges_lower_bound\.h;time.h;stdlib.h'
+  misc-include-cleaner.IgnoreHeaders: '.*/(detail|impl)/.*;.*(expected|unexpected).*;.*ranges_lower_bound\.h;time.h;stdlib.h;__chrono/.*;fmt/chrono.h;boost/uuid/uuid_hash.hpp'

 HeaderFilterRegex: '^.*/(src|tests)/.*\.(h|hpp)$'
-WarningsAsErrors: '*'
+WarningsAsErrors: "*"
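These options are picked up automatically when clang-tidy runs from the repository root. A local invocation over a compile database could look like the sketch below; the build directory name, job count, and file regex are assumptions, and `run-clang-tidy` is the wrapper script shipped with LLVM.

```bash
#!/usr/bin/env bash
# Minimal sketch: run the repository's clang-tidy checks against an existing
# CMake build configured with -DCMAKE_EXPORT_COMPILE_COMMANDS=ON.
set -euo pipefail

BUILD_DIR=build   # assumption: compile_commands.json lives here

# run-clang-tidy uses the .clang-tidy file found next to the sources;
# HeaderFilterRegex above limits header diagnostics to src/ and tests/.
run-clang-tidy -p "$BUILD_DIR" -j "$(nproc)" 'src/.*\.cpp'
```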
@@ -1,222 +1,222 @@
 _help_parse: Options affecting listfile parsing
 parse:
   _help_additional_commands:
     - Specify structure for custom cmake functions
   additional_commands:
     foo:
       flags:
         - BAR
         - BAZ
       kwargs:
-        HEADERS: '*'
-        SOURCES: '*'
-        DEPENDS: '*'
+        HEADERS: "*"
+        SOURCES: "*"
+        DEPENDS: "*"
   _help_override_spec:
     - Override configurations per-command where available
   override_spec: {}
   _help_vartags:
     - Specify variable tags.
   vartags: []
   _help_proptags:
     - Specify property tags.
   proptags: []
 _help_format: Options affecting formatting.
 format:
   _help_disable:
     - Disable formatting entirely, making cmake-format a no-op
   disable: false
   _help_line_width:
     - How wide to allow formatted cmake files
   line_width: 120
   _help_tab_size:
     - How many spaces to tab for indent
   tab_size: 2
   _help_use_tabchars:
     - If true, lines are indented using tab characters (utf-8
     - 0x09) instead of <tab_size> space characters (utf-8 0x20).
     - In cases where the layout would require a fractional tab
     - character, the behavior of the fractional indentation is
     - governed by <fractional_tab_policy>
   use_tabchars: false
   _help_fractional_tab_policy:
     - If <use_tabchars> is True, then the value of this variable
     - indicates how fractional indentions are handled during
     - whitespace replacement. If set to 'use-space', fractional
     - indentation is left as spaces (utf-8 0x20). If set to
-    - '`round-up` fractional indentation is replaced with a single'
+    - "`round-up` fractional indentation is replaced with a single"
     - tab character (utf-8 0x09) effectively shifting the column
     - to the next tabstop
   fractional_tab_policy: use-space
   _help_max_subgroups_hwrap:
     - If an argument group contains more than this many sub-groups
     - (parg or kwarg groups) then force it to a vertical layout.
   max_subgroups_hwrap: 4
   _help_max_pargs_hwrap:
     - If a positional argument group contains more than this many
     - arguments, then force it to a vertical layout.
   max_pargs_hwrap: 6
   _help_max_rows_cmdline:
     - If a cmdline positional group consumes more than this many
     - lines without nesting, then invalidate the layout (and nest)
   max_rows_cmdline: 2
   _help_separate_ctrl_name_with_space:
     - If true, separate flow control names from their parentheses
     - with a space
   separate_ctrl_name_with_space: true
   _help_separate_fn_name_with_space:
     - If true, separate function names from parentheses with a
     - space
   separate_fn_name_with_space: false
   _help_dangle_parens:
     - If a statement is wrapped to more than one line, than dangle
     - the closing parenthesis on its own line.
   dangle_parens: true
   _help_dangle_align:
     - If the trailing parenthesis must be 'dangled' on its on
-    - 'line, then align it to this reference: `prefix`: the start'
-    - 'of the statement, `prefix-indent`: the start of the'
-    - 'statement, plus one indentation level, `child`: align to'
+    - "line, then align it to this reference: `prefix`: the start"
+    - "of the statement, `prefix-indent`: the start of the"
+    - "statement, plus one indentation level, `child`: align to"
     - the column of the arguments
   dangle_align: prefix
   _help_min_prefix_chars:
     - If the statement spelling length (including space and
     - parenthesis) is smaller than this amount, then force reject
     - nested layouts.
   min_prefix_chars: 4
   _help_max_prefix_chars:
     - If the statement spelling length (including space and
     - parenthesis) is larger than the tab width by more than this
     - amount, then force reject un-nested layouts.
   max_prefix_chars: 10
   _help_max_lines_hwrap:
     - If a candidate layout is wrapped horizontally but it exceeds
     - this many lines, then reject the layout.
   max_lines_hwrap: 2
   _help_line_ending:
     - What style line endings to use in the output.
   line_ending: unix
   _help_command_case:
     - Format command names consistently as 'lower' or 'upper' case
   command_case: canonical
   _help_keyword_case:
     - Format keywords consistently as 'lower' or 'upper' case
   keyword_case: unchanged
   _help_always_wrap:
     - A list of command names which should always be wrapped
   always_wrap: []
   _help_enable_sort:
     - If true, the argument lists which are known to be sortable
     - will be sorted lexicographicall
   enable_sort: true
   _help_autosort:
     - If true, the parsers may infer whether or not an argument
     - list is sortable (without annotation).
   autosort: true
   _help_require_valid_layout:
     - By default, if cmake-format cannot successfully fit
     - everything into the desired linewidth it will apply the
-    - last, most agressive attempt that it made. If this flag is
+    - last, most aggressive attempt that it made. If this flag is
     - True, however, cmake-format will print error, exit with non-
     - zero status code, and write-out nothing
   require_valid_layout: false
   _help_layout_passes:
     - A dictionary mapping layout nodes to a list of wrap
     - decisions. See the documentation for more information.
   layout_passes: {}
 _help_markup: Options affecting comment reflow and formatting.
 markup:
   _help_bullet_char:
     - What character to use for bulleted lists
-  bullet_char: '*'
+  bullet_char: "*"
   _help_enum_char:
     - What character to use as punctuation after numerals in an
     - enumerated list
   enum_char: .
   _help_first_comment_is_literal:
     - If comment markup is enabled, don't reflow the first comment
     - block in each listfile. Use this to preserve formatting of
     - your copyright/license statements.
   first_comment_is_literal: false
   _help_literal_comment_pattern:
     - If comment markup is enabled, don't reflow any comment block
     - which matches this (regex) pattern. Default is `None`
     - (disabled).
   literal_comment_pattern: null
   _help_fence_pattern:
     - Regular expression to match preformat fences in comments
     - default= ``r'^\s*([`~]{3}[`~]*)(.*)$'``
   fence_pattern: ^\s*([`~]{3}[`~]*)(.*)$
   _help_ruler_pattern:
     - Regular expression to match rulers in comments default=
     - '``r''^\s*[^\w\s]{3}.*[^\w\s]{3}$''``'
   ruler_pattern: ^\s*[^\w\s]{3}.*[^\w\s]{3}$
   _help_explicit_trailing_pattern:
     - If a comment line matches starts with this pattern then it
-    - is explicitly a trailing comment for the preceeding
+    - is explicitly a trailing comment for the preceding
     - argument. Default is '#<'
-  explicit_trailing_pattern: '#<'
+  explicit_trailing_pattern: "#<"
   _help_hashruler_min_length:
     - If a comment line starts with at least this many consecutive
     - hash characters, then don't lstrip() them off. This allows
     - for lazy hash rulers where the first hash char is not
     - separated by space
   hashruler_min_length: 10
   _help_canonicalize_hashrulers:
     - If true, then insert a space between the first hash char and
     - remaining hash chars in a hash ruler, and normalize its
     - length to fill the column
   canonicalize_hashrulers: true
   _help_enable_markup:
     - enable comment markup parsing and reflow
   enable_markup: true
 _help_lint: Options affecting the linter
 lint:
   _help_disabled_codes:
     - a list of lint codes to disable
   disabled_codes: []
   _help_function_pattern:
     - regular expression pattern describing valid function names
-  function_pattern: '[0-9a-z_]+'
+  function_pattern: "[0-9a-z_]+"
   _help_macro_pattern:
     - regular expression pattern describing valid macro names
-  macro_pattern: '[0-9A-Z_]+'
+  macro_pattern: "[0-9A-Z_]+"
   _help_global_var_pattern:
     - regular expression pattern describing valid names for
     - variables with global (cache) scope
-  global_var_pattern: '[A-Z][0-9A-Z_]+'
+  global_var_pattern: "[A-Z][0-9A-Z_]+"
   _help_internal_var_pattern:
     - regular expression pattern describing valid names for
     - variables with global scope (but internal semantic)
   internal_var_pattern: _[A-Z][0-9A-Z_]+
   _help_local_var_pattern:
     - regular expression pattern describing valid names for
     - variables with local scope
-  local_var_pattern: '[a-z][a-z0-9_]+'
+  local_var_pattern: "[a-z][a-z0-9_]+"
   _help_private_var_pattern:
     - regular expression pattern describing valid names for
     - privatedirectory variables
   private_var_pattern: _[0-9a-z_]+
   _help_public_var_pattern:
     - regular expression pattern describing valid names for public
     - directory variables
-  public_var_pattern: '[A-Z][0-9A-Z_]+'
+  public_var_pattern: "[A-Z][0-9A-Z_]+"
   _help_argument_var_pattern:
     - regular expression pattern describing valid names for
     - function/macro arguments and loop variables.
-  argument_var_pattern: '[a-z][a-z0-9_]+'
+  argument_var_pattern: "[a-z][a-z0-9_]+"
   _help_keyword_pattern:
     - regular expression pattern describing valid names for
     - keywords used in functions or macros
-  keyword_pattern: '[A-Z][0-9A-Z_]+'
+  keyword_pattern: "[A-Z][0-9A-Z_]+"
   _help_max_conditionals_custom_parser:
     - In the heuristic for C0201, how many conditionals to match
     - within a loop in before considering the loop a parser.
   max_conditionals_custom_parser: 2
   _help_min_statement_spacing:
     - Require at least this many newlines between statements
   min_statement_spacing: 1
   _help_max_statement_spacing:
     - Require no more than this many newlines between statements
   max_statement_spacing: 2
   max_returns: 6
   max_branches: 12
@@ -226,20 +226,20 @@ lint:
 _help_encode: Options affecting file encoding
 encode:
   _help_emit_byteorder_mark:
     - If true, emit the unicode byte-order mark (BOM) at the start
     - of the file
   emit_byteorder_mark: false
   _help_input_encoding:
     - Specify the encoding of the input file. Defaults to utf-8
   input_encoding: utf-8
   _help_output_encoding:
     - Specify the encoding of the output file. Defaults to utf-8.
     - Note that cmake only claims to support utf-8 so be careful
     - when using anything else
   output_encoding: utf-8
 _help_misc: Miscellaneous configurations options.
 misc:
   _help_per_command:
     - A dictionary containing any per-command configuration
     - overrides. Currently only `command_case` is supported.
   per_command: {}
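This is the configuration that cmake-format reads when the `check-format` hook (removed later in this diff) reformats CMake listfiles. A local check could look like the sketch below; the directory list mirrors the hook's `cmake src tests` set and the `--check` flag is used instead of in-place rewriting, which is an adaptation rather than the hook's exact behavior.

```bash
#!/usr/bin/env bash
# Minimal sketch: verify CMake formatting with cmake-format, using the
# configuration file it discovers in the repository root.
set -euo pipefail

cmake_files=$(find cmake src tests -type f \( -name 'CMakeLists.txt' -o -name '*.cmake' \))
cmake_files="$cmake_files ./CMakeLists.txt"

# --check exits non-zero instead of rewriting files, which is convenient in CI.
cmake-format --check $cmake_files
```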
11 .codecov.yml

@@ -9,3 +9,14 @@ coverage:
     default:
       target: 20% # Need to bump this number https://docs.codecov.com/docs/commit-status#patch-status
       threshold: 2%
+
+# `codecov/codecov-action` reruns `gcovr` if build files present
+# That's why we run it in a separate workflow
+# This ignore list is not currently used
+#
+# More info: https://github.com/XRPLF/clio/pull/2066
+ignore:
+  - "tests"
+  - "src/data/cassandra/"
+  - "src/data/CassandraBackend.hpp"
+  - "src/data/BackendFactory.*"
@@ -1,119 +0,0 @@
#!/bin/bash

# Note: This script is intended to be run from the root of the repository.
#
# This script checks the format of the code and cmake files.
# In many cases it will automatically fix the issues and abort the commit.

no_formatted_directories_staged() {
    staged_directories=$(git diff-index --cached --name-only HEAD | awk -F/ '{print $1}')
    for sd in $staged_directories; do
        if [[ "$sd" =~ ^(benchmark|cmake|src|tests)$ ]]; then
            return 1
        fi
    done
    return 0
}

if no_formatted_directories_staged ; then
    exit 0
fi

echo "+ Checking code format..."

# paths to check and re-format
sources="src tests"
formatter="clang-format -i"
version=$($formatter --version | grep -o '[0-9\.]*')

if [[ "19.0.0" > "$version" ]]; then
    cat <<EOF

    ERROR
-----------------------------------------------------------------------------
    A minimum of version 19 of `which clang-format` is required.
    Your version is $version.
    Please fix paths and run again.
-----------------------------------------------------------------------------

EOF
    exit 3
fi

# check there is no .h headers, only .hpp
wrong_headers=$(find $sources -name "*.h" | sed 's/^/ - /')
if [[ ! -z "$wrong_headers" ]]; then
    cat <<EOF

    ERROR
-----------------------------------------------------------------------------
    Found .h headers in the source code. Please rename them to .hpp:

$wrong_headers
-----------------------------------------------------------------------------

EOF
    exit 2
fi

if ! command -v cmake-format &> /dev/null; then
    cat <<EOF

    ERROR
-----------------------------------------------------------------------------
    'cmake-format' is required to run this script.
    Please install it and run again.
-----------------------------------------------------------------------------

EOF
    exit 3
fi

function grep_code {
    grep -l "${1}" ${sources} -r --include \*.hpp --include \*.cpp
}

GNU_SED=$(sed --version 2>&1 | grep -q 'GNU' && echo true || echo false)

if [[ "$GNU_SED" == "false" ]]; then # macOS sed
    # make all includes to be <...> style
    grep_code '#include ".*"' | xargs sed -i '' -E 's|#include "(.*)"|#include <\1>|g'

    # make local includes to be "..." style
    main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | tr '\n' '|' | sed 's/|$//' | sed 's/|/\\|/g')
    grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i '' -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
else
    # make all includes to be <...> style
    grep_code '#include ".*"' | xargs sed -i -E 's|#include "(.*)"|#include <\1>|g'

    # make local includes to be "..." style
    main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | paste -sd '|' | sed 's/|/\\|/g')
    grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
fi

cmake_dirs=$(echo cmake $sources)
cmake_files=$(find $cmake_dirs -type f \( -name "CMakeLists.txt" -o -name "*.cmake" \))
cmake_files=$(echo $cmake_files ./CMakeLists.txt)

first=$(git diff $sources $cmake_files)
find $sources -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.ipp' \) -print0 | xargs -0 $formatter
cmake-format -i $cmake_files
second=$(git diff $sources $cmake_files)
changes=$(diff <(echo "$first") <(echo "$second"))
changes_number=$(echo -n "$changes" | wc -l | sed -e 's/^[[:space:]]*//')

if [ "$changes_number" != "0" ]; then
    cat <<\EOF

    WARNING
-----------------------------------------------------------------------------
    Automatically re-formatted code with 'clang-format' - commit was aborted.
    Please manually add any updated files and commit again.
-----------------------------------------------------------------------------

EOF
    if [[ "$1" == "--diff" ]]; then
        echo "$changes"
    fi
    exit 1
fi
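The removed hook branches on GNU versus BSD `sed` because the in-place flag differs between the two (`-i` versus `-i ''`). The probe it uses generalizes to other scripts; a standalone sketch, with a hypothetical file name for illustration:

```bash
#!/usr/bin/env bash
# Minimal sketch of the GNU/BSD sed detection used by the removed hook.
# BSD (macOS) sed has no --version flag, so the probe fails there.
if sed --version 2>&1 | grep -q 'GNU'; then
    sed_inplace() { sed -i -E "$@"; }       # GNU sed: -i takes no argument
else
    sed_inplace() { sed -i '' -E "$@"; }    # BSD sed: -i requires a (possibly empty) suffix
fi

# Example use: rewrite #include "foo/bar.hpp" to #include <foo/bar.hpp> in a file.
sed_inplace 's|#include "(.*)"|#include <\1>|g' some_file.cpp   # some_file.cpp is a placeholder
```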
@@ -1,7 +0,0 @@
#!/bin/bash

# This script is intended to be run from the root of the repository.

source .githooks/check-format
source .githooks/check-docs
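Both removed files lived under `.githooks/`, the directory this repository uses for its client-side hooks. If you want git to run hooks from a versioned directory like that (instead of `.git/hooks`), one common way is shown below; treat this as a general git technique, not something prescribed by this diff.

```bash
# Point git at the repository's versioned hooks directory.
git config core.hooksPath .githooks

# From then on, .githooks/pre-commit (on branches where it still exists)
# runs automatically before every commit.
```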
9 .github/ISSUE_TEMPLATE/bug_report.md vendored

@@ -3,29 +3,34 @@ name: Bug report
 about: Create a report to help us improve
 title: "[Title with short description] (Version: [Clio version])"
 labels: bug
-assignees: ''
-
+assignees: ""
 ---

 <!-- Please search existing issues to avoid creating duplicates. -->
 <!-- Kindly refrain from posting any credentials or sensitive information in this issue -->

 ## Issue Description

 <!-- Provide a summary for your issue/bug. -->

 ## Steps to Reproduce

 <!-- List in detail the exact steps to reproduce the unexpected behavior of the software. -->

 ## Expected Result

 <!-- Explain in detail what behavior you expected to happen. -->

 ## Actual Result

 <!-- Explain in detail what behavior actually happened. -->

 ## Environment

 <!-- Please describe your environment setup (such as Ubuntu 20.04.2 with Boost 1.82). -->
 <!-- Please use the version returned by './clio_server --version' as the version number -->

 ## Supporting Files

 <!-- If you have supporting files such as a log, feel free to post a link here using Github Gist. -->
 <!-- Consider adding configuration files with private information removed via Github Gist. -->
7 .github/ISSUE_TEMPLATE/feature_request.md vendored

@@ -3,21 +3,24 @@ name: Feature request
 about: Suggest an idea for this project
 title: "[Title with short description] (Version: [Clio version])"
 labels: enhancement
-assignees: ''
-
+assignees: ""
 ---

 <!-- Please search existing issues to avoid creating duplicates. -->
 <!-- Kindly refrain from posting any credentials or sensitive information in this issue -->

 ## Summary

 <!-- Provide a summary to the feature request -->

 ## Motivation

 <!-- Why do we need this feature? -->

 ## Solution

 <!-- What is the solution? -->

 ## Paths Not Taken

 <!-- What other alternatives have been considered? -->
5 .github/ISSUE_TEMPLATE/question.md vendored

@@ -3,8 +3,7 @@ name: Question
 about: A question in form of an issue
 title: "[Title with short description] (Version: Clio version)"
 labels: question
-assignees: ''
-
+assignees: ""
 ---

 <!-- Please search existing issues to avoid creating duplicates. -->
@@ -12,7 +11,9 @@ assignees: ''
 <!-- Kindly refrain from posting any credentials or sensitive information in this issue -->

 ## Question

 <!-- Your question -->

 ## Paths Not Taken

 <!-- If applicable, what other alternatives have been considered? -->
21 .github/actions/build_clio/action.yml vendored

@@ -1,13 +1,15 @@
 name: Build clio
 description: Build clio in build directory

 inputs:
-  target:
-    description: Build target name
+  targets:
+    description: Space-separated build target names
     default: all
-  substract_threads:
+  subtract_threads:
     description: An option for the action get_number_of_threads. See get_number_of_threads
     required: true
-    default: '0'
+    default: "0"

 runs:
   using: composite
   steps:
@@ -15,10 +17,13 @@ runs:
       uses: ./.github/actions/get_number_of_threads
       id: number_of_threads
       with:
-        substract_threads: ${{ inputs.substract_threads }}
+        subtract_threads: ${{ inputs.subtract_threads }}

-    - name: Build Clio
+    - name: Build targets
      shell: bash
      run: |
        cd build
-        cmake --build . --parallel ${{ steps.number_of_threads.outputs.threads_number }} --target ${{ inputs.target }}
+        cmake \
+          --build . \
+          --parallel ${{ steps.number_of_threads.outputs.threads_number }} \
+          --target ${{ inputs.targets }}
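The action is a thin wrapper around `cmake --build`. Run outside of GitHub Actions, the equivalent is roughly the sketch below; the target names and the thread count are placeholders (the action's default target is `all`).

```bash
#!/usr/bin/env bash
# Minimal sketch of what the build_clio action runs, outside of CI.
set -euo pipefail

cd build
cmake --build . \
    --parallel "$(nproc)" \
    --target clio_server clio_tests   # placeholder target list
```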
84 .github/actions/build_docker_image/action.yml vendored

@@ -1,8 +1,12 @@
 name: Build and push Docker image
 description: Build and push Docker image to DockerHub and GitHub Container Registry

 inputs:
-  image_name:
-    description: Name of the image to build
+  images:
+    description: Name of the images to use as a base name
+    required: true
+  dockerhub_repo:
+    description: DockerHub repository name
     required: true
   push_image:
     description: Whether to push the image to the registry (true/false)
@@ -19,48 +23,50 @@ inputs:
   description:
     description: Short description of the image
     required: true

 runs:
   using: composite
   steps:
     - name: Login to DockerHub
       if: ${{ inputs.push_image == 'true' }}
-      uses: docker/login-action@v3
+      uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
       with:
         username: ${{ env.DOCKERHUB_USER }}
         password: ${{ env.DOCKERHUB_PW }}

     - name: Login to GitHub Container Registry
       if: ${{ inputs.push_image == 'true' }}
-      uses: docker/login-action@v3
+      uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
       with:
         registry: ghcr.io
         username: ${{ github.repository_owner }}
         password: ${{ env.GITHUB_TOKEN }}

-    - uses: docker/setup-qemu-action@v3
-    - uses: docker/setup-buildx-action@v3
+    - uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+      with:
+        cache-image: false
+    - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

-    - uses: docker/metadata-action@v5
+    - uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
       id: meta
       with:
-        images: ${{ inputs.image_name }}
+        images: ${{ inputs.images }}
         tags: ${{ inputs.tags }}

     - name: Build and push
-      uses: docker/build-push-action@v5
+      uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
       with:
         context: ${{ inputs.directory }}
         platforms: ${{ inputs.platforms }}
         push: ${{ inputs.push_image == 'true' }}
         tags: ${{ steps.meta.outputs.tags }}

     - name: Update DockerHub description
       if: ${{ inputs.push_image == 'true' }}
-      uses: peter-evans/dockerhub-description@v4
+      uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
       with:
         username: ${{ env.DOCKERHUB_USER }}
         password: ${{ env.DOCKERHUB_PW }}
-        repository: ${{ inputs.image_name }}
+        repository: ${{ inputs.dockerhub_repo }}
         short-description: ${{ inputs.description }}
         readme-filepath: ${{ inputs.directory }}/README.md
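The build-and-push step boils down to a multi-platform `docker buildx build`. Outside the action it would look roughly like the sketch below; the context directory, image tag, and platform list are placeholders, not values taken from this repository.

```bash
#!/usr/bin/env bash
# Minimal sketch of the multi-arch build the action performs.
set -euo pipefail

docker buildx create --use                    # ensure a buildx builder is active
docker buildx build ./docker-context \
    --platform linux/amd64,linux/arm64 \
    --tag ghcr.io/example/some-image:latest \
    --push                                    # omit --push for a local-only build
```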
9 .github/actions/code_coverage/action.yml vendored

@@ -1,21 +1,26 @@
 name: Generate code coverage report
 description: Run tests, generate code coverage report and upload it to codecov.io

 runs:
   using: composite

   steps:
     - name: Run tests
       shell: bash
       run: |
         build/clio_tests

+    # Please keep exclude list in sync with .codecov.yml
     - name: Run gcovr
       shell: bash
       run: |
-        gcovr -e tests \
+        gcovr \
+          -e tests \
           -e src/data/cassandra \
           -e src/data/CassandraBackend.hpp \
           -e 'src/data/BackendFactory.*' \
-          --xml build/coverage_report.xml -j8 --exclude-throw-branches
+          --xml build/coverage_report.xml \
+          -j8 --exclude-throw-branches

     - name: Archive coverage report
       uses: actions/upload-artifact@v4
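The same data can be rendered as a browsable report locally after a coverage-enabled build. The sketch below keeps the action's exclude list but swaps the XML output for gcovr's HTML report, which is an adaptation for local use rather than what CI does.

```bash
#!/usr/bin/env bash
# Minimal sketch: build an HTML coverage report with the same excludes as CI.
# Assumes a coverage-instrumented build and that build/clio_tests has been run.
set -euo pipefail

mkdir -p build/coverage
gcovr \
    -e tests \
    -e src/data/cassandra \
    -e src/data/CassandraBackend.hpp \
    -e 'src/data/BackendFactory.*' \
    --exclude-throw-branches \
    --html-details build/coverage/index.html
```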
32 .github/actions/create_issue/action.yml vendored

@@ -1,5 +1,6 @@
 name: Create an issue
 description: Create an issue
+
 inputs:
   title:
     description: Issue title
@@ -10,26 +11,31 @@ inputs:
   labels:
     description: Comma-separated list of labels
     required: true
-    default: 'bug'
+    default: "bug"
   assignees:
     description: Comma-separated list of assignees
     required: true
-    default: 'cindyyan317,godexsoft,kuznetsss'
+    default: "godexsoft,kuznetsss,PeterChen13579"

 outputs:
   created_issue_id:
     description: Created issue id
     value: ${{ steps.create_issue.outputs.created_issue }}

 runs:
   using: composite
   steps:
     - name: Create an issue
       id: create_issue
       shell: bash
       run: |
         echo -e '${{ inputs.body }}' > issue.md
-        gh issue create --assignee '${{ inputs.assignees }}' --label '${{ inputs.labels }}' --title '${{ inputs.title }}' --body-file ./issue.md > create_issue.log
+        gh issue create \
+          --assignee '${{ inputs.assignees }}' \
+          --label '${{ inputs.labels }}' \
+          --title '${{ inputs.title }}' \
+          --body-file ./issue.md \
+          > create_issue.log
         created_issue=$(cat create_issue.log | sed 's|.*/||')
         echo "created_issue=$created_issue" >> $GITHUB_OUTPUT
         rm create_issue.log issue.md
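The step relies on the GitHub CLI: `gh issue create` prints the new issue's URL, and the issue number is recovered by stripping everything up to the last `/`. A standalone sketch with placeholder title, label, assignee, and body:

```bash
#!/usr/bin/env bash
# Minimal sketch of creating an issue and capturing its number with gh.
set -euo pipefail

echo "Something broke in nightly CI" > issue.md   # placeholder body

url=$(gh issue create \
    --assignee 'octocat' \
    --label 'bug' \
    --title 'Nightly build failure' \
    --body-file ./issue.md)

issue_number=${url##*/}    # same effect as the action's sed 's|.*/||'
echo "Created issue #$issue_number"
rm issue.md
```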
46 .github/actions/generate/action.yml vendored

@@ -1,5 +1,6 @@
 name: Run conan and cmake
 description: Run conan and cmake
+
 inputs:
   conan_profile:
     description: Conan profile name
@@ -7,27 +8,37 @@ inputs:
   conan_cache_hit:
     description: Whether conan cache has been downloaded
     required: true
-    default: 'false'
+    default: "false"
   build_type:
     description: Build type for third-party libraries and clio. Could be 'Release', 'Debug'
     required: true
-    default: 'Release'
+    default: "Release"
   build_integration_tests:
     description: Whether to build integration tests
     required: true
-    default: 'true'
+    default: "true"
   code_coverage:
     description: Whether conan's coverage option should be on or not
     required: true
-    default: 'false'
+    default: "false"
   static:
     description: Whether Clio is to be statically linked
     required: true
-    default: 'false'
+    default: "false"
   sanitizer:
     description: Sanitizer to use
     required: true
-    default: 'false' # false, tsan, asan or ubsan
+    default: "false"
+    choices:
+      - "false"
+      - "tsan"
+      - "asan"
+      - "ubsan"
+  time_trace:
+    description: Whether to enable compiler trace reports
+    required: true
+    default: "false"

 runs:
   using: composite
   steps:
@@ -42,10 +53,22 @@ runs:
       CODE_COVERAGE: "${{ inputs.code_coverage == 'true' && 'True' || 'False' }}"
       STATIC_OPTION: "${{ inputs.static == 'true' && 'True' || 'False' }}"
       INTEGRATION_TESTS_OPTION: "${{ inputs.build_integration_tests == 'true' && 'True' || 'False' }}"
+      TIME_TRACE: "${{ inputs.time_trace == 'true' && 'True' || 'False' }}"
     run: |
       cd build
-      conan install .. -of . -b $BUILD_OPTION -s build_type=${{ inputs.build_type }} -o clio:static="${STATIC_OPTION}" -o clio:tests=True -o clio:integration_tests="${INTEGRATION_TESTS_OPTION}" -o clio:lint=False -o clio:coverage="${CODE_COVERAGE}" --profile ${{ inputs.conan_profile }}
-
+      conan \
+        install .. \
+        -of . \
+        -b $BUILD_OPTION \
+        -s build_type=${{ inputs.build_type }} \
+        -o clio:static="${STATIC_OPTION}" \
+        -o clio:tests=True \
+        -o clio:integration_tests="${INTEGRATION_TESTS_OPTION}" \
+        -o clio:lint=False \
+        -o clio:coverage="${CODE_COVERAGE}" \
+        -o clio:time_trace="${TIME_TRACE}" \
+        --profile ${{ inputs.conan_profile }}

   - name: Run cmake
     shell: bash
     env:
@@ -57,4 +80,9 @@ runs:
       '' }}
     run: |
       cd build
-      cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" ${SANITIZER_OPTION} .. -G Ninja
+      cmake \
+        -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
+        -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
+        ${SANITIZER_OPTION} \
+        .. \
+        -G Ninja
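Locally, this generate step is two commands: a `conan install` that builds missing dependencies and emits a CMake toolchain file, followed by a CMake configure that consumes it. A sketch for a Release build is shown below; `-b missing` stands in for the action's `$BUILD_OPTION`, the profile name is a placeholder, and the flags otherwise mirror the action verbatim (conan 1.x syntax).

```bash
#!/usr/bin/env bash
# Minimal sketch of the generate step outside CI.
set -euo pipefail

mkdir -p build && cd build

conan install .. -of . -b missing \
    -s build_type=Release \
    -o clio:static=True \
    -o clio:tests=True \
    -o clio:integration_tests=True \
    -o clio:lint=False \
    -o clio:coverage=False \
    --profile gcc                     # placeholder profile name

cmake \
    -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
    -DCMAKE_BUILD_TYPE=Release \
    .. -G Ninja
```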
10 .github/actions/get_number_of_threads/action.yml vendored

@@ -1,14 +1,16 @@
 name: Get number of threads
 description: Determines number of threads to use on macOS and Linux

 inputs:
-  substract_threads:
-    description: How many threads to substract from the calculated number
+  subtract_threads:
+    description: How many threads to subtract from the calculated number
     required: true
-    default: '0'
+    default: "0"

 outputs:
   threads_number:
     description: Number of threads to use
     value: ${{ steps.number_of_threads_export.outputs.num }}

 runs:
   using: composite
   steps:
@@ -29,6 +31,6 @@ runs:
     shell: bash
     run: |
       num_of_threads=${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}
-      shift_by=${{ inputs.substract_threads }}
+      shift_by=${{ inputs.subtract_threads }}
       shifted=$((num_of_threads - shift_by))
       echo "num=$(( shifted > 1 ? shifted : 1 ))" >> $GITHUB_OUTPUT
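The platform-specific steps referenced here (`mac_threads` / `linux_threads`) are not shown in this hunk. The usual way to count logical CPUs on the two platforms, combined with the clamping logic from the final step, looks like this sketch:

```bash
#!/usr/bin/env bash
# Minimal sketch: pick a parallel job count, optionally reserving a few cores.
set -euo pipefail

subtract_threads=${1:-0}

if [[ "$(uname)" == "Darwin" ]]; then
    num_of_threads=$(sysctl -n hw.logicalcpu)   # macOS
else
    num_of_threads=$(nproc)                     # Linux
fi

shifted=$((num_of_threads - subtract_threads))
echo "threads_number=$(( shifted > 1 ? shifted : 1 ))"   # never go below 1
```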
@@ -1,9 +1,11 @@
 name: Git common ancestor
 description: Find the closest common commit
+
 outputs:
   commit:
     description: Hash of commit
     value: ${{ steps.find_common_ancestor.outputs.commit }}
+
 runs:
   using: composite
   steps:
37 .github/actions/prepare_runner/action.yml vendored

@@ -1,9 +1,11 @@
 name: Prepare runner
 description: Install packages, set environment variables, create directories
+
 inputs:
   disable_ccache:
     description: Whether ccache should be disabled
     required: true
+
 runs:
   using: composite
   steps:
@@ -11,8 +13,37 @@ runs:
     if: ${{ runner.os == 'macOS' }}
     shell: bash
     run: |
-      brew install llvm@14 pkg-config ninja bison cmake ccache jq gh conan@1 ca-certificates
-      echo "/opt/homebrew/opt/conan@1/bin" >> $GITHUB_PATH
+      brew install \
+        bison \
+        ca-certificates \
+        ccache \
+        clang-build-analyzer \
+        conan@1 \
+        gh \
+        jq \
+        llvm@14 \
+        ninja \
+        pkg-config
+      echo "/opt/homebrew/opt/conan@1/bin" >> $GITHUB_PATH
+
+  - name: Install CMake 3.31.6 on mac
+    if: ${{ runner.os == 'macOS' }}
+    shell: bash
+    run: |
+      # Uninstall any existing cmake
+      brew uninstall cmake --ignore-dependencies || true
+
+      # Download specific cmake formula
+      FORMULA_URL="https://raw.githubusercontent.com/Homebrew/homebrew-core/b4e46db74e74a8c1650b38b1da222284ce1ec5ce/Formula/c/cmake.rb"
+      FORMULA_EXPECTED_SHA256="c7ec95d86f0657638835441871e77541165e0a2581b53b3dd657cf13ad4228d4"
+
+      mkdir -p /tmp/homebrew-formula
+      curl -s -L $FORMULA_URL -o /tmp/homebrew-formula/cmake.rb
+
+      echo "$FORMULA_EXPECTED_SHA256 /tmp/homebrew-formula/cmake.rb" | shasum -a 256 -c
+
+      # Install cmake from the specific formula with force flag
+      brew install --formula --force /tmp/homebrew-formula/cmake.rb

   - name: Fix git permissions on Linux
     if: ${{ runner.os == 'Linux' }}
@@ -44,5 +75,3 @@ runs:
     run: |
       mkdir -p $CCACHE_DIR
       mkdir -p $CONAN_USER_HOME/.conan
4 .github/actions/restore_cache/action.yml vendored

@@ -1,5 +1,6 @@
 name: Restore cache
 description: Find and restores conan and ccache cache
+
 inputs:
   conan_dir:
     description: Path to .conan directory
@@ -17,7 +18,7 @@ inputs:
   code_coverage:
     description: Whether code coverage is on
     required: true
-    default: 'false'
+    default: "false"
 outputs:
   conan_hash:
     description: Hash to use as a part of conan cache key
@@ -28,6 +29,7 @@ outputs:
   ccache_cache_hit:
     description: True if ccache cache has been downloaded
     value: ${{ steps.ccache_cache.outputs.cache-hit }}
+
 runs:
   using: composite
   steps:

6 .github/actions/save_cache/action.yml vendored

@@ -1,5 +1,6 @@
 name: Save cache
 description: Save conan and ccache cache for develop branch
+
 inputs:
   conan_dir:
     description: Path to .conan directory
@@ -28,7 +29,8 @@ inputs:
   code_coverage:
     description: Whether code coverage is on
     required: true
-    default: 'false'
+    default: "false"
+
 runs:
   using: composite
   steps:
@@ -55,5 +57,3 @@
     with:
       path: ${{ inputs.ccache_dir }}
       key: clio-ccache-${{ runner.os }}-${{ inputs.build_type }}${{ inputs.code_coverage == 'true' && '-code_coverage' || '' }}-${{ inputs.conan_profile }}-develop-${{ steps.git_common_ancestor.outputs.commit }}

27 .github/actions/setup_conan/action.yml vendored

@@ -1,22 +1,19 @@
 name: Setup conan
 description: Setup conan profile and artifactory

 inputs:
   conan_profile:
     description: Conan profile name
     required: true
-outputs:
-  conan_profile:
-    description: Created conan profile name
-    value: ${{ steps.conan_export_output.outputs.conan_profile }}

 runs:
   using: composite
   steps:
-    - name: On mac
+    - name: Create conan profile on macOS
       if: ${{ runner.os == 'macOS' }}
       shell: bash
       env:
-        CONAN_PROFILE: apple_clang_16
-      id: conan_setup_mac
+        CONAN_PROFILE: ${{ inputs.conan_profile }}
       run: |
         echo "Creating $CONAN_PROFILE conan profile"
         conan profile new $CONAN_PROFILE --detect --force
@@ -24,20 +21,6 @@ runs:
         conan profile update settings.compiler.cppstd=20 $CONAN_PROFILE
         conan profile update env.CXXFLAGS=-DBOOST_ASIO_DISABLE_CONCEPTS $CONAN_PROFILE
         conan profile update "conf.tools.build:cxxflags+=[\"-DBOOST_ASIO_DISABLE_CONCEPTS\"]" $CONAN_PROFILE
-        echo "created_conan_profile=$CONAN_PROFILE" >> $GITHUB_OUTPUT
-
-    - name: On linux
-      if: ${{ runner.os == 'Linux' }}
-      shell: bash
-      id: conan_setup_linux
-      run: |
-        echo "created_conan_profile=${{ inputs.conan_profile }}" >> $GITHUB_OUTPUT
-
-    - name: Export output variable
-      shell: bash
-      id: conan_export_output
-      run: |
-        echo "conan_profile=${{ steps.conan_setup_mac.outputs.created_conan_profile || steps.conan_setup_linux.outputs.created_conan_profile }}" >> $GITHUB_OUTPUT

     - name: Add conan-non-prod artifactory
       shell: bash
@@ -48,5 +31,3 @@
       else
         echo "Conan-non-prod is available"
       fi
6 .github/actions/test/Dockerfile vendored

@@ -1,6 +0,0 @@
FROM cassandra:4.0.4

RUN apt-get update && apt-get install -y postgresql
COPY entrypoint.sh /entrypoint.sh

ENTRYPOINT ["/entrypoint.sh"]

8 .github/actions/test/entrypoint.sh vendored

@@ -1,8 +0,0 @@
#!/bin/bash

pg_ctlcluster 12 main start
su postgres -c"psql -c\"alter user postgres with password 'postgres'\""
su cassandra -c "/opt/cassandra/bin/cassandra -R"
sleep 90
chmod +x ./clio_tests
./clio_tests
161 .github/dependabot.yml vendored

@@ -1,16 +1,157 @@
 version: 2
 updates:
-  - package-ecosystem: "github-actions"
-    directory: "/"
+  - package-ecosystem: github-actions
+    directory: /
     schedule:
-      interval: "weekly"
-      day: "monday"
+      interval: weekly
+      day: monday
       time: "04:00"
-      timezone: "Etc/GMT"
+      timezone: Etc/GMT
     reviewers:
-      - "cindyyan317"
-      - "godexsoft"
-      - "kuznetsss"
+      - XRPLF/clio-dev-team
     commit-message:
-      prefix: "[CI] "
-    target-branch: "develop"
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/build_clio/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/build_docker_image/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/code_coverage/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/create_issue/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/generate/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/get_number_of_threads/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/git_common_ancestor/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/prepare_runner/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/restore_cache/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/save_cache/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
+
+  - package-ecosystem: github-actions
+    directory: .github/actions/setup_conan/
+    schedule:
+      interval: weekly
+      day: monday
+      time: "04:00"
+      timezone: Etc/GMT
+    reviewers:
+      - XRPLF/clio-dev-team
+    commit-message:
+      prefix: "ci: [DEPENDABOT] "
+    target-branch: develop
158 .github/workflows/build.yml vendored

@@ -1,4 +1,5 @@
 name: Build
+
 on:
   push:
     branches: [master, release/*, develop]
@@ -6,139 +7,75 @@ on:
     branches: [master, release/*, develop]
   workflow_dispatch:

 concurrency:
   # Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
-  check_format:
-    name: Check format
-    runs-on: ubuntu-latest
-    container:
-      image: rippleci/clio_ci:latest
-    steps:
-      - name: Fix git permissions on Linux
-        shell: bash
-        run: git config --global --add safe.directory $PWD
-
-      - uses: actions/checkout@v4
-      - name: Run formatters
-        id: run_formatters
-        run: |
-          ./.githooks/check-format --diff
-        shell: bash
-
-  check_docs:
-    name: Check documentation
-    runs-on: ubuntu-latest
-    container:
-      image: rippleci/clio_ci:latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Run linter
-        id: run_linter
-        run: |
-          ./.githooks/check-docs
-        shell: bash
-
-  build:
-    name: Build
-    needs:
-      - check_format
-      - check_docs
+  build-and-test:
+    name: Build and Test
+
     strategy:
       fail-fast: false
       matrix:
+        os: [heavy]
+        conan_profile: [gcc, clang]
+        build_type: [Release, Debug]
+        container: ['{ "image": "ghcr.io/xrplf/clio-ci:latest" }']
+        static: [true]
+
         include:
-          - os: heavy
-            conan_profile: gcc
-            build_type: Release
-            container: '{ "image": "rippleci/clio_ci:latest" }'
-            code_coverage: false
-            static: true
-          - os: heavy
-            conan_profile: gcc
-            build_type: Debug
-            container: '{ "image": "rippleci/clio_ci:latest" }'
-            code_coverage: true
-            static: true
-          - os: heavy
-            conan_profile: clang
-            build_type: Release
-            container: '{ "image": "rippleci/clio_ci:latest" }'
-            code_coverage: false
-            static: true
-          - os: heavy
-            conan_profile: clang
-            build_type: Debug
-            container: '{ "image": "rippleci/clio_ci:latest" }'
-            code_coverage: false
-            static: true
           - os: macos15
             conan_profile: default_apple_clang
            build_type: Release
            code_coverage: false
            container: ""
            static: false
-    uses: ./.github/workflows/build_impl.yml
+    uses: ./.github/workflows/build_and_test.yml
     with:
       runs_on: ${{ matrix.os }}
       container: ${{ matrix.container }}
       conan_profile: ${{ matrix.conan_profile }}
       build_type: ${{ matrix.build_type }}
       code_coverage: ${{ matrix.code_coverage }}
       static: ${{ matrix.static }}
-      unit_tests: true
-      integration_tests: true
-      clio_server: true
+      run_unit_tests: true
+      run_integration_tests: false
+      upload_clio_server: true

-  test:
-    name: Run Tests
-    needs: build
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - os: heavy
-            conan_profile: gcc
-            build_type: Release
-            container:
-              image: rippleci/clio_ci:latest
-          - os: heavy
-            conan_profile: clang
-            build_type: Release
-            container:
-              image: rippleci/clio_ci:latest
-          - os: heavy
-            conan_profile: clang
-            build_type: Debug
-            container:
-              image: rippleci/clio_ci:latest
-          - os: macos15
-            conan_profile: apple_clang_16
-            build_type: Release
-    runs-on: ${{ matrix.os }}
-    container: ${{ matrix.container }}
+  code_coverage:
+    name: Run Code Coverage
steps:
|
||||
- name: Clean workdir
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
uses: kuznetsss/workspace-cleanup@1.0
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_tests_${{ runner.os }}_${{ matrix.build_type }}_${{ matrix.conan_profile }}
|
||||
|
||||
- name: Run clio_tests
|
||||
run: |
|
||||
chmod +x ./clio_tests
|
||||
./clio_tests
|
||||
uses: ./.github/workflows/build_impl.yml
|
||||
with:
|
||||
runs_on: heavy
|
||||
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
|
||||
conan_profile: gcc
|
||||
build_type: Debug
|
||||
disable_cache: false
|
||||
code_coverage: true
|
||||
static: true
|
||||
upload_clio_server: false
|
||||
targets: all
|
||||
sanitizer: "false"
|
||||
analyze_build_time: false
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
check_config:
|
||||
name: Check Config Description
|
||||
needs: build
|
||||
needs: build-and-test
|
||||
runs-on: heavy
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
container:
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_server_Linux_Release_gcc
|
||||
|
||||
- name: Compare Config Description
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -163,8 +100,3 @@ jobs:
|
||||
fi
|
||||
rm -f ${configDescriptionFile}
|
||||
exit 0
|
||||
.github/workflows/build_and_test.yml (92 changes, vendored, new file)
@@ -0,0 +1,92 @@
|
||||
name: Reusable build and test
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs_on:
|
||||
description: Runner to run the job on
|
||||
required: true
|
||||
type: string
|
||||
|
||||
container:
|
||||
description: "The container object as a JSON string (leave empty to run natively)"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
conan_profile:
|
||||
description: Conan profile to use
|
||||
required: true
|
||||
type: string
|
||||
|
||||
build_type:
|
||||
description: Build type
|
||||
required: true
|
||||
type: string
|
||||
|
||||
disable_cache:
|
||||
description: Whether ccache and conan cache should be disabled
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
static:
|
||||
description: Whether to build static binaries
|
||||
required: true
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
run_unit_tests:
|
||||
description: Whether to run unit tests
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
run_integration_tests:
|
||||
description: Whether to run integration tests
|
||||
required: true
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
upload_clio_server:
|
||||
description: Whether to upload clio_server
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
targets:
|
||||
description: Space-separated build target names
|
||||
required: false
|
||||
type: string
|
||||
default: all
|
||||
|
||||
sanitizer:
|
||||
description: Sanitizer to use
|
||||
required: false
|
||||
type: string
|
||||
default: "false"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
uses: ./.github/workflows/build_impl.yml
|
||||
with:
|
||||
runs_on: ${{ inputs.runs_on }}
|
||||
container: ${{ inputs.container }}
|
||||
conan_profile: ${{ inputs.conan_profile }}
|
||||
build_type: ${{ inputs.build_type }}
|
||||
disable_cache: ${{ inputs.disable_cache }}
|
||||
code_coverage: false
|
||||
static: ${{ inputs.static }}
|
||||
upload_clio_server: ${{ inputs.upload_clio_server }}
|
||||
targets: ${{ inputs.targets }}
|
||||
sanitizer: ${{ inputs.sanitizer }}
|
||||
analyze_build_time: false
|
||||
|
||||
test:
|
||||
needs: build
|
||||
uses: ./.github/workflows/test_impl.yml
|
||||
with:
|
||||
runs_on: ${{ inputs.runs_on }}
|
||||
container: ${{ inputs.container }}
|
||||
conan_profile: ${{ inputs.conan_profile }}
|
||||
build_type: ${{ inputs.build_type }}
|
||||
run_unit_tests: ${{ inputs.run_unit_tests }}
|
||||
run_integration_tests: ${{ inputs.run_integration_tests }}
|
||||
sanitizer: ${{ inputs.sanitizer }}
|
||||
@@ -1,4 +1,5 @@
|
||||
name: Build and publish Clio docker image
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
@@ -41,6 +42,7 @@ jobs:
|
||||
build_and_publish_image:
|
||||
name: Build and publish image
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
@@ -87,7 +89,10 @@ jobs:
|
||||
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
image_name: rippleci/clio
|
||||
images: |
|
||||
rippleci/clio
|
||||
ghcr.io/xrplf/clio
|
||||
dockerhub_repo: rippleci/clio
|
||||
push_image: ${{ inputs.publish_image }}
|
||||
directory: docker/clio
|
||||
tags: ${{ inputs.tags }}
|
||||
|
||||
.github/workflows/build_impl.yml (106 changes, vendored)
@@ -1,4 +1,5 @@
|
||||
name: Reusable build
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
@@ -6,13 +7,11 @@ on:
|
||||
description: Runner to run the job on
|
||||
required: true
|
||||
type: string
|
||||
default: heavy
|
||||
|
||||
container:
|
||||
description: "The container object as a JSON string (leave empty to run natively)"
|
||||
required: true
|
||||
type: string
|
||||
default: ""
|
||||
|
||||
conan_profile:
|
||||
description: Conan profile to use
|
||||
@@ -28,60 +27,51 @@ on:
|
||||
description: Whether ccache and conan cache should be disabled
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
code_coverage:
|
||||
description: Whether to enable code coverage
|
||||
required: true
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
static:
|
||||
description: Whether to build static binaries
|
||||
required: true
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
unit_tests:
|
||||
description: Whether to run unit tests
|
||||
upload_clio_server:
|
||||
description: Whether to upload clio_server
|
||||
required: true
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
integration_tests:
|
||||
description: Whether to run integration tests
|
||||
targets:
|
||||
description: Space-separated build target names
|
||||
required: true
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
clio_server:
|
||||
description: Whether to build clio_server
|
||||
required: true
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
target:
|
||||
description: Build target name
|
||||
required: false
|
||||
type: string
|
||||
default: all
|
||||
|
||||
sanitizer:
|
||||
description: Sanitizer to use
|
||||
required: false
|
||||
required: true
|
||||
type: string
|
||||
default: 'false'
|
||||
|
||||
analyze_build_time:
|
||||
description: Whether to enable build time analysis
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
secrets:
|
||||
CODECOV_TOKEN:
|
||||
required: false
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build ${{ inputs.container != '' && 'in container' || 'natively' }}
|
||||
runs-on: ${{ inputs.runs_on }}
|
||||
runs-on: ${{ inputs.runs_on }}
|
||||
container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
|
||||
|
||||
steps:
|
||||
- name: Clean workdir
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
uses: kuznetsss/workspace-cleanup@1.0
|
||||
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
@@ -94,7 +84,6 @@ jobs:
|
||||
|
||||
- name: Setup conan
|
||||
uses: ./.github/actions/setup_conan
|
||||
id: conan
|
||||
with:
|
||||
conan_profile: ${{ inputs.conan_profile }}
|
||||
|
||||
@@ -104,7 +93,7 @@ jobs:
|
||||
id: restore_cache
|
||||
with:
|
||||
conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
|
||||
conan_profile: ${{ steps.conan.outputs.conan_profile }}
|
||||
conan_profile: ${{ inputs.conan_profile }}
|
||||
ccache_dir: ${{ env.CCACHE_DIR }}
|
||||
build_type: ${{ inputs.build_type }}
|
||||
code_coverage: ${{ inputs.code_coverage }}
|
||||
@@ -112,17 +101,33 @@ jobs:
|
||||
- name: Run conan and cmake
|
||||
uses: ./.github/actions/generate
|
||||
with:
|
||||
conan_profile: ${{ steps.conan.outputs.conan_profile }}
|
||||
conan_profile: ${{ inputs.conan_profile }}
|
||||
conan_cache_hit: ${{ !inputs.disable_cache && steps.restore_cache.outputs.conan_cache_hit }}
|
||||
build_type: ${{ inputs.build_type }}
|
||||
code_coverage: ${{ inputs.code_coverage }}
|
||||
static: ${{ inputs.static }}
|
||||
sanitizer: ${{ inputs.sanitizer }}
|
||||
time_trace: ${{ inputs.analyze_build_time }}
|
||||
|
||||
- name: Build Clio
|
||||
uses: ./.github/actions/build_clio
|
||||
with:
|
||||
target: ${{ inputs.target }}
|
||||
targets: ${{ inputs.targets }}
|
||||
|
||||
- name: Show build time analyze report
|
||||
if: ${{ inputs.analyze_build_time }}
|
||||
run: |
|
||||
ClangBuildAnalyzer --all build/ build_time_report.bin
|
||||
ClangBuildAnalyzer --analyze build_time_report.bin > build_time_report.txt
|
||||
cat build_time_report.txt
|
||||
shell: bash
|
||||
|
||||
- name: Upload build time analyze report
|
||||
if: ${{ inputs.analyze_build_time }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build_time_report_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
path: build_time_report.txt
|
||||
|
||||
- name: Show ccache's statistics
|
||||
if: ${{ !inputs.disable_cache }}
|
||||
@@ -135,32 +140,32 @@ jobs:
|
||||
cat /tmp/ccache.stats
|
||||
|
||||
- name: Strip unit_tests
|
||||
if: ${{ inputs.unit_tests && !inputs.code_coverage && inputs.sanitizer == 'false' }}
|
||||
if: inputs.sanitizer == 'false' && !inputs.code_coverage && !inputs.analyze_build_time
|
||||
run: strip build/clio_tests
|
||||
|
||||
|
||||
- name: Strip integration_tests
|
||||
if: ${{ inputs.integration_tests && !inputs.code_coverage }}
|
||||
if: inputs.sanitizer == 'false' && !inputs.code_coverage && !inputs.analyze_build_time
|
||||
run: strip build/clio_integration_tests
|
||||
|
||||
- name: Upload clio_server
|
||||
if: ${{ inputs.clio_server }}
|
||||
if: inputs.upload_clio_server && !inputs.code_coverage && !inputs.analyze_build_time
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: clio_server_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
|
||||
name: clio_server_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
path: build/clio_server
|
||||
|
||||
|
||||
- name: Upload clio_tests
|
||||
if: ${{ inputs.unit_tests && !inputs.code_coverage }}
|
||||
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
|
||||
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
path: build/clio_tests
|
||||
|
||||
|
||||
- name: Upload clio_integration_tests
|
||||
if: ${{ inputs.integration_tests && !inputs.code_coverage }}
|
||||
if: ${{ !inputs.code_coverage && !inputs.analyze_build_time }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ steps.conan.outputs.conan_profile }}
|
||||
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
path: build/clio_integration_tests
|
||||
|
||||
- name: Save cache
|
||||
@@ -175,16 +180,25 @@ jobs:
|
||||
ccache_cache_miss_rate: ${{ steps.ccache_stats.outputs.miss_rate }}
|
||||
build_type: ${{ inputs.build_type }}
|
||||
code_coverage: ${{ inputs.code_coverage }}
|
||||
conan_profile: ${{ steps.conan.outputs.conan_profile }}
|
||||
conan_profile: ${{ inputs.conan_profile }}
|
||||
|
||||
# TODO: This is not a part of build process but it is the easiest way to do it here.
|
||||
# It will be refactored in https://github.com/XRPLF/clio/issues/1075
|
||||
# This is run as part of the build job, because it requires the following:
|
||||
# - source code
|
||||
# - generated source code (Build.cpp)
|
||||
# - conan packages
|
||||
# - .gcno files in build directory
|
||||
#
|
||||
# It's all available in the build job, but not in the test job
|
||||
- name: Run code coverage
|
||||
if: ${{ inputs.code_coverage }}
|
||||
uses: ./.github/actions/code_coverage
|
||||
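The comments above explain why coverage data has to be collected in the build job: the `code_coverage` action needs the sources, the generated `Build.cpp`, the conan packages and the `.gcno` files that only exist there. For local debugging, a rough sketch of producing a similar Cobertura-style XML report with `gcovr` is shown below. This is an assumption for illustration only, not necessarily what `./.github/actions/code_coverage` actually runs; it presumes a gcc Debug build configured with coverage instrumentation.

```bash
# Run the unit tests first so that build/ contains .gcda files next to the .gcno files.
./build/clio_tests

# gcovr walks the object directory and writes a Cobertura-style XML report;
# build/coverage_report.xml is the file name the upload workflow expects.
gcovr --root . --object-directory build --xml-pretty --output build/coverage_report.xml
```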
|
||||
# `codecov/codecov-action` will rerun `gcov` if it's available and build directory is present
|
||||
# To prevent this from happening, we run this action in a separate workflow
|
||||
#
|
||||
# More info: https://github.com/XRPLF/clio/pull/2066
|
||||
upload_coverage_report:
|
||||
if: ${{ inputs.code_coverage }}
|
||||
if: ${{ inputs.code_coverage }}
|
||||
name: Codecov
|
||||
needs: build
|
||||
uses: ./.github/workflows/upload_coverage_report.yml
|
||||
|
||||
.github/workflows/check_libxrpl.yml (28 changes, vendored)
@@ -1,19 +1,28 @@
|
||||
name: Check new libXRPL
|
||||
|
||||
on:
|
||||
repository_dispatch:
|
||||
types: [check_libxrpl]
|
||||
|
||||
concurrency:
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CONAN_PROFILE: gcc
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build Clio / `libXRPL ${{ github.event.client_payload.version }}`
|
||||
runs-on: [self-hosted, heavy]
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Update libXRPL version requirement
|
||||
shell: bash
|
||||
@@ -23,18 +32,17 @@ jobs:
|
||||
- name: Prepare runner
|
||||
uses: ./.github/actions/prepare_runner
|
||||
with:
|
||||
disable_ccache: true
|
||||
disable_ccache: true
|
||||
|
||||
- name: Setup conan
|
||||
uses: ./.github/actions/setup_conan
|
||||
id: conan
|
||||
with:
|
||||
conan_profile: gcc
|
||||
conan_profile: ${{ env.CONAN_PROFILE }}
|
||||
|
||||
- name: Run conan and cmake
|
||||
uses: ./.github/actions/generate
|
||||
with:
|
||||
conan_profile: ${{ steps.conan.outputs.conan_profile }}
|
||||
conan_profile: ${{ env.CONAN_PROFILE }}
|
||||
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
|
||||
build_type: Release
|
||||
|
||||
@@ -55,7 +63,7 @@ jobs:
|
||||
needs: build
|
||||
runs-on: [self-hosted, heavy]
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
steps:
|
||||
- uses: actions/download-artifact@v4
|
||||
@@ -72,9 +80,11 @@ jobs:
|
||||
needs: [build, run_tests]
|
||||
if: ${{ always() && contains(needs.*.result, 'failure') }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
@@ -83,8 +93,8 @@ jobs:
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
labels: 'compatibility,bug'
|
||||
title: 'Proposed libXRPL check failed'
|
||||
labels: "compatibility,bug"
|
||||
title: "Proposed libXRPL check failed"
|
||||
body: >
|
||||
Clio build or tests failed against `libXRPL ${{ github.event.client_payload.version }}`.
|
||||
|
||||
|
||||
.github/workflows/check_pr_title.yml (9 changes, vendored)
@@ -1,4 +1,5 @@
|
||||
name: Check PR title
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, edited, reopened, synchronize]
|
||||
@@ -7,12 +8,10 @@ on:
|
||||
jobs:
|
||||
check_title:
|
||||
runs-on: ubuntu-latest
|
||||
# permissions:
|
||||
# pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: ytanikin/PRConventionalCommits@1.3.0
|
||||
- uses: ytanikin/pr-conventional-commits@8267db1bacc237419f9ed0228bb9d94e94271a1d # v1.4.1
|
||||
with:
|
||||
task_types: '["build","feat","fix","docs","test","ci","style","refactor","perf","chore"]'
|
||||
add_label: false
|
||||
# Turned off labelling because it leads to an error, see https://github.com/ytanikin/PRConventionalCommits/issues/19
|
||||
# custom_labels: '{"build":"build", "feat":"enhancement", "fix":"bug", "docs":"documentation", "test":"testability", "ci":"ci", "style":"refactoring", "refactor":"refactoring", "perf":"performance", "chore":"tooling"}'
|
||||
custom_labels: '{"build":"build", "feat":"enhancement", "fix":"bug", "docs":"documentation", "test":"testability", "ci":"ci", "style":"refactoring", "refactor":"refactoring", "perf":"performance", "chore":"tooling"}'
|
||||
|
||||
.github/workflows/clang-tidy.yml (32 changes, vendored)
@@ -1,4 +1,5 @@
|
||||
name: Clang-tidy check
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 9 * * 1-5"
|
||||
@@ -8,13 +9,21 @@ on:
|
||||
paths:
|
||||
- .clang_tidy
|
||||
- .github/workflows/clang-tidy.yml
|
||||
workflow_call:
|
||||
|
||||
concurrency:
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CONAN_PROFILE: clang
|
||||
|
||||
jobs:
|
||||
clang_tidy:
|
||||
runs-on: heavy
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
issues: write
|
||||
@@ -32,9 +41,8 @@ jobs:
|
||||
|
||||
- name: Setup conan
|
||||
uses: ./.github/actions/setup_conan
|
||||
id: conan
|
||||
with:
|
||||
conan_profile: clang
|
||||
conan_profile: ${{ env.CONAN_PROFILE }}
|
||||
|
||||
- name: Restore cache
|
||||
uses: ./.github/actions/restore_cache
|
||||
@@ -42,12 +50,12 @@ jobs:
|
||||
with:
|
||||
conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
|
||||
ccache_dir: ${{ env.CCACHE_DIR }}
|
||||
conan_profile: ${{ steps.conan.outputs.conan_profile }}
|
||||
conan_profile: ${{ env.CONAN_PROFILE }}
|
||||
|
||||
- name: Run conan and cmake
|
||||
uses: ./.github/actions/generate
|
||||
with:
|
||||
conan_profile: ${{ steps.conan.outputs.conan_profile }}
|
||||
conan_profile: ${{ env.CONAN_PROFILE }}
|
||||
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
|
||||
build_type: Release
|
||||
|
||||
@@ -62,11 +70,11 @@ jobs:
|
||||
run: |
|
||||
run-clang-tidy-19 -p build -j ${{ steps.number_of_threads.outputs.threads_number }} -fix -quiet 1>output.txt
|
||||
|
||||
- name: Check format
|
||||
- name: Fix local includes
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
continue-on-error: true
|
||||
shell: bash
|
||||
run: ./.githooks/check-format
|
||||
run: pre-commit run --all-files fix-local-includes
|
||||
|
||||
- name: Print issues found
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
@@ -83,13 +91,13 @@ jobs:
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
title: 'Clang-tidy found bugs in code 🐛'
|
||||
title: "Clang-tidy found bugs in code 🐛"
|
||||
body: >
|
||||
Clang-tidy found issues in the code:
|
||||
|
||||
List of the issues found: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
|
||||
|
||||
- uses: crazy-max/ghaction-import-gpg@v6
|
||||
- uses: crazy-max/ghaction-import-gpg@e89d40939c28e39f97cf32126055eeae86ba74ec # v6.3.0
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
|
||||
@@ -99,7 +107,7 @@ jobs:
|
||||
|
||||
- name: Create PR with fixes
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
env:
|
||||
GH_REPO: ${{ github.repository }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
@@ -111,7 +119,7 @@ jobs:
|
||||
delete-branch: true
|
||||
title: "style: clang-tidy auto fixes"
|
||||
body: "Fixes #${{ steps.create_issue.outputs.created_issue_id }}. Please review and commit clang-tidy fixes."
|
||||
reviewers: "cindyyan317,godexsoft,kuznetsss"
|
||||
reviewers: "godexsoft,kuznetsss,PeterChen13579"
|
||||
|
||||
- name: Fail the job
|
||||
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
name: Restart clang-tidy workflow
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [develop]
|
||||
|
||||
.github/workflows/docs.yml (13 changes, vendored)
@@ -1,4 +1,5 @@
|
||||
name: Documentation
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [develop]
|
||||
@@ -10,7 +11,8 @@ permissions:
|
||||
id-token: write
|
||||
|
||||
concurrency:
|
||||
group: "pages"
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
@@ -21,18 +23,19 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
with:
|
||||
lfs: true
|
||||
|
||||
- name: Build docs
|
||||
run: |
|
||||
mkdir -p build_docs && cd build_docs
|
||||
cmake ../docs && cmake --build . --target docs
|
||||
|
||||
|
||||
- name: Setup Pages
|
||||
uses: actions/configure-pages@v5
|
||||
|
||||
@@ -41,7 +44,7 @@ jobs:
|
||||
with:
|
||||
path: build_docs/html
|
||||
name: docs-develop
|
||||
|
||||
|
||||
- name: Deploy to GitHub Pages
|
||||
id: deployment
|
||||
uses: actions/deploy-pages@v4
|
||||
|
||||
.github/workflows/nightly.yml (189 changes, vendored)
@@ -1,167 +1,116 @@
|
||||
name: Nightly release
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 8 * * 1-5'
|
||||
- cron: "0 8 * * 1-5"
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/nightly.yml'
|
||||
- '.github/workflows/build_clio_docker_image.yml'
|
||||
- ".github/workflows/nightly.yml"
|
||||
- ".github/workflows/build_clio_docker_image.yml"
|
||||
|
||||
concurrency:
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build clio
|
||||
build-and-test:
|
||||
name: Build and Test
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos15
|
||||
conan_profile: default_apple_clang
|
||||
build_type: Release
|
||||
static: false
|
||||
- os: heavy
|
||||
conan_profile: gcc
|
||||
build_type: Release
|
||||
static: true
|
||||
container: '{ "image": "rippleci/clio_ci:latest" }'
|
||||
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
|
||||
- os: heavy
|
||||
conan_profile: gcc
|
||||
build_type: Debug
|
||||
static: true
|
||||
container: '{ "image": "rippleci/clio_ci:latest" }'
|
||||
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
|
||||
|
||||
uses: ./.github/workflows/build_and_test.yml
|
||||
with:
|
||||
runs_on: ${{ matrix.os }}
|
||||
container: ${{ matrix.container }}
|
||||
conan_profile: ${{ matrix.conan_profile }}
|
||||
build_type: ${{ matrix.build_type }}
|
||||
static: ${{ matrix.static }}
|
||||
run_unit_tests: true
|
||||
run_integration_tests: ${{ matrix.os != 'macos15' }}
|
||||
upload_clio_server: true
|
||||
disable_cache: true
|
||||
|
||||
analyze_build_time:
|
||||
name: Analyze Build Time
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# TODO: Enable when we have at least ubuntu 22.04
|
||||
# as ClangBuildAnalyzer requires relatively modern glibc
|
||||
#
|
||||
# - os: heavy
|
||||
# conan_profile: clang
|
||||
# container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
|
||||
# static: true
|
||||
- os: macos15
|
||||
conan_profile: default_apple_clang
|
||||
container: ""
|
||||
static: false
|
||||
uses: ./.github/workflows/build_impl.yml
|
||||
with:
|
||||
runs_on: ${{ matrix.os }}
|
||||
container: ${{ matrix.container }}
|
||||
conan_profile: gcc
|
||||
build_type: ${{ matrix.build_type }}
|
||||
conan_profile: ${{ matrix.conan_profile }}
|
||||
build_type: Release
|
||||
disable_cache: true
|
||||
code_coverage: false
|
||||
static: ${{ matrix.static }}
|
||||
unit_tests: true
|
||||
integration_tests: true
|
||||
clio_server: true
|
||||
disable_cache: true
|
||||
|
||||
run_tests:
|
||||
needs: build
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos15
|
||||
conan_profile: apple_clang_16
|
||||
build_type: Release
|
||||
integration_tests: false
|
||||
- os: heavy
|
||||
conan_profile: gcc
|
||||
build_type: Release
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
integration_tests: true
|
||||
- os: heavy
|
||||
conan_profile: gcc
|
||||
build_type: Debug
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
integration_tests: true
|
||||
runs-on: [self-hosted, "${{ matrix.os }}"]
|
||||
container: ${{ matrix.container }}
|
||||
|
||||
services:
|
||||
scylladb:
|
||||
image: ${{ (matrix.integration_tests) && 'scylladb/scylla' || '' }}
|
||||
options: >-
|
||||
--health-cmd "cqlsh -e 'describe cluster'"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- name: Clean workdir
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
uses: kuznetsss/workspace-cleanup@1.0
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_tests_${{ runner.os }}_${{ matrix.build_type }}_${{ matrix.conan_profile }}
|
||||
|
||||
- name: Run clio_tests
|
||||
run: |
|
||||
chmod +x ./clio_tests
|
||||
./clio_tests
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_integration_tests_${{ runner.os }}_${{ matrix.build_type }}_${{ matrix.conan_profile }}
|
||||
|
||||
# To be enabled back once docker in mac runner arrives
|
||||
# https://github.com/XRPLF/clio/issues/1400
|
||||
- name: Run clio_integration_tests
|
||||
if: matrix.integration_tests
|
||||
run: |
|
||||
chmod +x ./clio_integration_tests
|
||||
./clio_integration_tests --backend_host=scylladb
|
||||
upload_clio_server: false
|
||||
targets: all
|
||||
sanitizer: "false"
|
||||
analyze_build_time: true
|
||||
|
||||
nightly_release:
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
needs: run_tests
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GH_REPO: ${{ github.repository }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: nightly_release
|
||||
pattern: clio_server_*
|
||||
|
||||
- name: Prepare files
|
||||
shell: bash
|
||||
run: |
|
||||
cp ${{ github.workspace }}/.github/workflows/nightly_notes.md "${RUNNER_TEMP}/nightly_notes.md"
|
||||
cd nightly_release
|
||||
for d in $(ls); do
|
||||
archive_name=$(ls $d)
|
||||
mv ${d}/${archive_name} ./
|
||||
rm -r $d
|
||||
sha256sum ./$archive_name > ./${archive_name}.sha256sum
|
||||
cat ./$archive_name.sha256sum >> "${RUNNER_TEMP}/nightly_notes.md"
|
||||
done
|
||||
echo '```' >> "${RUNNER_TEMP}/nightly_notes.md"
|
||||
|
||||
- name: Remove current nightly release and nightly tag
|
||||
shell: bash
|
||||
run: |
|
||||
gh release delete nightly --yes || true
|
||||
git push origin :nightly || true
|
||||
|
||||
- name: Publish nightly release
|
||||
shell: bash
|
||||
run: |
|
||||
gh release create nightly --prerelease --title "Clio development (nightly) build" \
|
||||
--target $GITHUB_SHA --notes-file "${RUNNER_TEMP}/nightly_notes.md" \
|
||||
./nightly_release/clio_server*
|
||||
needs: build-and-test
|
||||
uses: ./.github/workflows/release_impl.yml
|
||||
with:
|
||||
overwrite_release: true
|
||||
title: "Clio development (nightly) build"
|
||||
version: nightly
|
||||
notes_header_file: nightly_notes.md
|
||||
|
||||
build_and_publish_docker_image:
|
||||
uses: ./.github/workflows/build_clio_docker_image.yml
|
||||
needs: run_tests
|
||||
needs: build-and-test
|
||||
secrets: inherit
|
||||
with:
|
||||
tags: |
|
||||
type=raw,value=nightly
|
||||
type=raw,value=${{ github.sha }}
|
||||
type=raw,value=nightly
|
||||
type=raw,value=${{ github.sha }}
|
||||
artifact_name: clio_server_Linux_Release_gcc
|
||||
strip_binary: true
|
||||
publish_image: ${{ github.event_name != 'pull_request' }}
|
||||
|
||||
create_issue_on_failure:
|
||||
needs: [build, run_tests, nightly_release, build_and_publish_docker_image]
|
||||
needs: [build-and-test, nightly_release, build_and_publish_docker_image]
|
||||
if: ${{ always() && contains(needs.*.result, 'failure') && github.event_name != 'pull_request' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
@@ -170,7 +119,7 @@ jobs:
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
title: 'Nightly release failed 🌙'
|
||||
title: "Nightly release failed 🌙"
|
||||
body: >
|
||||
Nightly release failed:
|
||||
|
||||
|
||||
.github/workflows/nightly_notes.md (5 changes, vendored)
@@ -1,6 +1,7 @@
|
||||
# Release notes
|
||||
|
||||
> **Note:** Please remember that this is a development release and it is not recommended for production use.
|
||||
|
||||
Changelog (including previous releases): https://github.com/XRPLF/clio/commits/nightly
|
||||
Changelog (including previous releases): <https://github.com/XRPLF/clio/commits/nightly>
|
||||
|
||||
## SHA256 checksums
|
||||
```
|
||||
|
||||
.github/workflows/pre-commit-autoupdate.yml (39 changes, vendored, new file)
@@ -0,0 +1,39 @@
|
||||
name: Pre-commit auto-update
|
||||
|
||||
on:
|
||||
# every first day of the month
|
||||
schedule:
|
||||
- cron: "0 0 1 * *"
|
||||
# on demand
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
auto-update:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.x
|
||||
|
||||
- run: pip install pre-commit
|
||||
- run: pre-commit autoupdate
|
||||
- run: pre-commit run --all-files
|
||||
|
||||
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
if: always()
|
||||
env:
|
||||
GH_REPO: ${{ github.repository }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
branch: update/pre-commit-hooks
|
||||
title: Update pre-commit hooks
|
||||
commit-message: "style: update pre-commit hooks"
|
||||
body: Update versions of pre-commit hooks to latest version.
|
||||
reviewers: "godexsoft,kuznetsss,PeterChen13579,mathbunnyru"
|
||||
.github/workflows/pre-commit.yml (28 changes, vendored, new file)
@@ -0,0 +1,28 @@
|
||||
name: Run pre-commit hooks
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
run-hooks:
|
||||
runs-on: heavy
|
||||
container:
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Repo ⚡️
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Prepare runner
|
||||
uses: ./.github/actions/prepare_runner
|
||||
with:
|
||||
disable_ccache: true
|
||||
|
||||
- name: Run pre-commit ✅
|
||||
run: pre-commit run --all-files
|
||||
.github/workflows/release_impl.yml (78 changes, vendored, new file)
@@ -0,0 +1,78 @@
|
||||
name: Make release
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
overwrite_release:
|
||||
description: "Overwrite the current release and tag"
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
title:
|
||||
description: "Release title"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
version:
|
||||
description: "Release version"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
notes_header_file:
|
||||
description: "Release notes header file"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GH_REPO: ${{ github.repository }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: release_artifacts
|
||||
pattern: clio_server_*
|
||||
|
||||
- name: Prepare files
|
||||
shell: bash
|
||||
working-directory: release_artifacts
|
||||
run: |
|
||||
cp ${{ github.workspace }}/.github/workflows/${{ inputs.notes_header_file }} "${RUNNER_TEMP}/release_notes.md"
|
||||
echo '' >> "${RUNNER_TEMP}/release_notes.md"
|
||||
echo '```' >> "${RUNNER_TEMP}/release_notes.md"
|
||||
|
||||
for d in $(ls); do
|
||||
archive_name=$(ls $d)
|
||||
mv ${d}/${archive_name} ./
|
||||
rm -r $d
|
||||
sha256sum ./$archive_name > ./${archive_name}.sha256sum
|
||||
cat ./$archive_name.sha256sum >> "${RUNNER_TEMP}/release_notes.md"
|
||||
done
|
||||
|
||||
echo '```' >> "${RUNNER_TEMP}/release_notes.md"
|
||||
|
||||
- name: Remove current release and tag
|
||||
if: ${{ github.event_name != 'pull_request' && inputs.overwrite_release }}
|
||||
shell: bash
|
||||
run: |
|
||||
gh release delete ${{ inputs.version }} --yes || true
|
||||
git push origin :${{ inputs.version }} || true
|
||||
|
||||
- name: Publish release
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
shell: bash
|
||||
run: |
|
||||
gh release create ${{ inputs.version }} \
|
||||
${{ inputs.overwrite_release && '--prerelease' || '' }} \
|
||||
--title "${{ inputs.title }}" \
|
||||
--target $GITHUB_SHA \
|
||||
--notes-file "${RUNNER_TEMP}/release_notes.md" \
|
||||
./release_artifacts/clio_server*
|
||||
.github/workflows/sanitizers.yml (103 changes, vendored)
@@ -1,15 +1,22 @@
|
||||
name: Run tests with sanitizers
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 1-5"
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/sanitizers.yml'
|
||||
- ".github/workflows/sanitizers.yml"
|
||||
|
||||
concurrency:
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build clio tests
|
||||
build-and-test:
|
||||
name: Build and Test
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -18,89 +25,19 @@ jobs:
|
||||
compiler: gcc
|
||||
- sanitizer: asan
|
||||
compiler: gcc
|
||||
# - sanitizer: ubsan # todo: enable when heavy runners are available
|
||||
# compiler: gcc
|
||||
uses: ./.github/workflows/build_impl.yml
|
||||
- sanitizer: ubsan
|
||||
compiler: gcc
|
||||
|
||||
uses: ./.github/workflows/build_and_test.yml
|
||||
with:
|
||||
runs_on: ubuntu-latest # todo: change to heavy
|
||||
container: '{ "image": "rippleci/clio_ci:latest" }'
|
||||
runs_on: heavy
|
||||
container: '{ "image": "ghcr.io/xrplf/clio-ci:latest" }'
|
||||
disable_cache: true
|
||||
conan_profile: ${{ matrix.compiler }}.${{ matrix.sanitizer }}
|
||||
build_type: Release
|
||||
code_coverage: false
|
||||
static: false
|
||||
unit_tests: true
|
||||
integration_tests: false
|
||||
clio_server: false
|
||||
target: clio_tests
|
||||
run_unit_tests: true
|
||||
run_integration_tests: false
|
||||
upload_clio_server: false
|
||||
targets: clio_tests clio_integration_tests
|
||||
sanitizer: ${{ matrix.sanitizer }}
|
||||
|
||||
# consider combining this with the previous matrix instead
|
||||
run_tests:
|
||||
needs: build
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- sanitizer: tsan
|
||||
compiler: gcc
|
||||
- sanitizer: asan
|
||||
compiler: gcc
|
||||
# - sanitizer: ubsan # todo: enable when heavy runners are available
|
||||
# compiler: gcc
|
||||
runs-on: ubuntu-latest # todo: change to heavy
|
||||
container:
|
||||
image: rippleci/clio_ci:latest
|
||||
permissions:
|
||||
contents: write
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_tests_${{ runner.os }}_Release_${{ matrix.compiler }}.${{ matrix.sanitizer }}
|
||||
|
||||
- name: Run clio_tests [${{ matrix.compiler }} / ${{ matrix.sanitizer }}]
|
||||
shell: bash
|
||||
run: |
|
||||
chmod +x ./clio_tests
|
||||
./.github/scripts/execute-tests-under-sanitizer ./clio_tests
|
||||
|
||||
- name: Check for sanitizer report
|
||||
shell: bash
|
||||
id: check_report
|
||||
run: |
|
||||
if ls .sanitizer-report/* 1> /dev/null 2>&1; then
|
||||
echo "found_report=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "found_report=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Upload report
|
||||
if: ${{ steps.check_report.outputs.found_report == 'true' }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.compiler }}_${{ matrix.sanitizer }}_report
|
||||
path: .sanitizer-report/*
|
||||
include-hidden-files: true
|
||||
|
||||
#
|
||||
# todo: enable when we have fixed all currently existing issues from sanitizers
|
||||
#
|
||||
# - name: Create an issue
|
||||
# if: ${{ steps.check_report.outputs.found_report == 'true' }}
|
||||
# uses: ./.github/actions/create_issue
|
||||
# env:
|
||||
# GH_TOKEN: ${{ github.token }}
|
||||
# with:
|
||||
# labels: 'bug'
|
||||
# title: '[${{ matrix.sanitizer }}/${{ matrix.compiler }}] reported issues'
|
||||
# body: >
|
||||
# Clio tests failed one or more sanitizer checks when built with `${{ matrix.compiler }}`.
|
||||
|
||||
# Workflow: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
|
||||
# Reports are available as artifacts.
|
||||
|
||||
.github/workflows/test_impl.yml (138 changes, vendored, new file)
@@ -0,0 +1,138 @@
|
||||
name: Reusable test
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs_on:
|
||||
description: Runner to run the job on
|
||||
required: true
|
||||
type: string
|
||||
|
||||
container:
|
||||
description: "The container object as a JSON string (leave empty to run natively)"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
conan_profile:
|
||||
description: Conan profile to use
|
||||
required: true
|
||||
type: string
|
||||
|
||||
build_type:
|
||||
description: Build type
|
||||
required: true
|
||||
type: string
|
||||
|
||||
run_unit_tests:
|
||||
description: Whether to run unit tests
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
run_integration_tests:
|
||||
description: Whether to run integration tests
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
sanitizer:
|
||||
description: Sanitizer to use
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
unit_tests:
|
||||
name: Unit testing ${{ inputs.container != '' && 'in container' || 'natively' }}
|
||||
runs-on: ${{ inputs.runs_on }}
|
||||
container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
|
||||
|
||||
if: inputs.run_unit_tests
|
||||
|
||||
steps:
|
||||
- name: Clean workdir
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
|
||||
- name: Make clio_tests executable
|
||||
shell: bash
|
||||
run: chmod +x ./clio_tests
|
||||
|
||||
- name: Run clio_tests (regular)
|
||||
if: inputs.sanitizer == 'false'
|
||||
run: ./clio_tests
|
||||
|
||||
- name: Run clio_tests (sanitizer)
|
||||
if: inputs.sanitizer != 'false'
|
||||
run: ./.github/scripts/execute-tests-under-sanitizer ./clio_tests
|
||||
|
||||
- name: Check for sanitizer report
|
||||
if: inputs.sanitizer != 'false'
|
||||
shell: bash
|
||||
id: check_report
|
||||
run: |
|
||||
if ls .sanitizer-report/* 1> /dev/null 2>&1; then
|
||||
echo "found_report=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "found_report=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Upload sanitizer report
|
||||
if: inputs.sanitizer != 'false' && steps.check_report.outputs.found_report == 'true'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.conan_profile }}_report
|
||||
path: .sanitizer-report/*
|
||||
include-hidden-files: true
|
||||
|
||||
# TODO: enable when we have fixed all currently existing issues from sanitizers
|
||||
- name: Create an issue
|
||||
if: false && inputs.sanitizer != 'false' && steps.check_report.outputs.found_report == 'true'
|
||||
uses: ./.github/actions/create_issue
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
with:
|
||||
labels: "bug"
|
||||
title: "[${{ inputs.conan_profile }}] reported issues"
|
||||
body: >
|
||||
Clio tests failed one or more sanitizer checks when built with `${{ inputs.conan_profile }}`.
|
||||
|
||||
Workflow: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/
|
||||
Reports are available as artifacts.
|
||||
|
||||
integration_tests:
|
||||
name: Integration testing ${{ inputs.container != '' && 'in container' || 'natively' }}
|
||||
runs-on: ${{ inputs.runs_on }}
|
||||
container: ${{ inputs.container != '' && fromJson(inputs.container) || null }}
|
||||
|
||||
if: inputs.run_integration_tests
|
||||
|
||||
services:
|
||||
scylladb:
|
||||
image: "scylladb/scylla"
|
||||
options: >-
|
||||
--health-cmd "cqlsh -e 'describe cluster'"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- name: Clean workdir
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
uses: kuznetsss/workspace-cleanup@80b9863b45562c148927c3d53621ef354e5ae7ce # v1.0
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: clio_integration_tests_${{ runner.os }}_${{ inputs.build_type }}_${{ inputs.conan_profile }}
|
||||
|
||||
# To be re-enabled once Docker on macOS runners becomes available
|
||||
# https://github.com/XRPLF/clio/issues/1400
|
||||
- name: Run clio_integration_tests
|
||||
run: |
|
||||
chmod +x ./clio_integration_tests
|
||||
./clio_integration_tests --backend_host=scylladb
|
||||
.github/workflows/update_docker_ci.yml (24 changes, vendored)
@@ -1,22 +1,33 @@
|
||||
name: Update CI docker image
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- 'docker/ci/**'
|
||||
- 'docker/compilers/**'
|
||||
- "docker/ci/**"
|
||||
- "docker/compilers/**"
|
||||
- .github/workflows/update_docker_ci.yml
|
||||
- ".github/actions/build_docker_image/**"
|
||||
push:
|
||||
branches: [develop]
|
||||
paths:
|
||||
- 'docker/ci/**' # CI image must update when either its dockerfile changes
|
||||
- 'docker/compilers/**' # or any compilers changed and were pushed by hand
|
||||
# CI image must update when either its Dockerfile changes
|
||||
# or any compilers changed and were pushed by hand
|
||||
- "docker/ci/**"
|
||||
- "docker/compilers/**"
|
||||
- .github/workflows/update_docker_ci.yml
|
||||
- ".github/actions/build_docker_image/**"
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
# Only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build_and_push:
|
||||
name: Build and push docker image
|
||||
runs-on: [self-hosted, heavy]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: ./.github/actions/build_docker_image
|
||||
@@ -25,7 +36,10 @@ jobs:
|
||||
DOCKERHUB_PW: ${{ secrets.DOCKERHUB_PW }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
image_name: rippleci/clio_ci
|
||||
images: |
|
||||
rippleci/clio_ci
|
||||
ghcr.io/xrplf/clio-ci
|
||||
dockerhub_repo: rippleci/clio_ci
|
||||
push_image: ${{ github.event_name != 'pull_request' }}
|
||||
directory: docker/ci
|
||||
tags: |
|
||||
|
||||
.github/workflows/upload_coverage_report.yml (16 changes, vendored)
@@ -1,4 +1,5 @@
|
||||
name: Upload report
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
@@ -10,6 +11,7 @@ jobs:
|
||||
upload_report:
|
||||
name: Upload report
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
@@ -23,13 +25,9 @@ jobs:
|
||||
|
||||
- name: Upload coverage report
|
||||
if: ${{ hashFiles('build/coverage_report.xml') != '' }}
|
||||
uses: wandalen/wretry.action@v3.7.3
|
||||
uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
|
||||
with:
|
||||
action: codecov/codecov-action@v4
|
||||
with: |
|
||||
files: build/coverage_report.xml
|
||||
fail_ci_if_error: false
|
||||
verbose: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
attempt_limit: 5
|
||||
attempt_delay: 10000
|
||||
files: build/coverage_report.xml
|
||||
fail_ci_if_error: true
|
||||
verbose: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
.markdownlint.yaml (6 changes, new file)
@@ -0,0 +1,6 @@
|
||||
# Default state for all rules
|
||||
default: true
|
||||
|
||||
# MD013/line-length - Line length
|
||||
MD013:
|
||||
line_length: 1000
|
||||
.pre-commit-config.yaml (118 changes, new file)
@@ -0,0 +1,118 @@
|
||||
---
|
||||
# pre-commit is a tool to perform a predefined set of tasks manually and/or
|
||||
# automatically before git commits are made.
|
||||
#
|
||||
# Config reference: https://pre-commit.com/#pre-commit-configyaml---top-level
|
||||
#
|
||||
# Common tasks
|
||||
#
|
||||
# - Run on all files: pre-commit run --all-files
|
||||
# - Register git hooks: pre-commit install --install-hooks
|
||||
#
|
||||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
# `pre-commit sample-config` default hooks
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: check-added-large-files
|
||||
- id: check-executables-have-shebangs
|
||||
- id: check-shebang-scripts-are-executable
|
||||
- id: end-of-file-fixer
|
||||
exclude: ^docs/doxygen-awesome-theme/
|
||||
- id: trailing-whitespace
|
||||
exclude: ^docs/doxygen-awesome-theme/
|
||||
|
||||
# Autoformat: YAML, JSON, Markdown, etc.
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: v3.5.3
|
||||
hooks:
|
||||
- id: prettier
|
||||
exclude: ^docs/doxygen-awesome-theme/
|
||||
|
||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||
rev: v0.44.0
|
||||
hooks:
|
||||
- id: markdownlint-fix
|
||||
exclude: LICENSE.md
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.31.2
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-clang-format
|
||||
rev: v19.1.7
|
||||
hooks:
|
||||
- id: clang-format
|
||||
args: [--style=file]
|
||||
types: [c++]
|
||||
|
||||
- repo: https://github.com/cheshirekow/cmake-format-precommit
|
||||
rev: v0.6.13
|
||||
hooks:
|
||||
- id: cmake-format
|
||||
additional_dependencies: [PyYAML]
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: check-no-h-files
|
||||
name: No .h files
|
||||
entry: There should be no .h files in this repository
|
||||
language: fail
|
||||
files: \.h$
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: gofmt
|
||||
name: Go Format
|
||||
entry: pre-commit-hooks/run-go-fmt.sh
|
||||
types: [go]
|
||||
language: golang
|
||||
description: "Runs `gofmt`, requires golang"
|
||||
- id: check-docs
|
||||
name: Check Doxygen Documentation
|
||||
entry: pre-commit-hooks/check-doxygen-docs.sh
|
||||
types: [text]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
- id: fix-local-includes
|
||||
name: Fix Local Includes
|
||||
entry: pre-commit-hooks/fix-local-includes.sh
|
||||
types: [c++]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
- id: verify-commits
|
||||
name: Verify Commits
|
||||
entry: pre-commit-hooks/verify-commits.sh
|
||||
types: [text]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: lfs-post-checkout
|
||||
name: LFS Post Checkout
|
||||
entry: pre-commit-hooks/lfs/post-checkout
|
||||
types: [text]
|
||||
stages: [post-checkout]
|
||||
language: system
|
||||
- id: lfs-post-commit
|
||||
name: LFS Post Commit
|
||||
entry: pre-commit-hooks/lfs/post-commit
|
||||
types: [text]
|
||||
stages: [post-commit]
|
||||
language: system
|
||||
- id: lfs-post-merge
|
||||
name: LFS Post Merge
|
||||
entry: pre-commit-hooks/lfs/post-merge
|
||||
types: [text]
|
||||
stages: [post-merge]
|
||||
language: system
|
||||
- id: lfs-pre-push
|
||||
name: LFS Pre Push
|
||||
entry: pre-commit-hooks/lfs/pre-push
|
||||
types: [text]
|
||||
stages: [pre-push]
|
||||
language: system
|
||||
@@ -17,6 +17,7 @@ option(packaging "Create distribution packages" FALSE)
|
||||
option(lint "Run clang-tidy checks during compilation" FALSE)
|
||||
option(static "Statically linked Clio" FALSE)
|
||||
option(snapshot "Build snapshot tool" FALSE)
|
||||
option(time_trace "Build using -ftime-trace to create compiler trace reports" FALSE)
|
||||
|
||||
# ========================================================================== #
|
||||
set(san "" CACHE STRING "Add sanitizer instrumentation")
|
||||
|
||||
CONTRIBUTING.md (129 changes)
@@ -1,39 +1,47 @@
|
||||
# Contributing
|
||||
|
||||
Thank you for your interest in contributing to the `clio` project 🙏
|
||||
|
||||
## Workflow
|
||||
|
||||
To contribute, please:
|
||||
|
||||
1. Fork the repository under your own user.
|
||||
2. Create a new branch on which to commit/push your changes.
|
||||
3. Write and test your code.
|
||||
4. Ensure that your code compiles with the provided build engine, and update the build engine as part of your PR where needed and appropriate.
|
||||
5. Where applicable, write test cases for your code and include those in the relevant subfolder under `tests`.
|
||||
6. Ensure your code passes automated checks (e.g. clang-format)
|
||||
6. Ensure your code passes [automated checks](#pre-commit-hooks)
|
||||
7. Squash your commits (i.e. rebase) into as few commits as is reasonable to describe your changes at a high level (typically a single commit for a small change). See below for more details.
|
||||
8. Open a PR to the main repository onto the _develop_ branch, and follow the provided template.
|
||||
|
||||
> **Note:** Please read the [Style guide](#style-guide).
|
||||
|
||||
## Install git hooks
|
||||
Please run the following command in order to use git hooks that are helpful for `clio` development.
|
||||
### `pre-commit` hooks
|
||||
|
||||
``` bash
|
||||
git config --local core.hooksPath .githooks
|
||||
To ensure code quality and style, we use [`pre-commit`](https://pre-commit.com/).
|
||||
|
||||
Run the following command to enable `pre-commit` hooks that help with Clio development:
|
||||
|
||||
```bash
|
||||
pip3 install pre-commit
|
||||
pre-commit install
|
||||
```
|
||||
|
||||
## Git hooks dependencies
|
||||
The pre-commit hook requires `clang-format >= 19.0.0` and `cmake-format` to be installed on your machine.
|
||||
`clang-format` can be installed using `brew` on macOS and default package manager on Linux.
|
||||
`cmake-format` can be installed using `pip`.
|
||||
The hook will also attempt to automatically use `doxygen` to verify that everything public in the codebase is covered by doc comments. If `doxygen` is not installed, the hook will raise a warning suggesting to install `doxygen` for future commits.
|
||||
`pre-commit` takes care of running each tool in [`.pre-commit-config.yaml`](https://github.com/XRPLF/clio/blob/develop/.pre-commit-config.yaml) in a separate environment.
|
||||
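For reference, a few common invocations are shown below. The hook ids are the ones declared in `.pre-commit-config.yaml`; the file path in the last command is only a placeholder.

```bash
# Run every configured hook against the whole repository
pre-commit run --all-files

# Run a single hook, e.g. clang-format, against all files
pre-commit run clang-format --all-files

# Run the hooks only against specific files (placeholder path)
pre-commit run --files src/main/Main.cpp
```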
|
||||
## Git commands
|
||||
This section offers a detailed look at the git commands you will need to use to get your PR submitted.
|
||||
`pre-commit` also attempts to automatically use Doxygen to verify that everything public in the codebase has doc comments.
|
||||
If Doxygen is not installed, the hook issues a warning and recommends installing Doxygen for future commits.
|
||||
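If you want that documentation check to run locally, install Doxygen through your system package manager. The package names below are the usual ones and may differ on your platform.

```bash
# macOS (Homebrew)
brew install doxygen

# Debian/Ubuntu
sudo apt-get install doxygen
```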
|
||||
### Git commands
|
||||
|
||||
This section offers a detailed look at the git commands you will need to use to get your PR submitted.
|
||||
Please note that there is more than one way to do this; these commands are provided for your convenience.
|
||||
At this point it's assumed that you have already finished working on your feature/bug.
|
||||
|
||||
> **Important:** Before you issue any of the commands below, please hit the `Sync fork` button and make sure your fork's `develop` branch is up-to-date with the main `clio` repository.
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a backup of your branch
|
||||
git branch <your feature branch>_bk
|
||||
|
||||
@@ -43,18 +51,20 @@ git pull origin develop
|
||||
git checkout <your feature branch>
|
||||
git rebase -i develop
|
||||
```
|
||||
|
||||
For each commit in the list other than the first one, enter `s` to squash.
|
||||
After this is done, you will have the opportunity to write a message for the squashed commit.
|
||||
|
||||
> **Hint:** Please use **imperative mood** in the commit message, and capitalize the first word.
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# You should now have a single commit on top of a commit in `develop`
|
||||
git log
|
||||
```
|
||||
|
||||
> **Note:** If there are merge conflicts, please resolve them now.
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Use the same commit message as you did above
|
||||
git commit -m 'Your message'
|
||||
git rebase --continue
|
||||
@@ -62,27 +72,30 @@ git rebase --continue
|
||||
|
||||
> **Important:** If you have no GPG keys set up, please follow [this tutorial](https://docs.github.com/en/authentication/managing-commit-signature-verification/adding-a-gpg-key-to-your-github-account)
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Sign the commit with your GPG key, and push your changes
|
||||
git commit --amend -S
|
||||
git push --force
|
||||
```
|
||||
|
||||
## Use ccache (optional)
|
||||
### Use ccache (optional)
|
||||
|
||||
Clio uses `ccache` to speed up compilation. If you want to use it, please make sure it is installed on your machine.
|
||||
CMake will automatically detect it and use it if it is available.
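A short, hedged sketch of verifying that `ccache` is actually being used (standard `ccache` commands; the cache size shown is only an illustrative value):

```bash
# Show hit/miss statistics after a build
ccache --show-stats

# Optionally grow the cache if you rebuild Clio often (size is illustrative)
ccache --max-size=10G
```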
|
||||
|
||||
## Opening a pull request
|
||||
### Opening a pull request
|
||||
|
||||
When a pull request is opened, CI performs checks on the new code.
The title of the pull request and the squashed commit should follow the [conventional commits specification](https://www.conventionalcommits.org/en/v1.0.0/).
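For illustration, a couple of hedged, hypothetical examples of messages in that style (the `fix`/`feat` prefixes match the groups parsed by the changelog tooling added later in this changeset; the described changes themselves are made up):

```bash
# Hypothetical examples of conventional-commit style messages
git commit -m "fix: Handle empty ledger range in ledger_data"
git commit -m "feat: Add cache hit metrics to server_info"
```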
|
||||
|
||||
## Fixing issues found during code review
|
||||
### Fixing issues found during code review
|
||||
|
||||
While your code is in review, it's possible that some changes will be requested by reviewer(s).
|
||||
This section describes the process of adding your fixes.
|
||||
|
||||
We assume that you already made the required changes on your feature branch.
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Add the changed code
|
||||
git add <paths to add>
|
||||
|
||||
@@ -94,62 +107,72 @@ git commit -S -m "[FOLD] Your commit message"
|
||||
git push
|
||||
```
|
||||
|
||||
## After code review
|
||||
### After code review
|
||||
|
||||
When your PR is approved and ready to merge, use `Squash and merge`.
|
||||
The button for that is near the bottom of the PR's page on GitHub.
|
||||
|
||||
> **Important:** Please leave the automatically-generated mention/link to the PR in the subject line **and** in the description field add `"Fix #ISSUE_ID"` (replacing `ISSUE_ID` with yours) if the PR fixes an issue.
|
||||
> **Note:** See [issues](https://github.com/XRPLF/clio/issues) to find the `ISSUE_ID` for the feature/bug you were working on.
|
||||
|
||||
# Style guide
|
||||
## Style guide
|
||||
|
||||
This is a non-exhaustive list of recommended style guidelines. These are not always strictly enforced and serve as a way to keep the codebase coherent.
|
||||
|
||||
## Formatting
|
||||
Code must conform to `clang-format` version 19, unless the result would be unreasonably difficult to read or maintain.
|
||||
In most cases the pre-commit hook will take care of formatting and will fix any issues automatically.
|
||||
To manually format your code, use `clang-format -i <your changed files>` for C++ files and `cmake-format -i <your changed files>` for CMake files.
|
||||
### Formatting
|
||||
|
||||
Code must conform to `clang-format`, unless the result is unreasonably difficult to read or maintain.
|
||||
In most cases the `pre-commit` hook takes care of formatting and fixes any issues automatically.
|
||||
To manually format your code, run `pre-commit run clang-format --files <your changed files>` for C++ files, and `pre-commit run cmake-format --files <your changed files>` for CMake files.
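As a hedged convenience sketch, one way to collect the changed files for those commands (assuming your feature branch was cut from `develop`; the `clang-format` and `cmake-format` hook ids come from the repository's `.pre-commit-config.yaml`):

```bash
# Format only the C++ files that differ from develop
pre-commit run clang-format --files $(git diff --name-only develop -- '*.cpp' '*.hpp')

# Same idea for CMake files
pre-commit run cmake-format --files $(git diff --name-only develop -- '*.cmake' '*CMakeLists.txt')
```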
|
||||
|
||||
### Documentation
|
||||
|
||||
## Documentation
|
||||
All public namespaces, classes and functions must be covered by doc (`doxygen`) comments. Everything that is not within a nested `impl` namespace is considered public.
|
||||
|
||||
> **Note:** Keep in mind that this is enforced by Clio's CI and your build will fail if newly added public code lacks documentation.
|
||||
|
||||
## Avoid
|
||||
* Proliferation of nearly identical code.
|
||||
* Proliferation of new files and classes unless it improves readability and/or compilation time.
|
||||
* Unmanaged memory allocation and raw pointers.
|
||||
* Macros (unless they add significant value.)
|
||||
* Lambda patterns (unless these add significant value.)
|
||||
* CPU or architecture-specific code unless there is a good reason to include it; where it is used, guard it with macros and provide explanatory comments.
|
||||
* Importing new libraries unless there is a very good reason to do so.
|
||||
### Avoid
|
||||
|
||||
## Seek to
|
||||
* Extend functionality of existing code rather than creating new code.
|
||||
* Prefer readability over terseness where important logic is concerned.
|
||||
* Inline functions that are not used or are not likely to be used elsewhere in the codebase.
|
||||
* Use clear and self-explanatory names for functions, variables, structs and classes.
|
||||
* Use TitleCase for classes, structs and filenames, camelCase for function and variable names, lower case for namespaces and folders.
|
||||
* Provide as many comments as you feel that a competent programmer would need to understand what your code does.
|
||||
- Proliferation of nearly identical code.
|
||||
- Proliferation of new files and classes unless it improves readability and/or compilation time.
|
||||
- Unmanaged memory allocation and raw pointers.
|
||||
- Macros (unless they add significant value.)
|
||||
- Lambda patterns (unless these add significant value.)
|
||||
- CPU or architecture-specific code unless there is a good reason to include it; where it is used, guard it with macros and provide explanatory comments.
|
||||
- Importing new libraries unless there is a very good reason to do so.
|
||||
|
||||
### Seek to
|
||||
|
||||
- Extend functionality of existing code rather than creating new code.
|
||||
- Prefer readability over terseness where important logic is concerned.
|
||||
- Inline functions that are not used or are not likely to be used elsewhere in the codebase.
|
||||
- Use clear and self-explanatory names for functions, variables, structs and classes.
|
||||
- Use TitleCase for classes, structs and filenames, camelCase for function and variable names, lower case for namespaces and folders.
|
||||
- Provide as many comments as you feel that a competent programmer would need to understand what your code does.
|
||||
|
||||
## Maintainers
|
||||
|
||||
# Maintainers
|
||||
Maintainers are ecosystem participants with elevated access to the repository. They are able to push new code, make decisions on when a release should be made, etc.
|
||||
|
||||
## Code Review
|
||||
### Code Review
|
||||
|
||||
A PR must be reviewed and approved by at least one of the maintainers before it can be merged.
|
||||
|
||||
## Adding and Removing
|
||||
### Adding and Removing
|
||||
|
||||
New maintainers can be proposed by two existing maintainers, subject to a vote by a quorum of the existing maintainers. A minimum of 50% support and 50% participation are required. In the event of a tie vote, the addition of the new maintainer will be rejected.
|
||||
|
||||
Existing maintainers can resign, or be subject to a vote for removal at the behest of two existing maintainers. A minimum of 60% agreement and 50% participation are required. The XRP Ledger Foundation will have the ability, for cause, to remove an existing maintainer without a vote.
|
||||
|
||||
## Existing Maintainers
|
||||
### Existing Maintainers
|
||||
|
||||
* [cindyyan317](https://github.com/cindyyan317) (Ripple)
|
||||
* [godexsoft](https://github.com/godexsoft) (Ripple)
|
||||
* [kuznetsss](https://github.com/kuznetsss) (Ripple)
|
||||
* [legleux](https://github.com/legleux) (Ripple)
|
||||
- [godexsoft](https://github.com/godexsoft) (Ripple)
|
||||
- [kuznetsss](https://github.com/kuznetsss) (Ripple)
|
||||
- [legleux](https://github.com/legleux) (Ripple)
|
||||
- [PeterChen13579](https://github.com/PeterChen13579) (Ripple)
|
||||
|
||||
## Honorable ex-Maintainers
|
||||
### Honorable ex-Maintainers
|
||||
|
||||
* [cjcobb23](https://github.com/cjcobb23) (ex-Ripple)
|
||||
* [natenichols](https://github.com/natenichols) (ex-Ripple)
|
||||
- [cindyyan317](https://github.com/cindyyan317) (ex-Ripple)
|
||||
- [cjcobb23](https://github.com/cjcobb23) (ex-Ripple)
|
||||
- [natenichols](https://github.com/natenichols) (ex-Ripple)
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
ISC License
|
||||
|
||||
Copyright (c) 2022, the clio developers
|
||||
Copyright (c) 2022, the clio developers
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# <img src='./docs/img/xrpl-logo.svg' width='40' valign="top" /> Clio
|
||||
# <img src='./docs/img/xrpl-logo.svg' width='40' valign="top" /> Clio <!-- markdownlint-disable-line MD033 MD045 -->
|
||||
|
||||
[](https://github.com/XRPLF/clio/actions/workflows/build.yml?query=branch%3Adevelop)
|
||||
[](https://github.com/XRPLF/clio/actions/workflows/nightly.yml?query=branch%3Adevelop)
|
||||
@@ -16,9 +16,9 @@ Multiple Clio nodes can share access to the same dataset, which allows for a hig
|
||||
Clio offers the full `rippled` API, with the caveat that Clio by default only returns validated data. This means that `ledger_index` defaults to `validated` instead of `current` for all requests. Other non-validated data, such as information about queued transactions, is also not returned.
|
||||
|
||||
Clio retrieves data from a designated group of `rippled` nodes instead of connecting to the peer-to-peer network.
|
||||
For requests that require access to the peer-to-peer network, such as `fee` or `submit`, Clio automatically forwards the request to a `rippled` node and propagates the response back to the client. To access non-validated data for *any* request, simply add `ledger_index: "current"` to the request, and Clio will forward the request to `rippled`.
|
||||
For requests that require access to the peer-to-peer network, such as `fee` or `submit`, Clio automatically forwards the request to a `rippled` node and propagates the response back to the client. To access non-validated data for _any_ request, simply add `ledger_index: "current"` to the request, and Clio will forward the request to `rippled`.
|
||||
|
||||
> [!NOTE]
|
||||
> [!NOTE]
|
||||
> Clio requires access to at least one `rippled` node, which can run on the same machine as Clio or separately.
|
||||
|
||||
## 📚 Learn more about Clio
|
||||
|
||||
22 _typos.toml (new file)
@@ -0,0 +1,22 @@
|
||||
[default]
|
||||
# This allows to ignore account ids in tests and private keys
|
||||
# More info: https://github.com/crate-ci/typos/issues/415
|
||||
extend-ignore-re = [
|
||||
"[a-z-A-Z0-9]{33}",
|
||||
"[a-z-A-Z0-9]{34}",
|
||||
"[a-z-A-Z0-9]{64}",
|
||||
]
|
||||
|
||||
[default.extend-identifiers]
|
||||
# (S)tring
|
||||
tring = "tring"
|
||||
trings = "trings"
|
||||
|
||||
ASSERTs = "ASSERTs"
|
||||
EXCLUDEs = "EXCLUDEs"
|
||||
|
||||
ser = "ser"
|
||||
|
||||
[default.extend-words]
|
||||
strat = "strat"
|
||||
datas = "datas"
|
||||
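This file appears to be the configuration for the [typos](https://github.com/crate-ci/typos) spell checker referenced in its comments. A hedged sketch of running it locally (the `typos-cli` crate provides the `typos` binary, which picks up `_typos.toml` from the repository root):

```bash
# One-time install of the checker
cargo install typos-cli

# Report spelling issues from the repository root
typos

# Optionally apply the suggested fixes in place
typos --write-changes
```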
92 cliff.toml (new file)
@@ -0,0 +1,92 @@
|
||||
# git-cliff ~ default configuration file
|
||||
# https://git-cliff.org/docs/configuration
|
||||
#
|
||||
# Lines starting with "#" are comments.
|
||||
# Configuration options are organized into tables and keys.
|
||||
# See documentation for more information on available options.
|
||||
|
||||
[changelog]
|
||||
# template for the changelog header
|
||||
header = """
|
||||
# Changelog\n
|
||||
All notable changes to this project will be documented in this file.\n
|
||||
"""
|
||||
# template for the changelog body
|
||||
# https://keats.github.io/tera/docs/#introduction
|
||||
body = """
|
||||
{% if version %}\
|
||||
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||
{% else %}\
|
||||
## [unreleased]
|
||||
{% endif %}\
|
||||
{% for group, commits in commits | filter(attribute="merge_commit", value=false) | group_by(attribute="group") %}
|
||||
### {{ group | striptags | trim | upper_first }}
|
||||
{% for commit in commits %}
|
||||
- {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
|
||||
{% if commit.breaking %}[**breaking**] {% endif %}\
|
||||
{{ commit.message | upper_first }} {% if commit.remote.username %}by @{{ commit.remote.username }}{% endif %}\
|
||||
{% endfor %}
|
||||
{% endfor %}\n
|
||||
"""
|
||||
# template for the changelog footer
|
||||
footer = """
|
||||
<!-- generated by git-cliff -->
|
||||
"""
|
||||
# remove the leading and trailing s
|
||||
trim = true
|
||||
# postprocessors
|
||||
postprocessors = [
|
||||
# { pattern = '<REPO>', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL
|
||||
]
|
||||
# render body even when there are no releases to process
|
||||
# render_always = true
|
||||
# output file path
|
||||
output = "CHANGELOG.md"
|
||||
|
||||
[git]
|
||||
# parse the commits based on https://www.conventionalcommits.org
|
||||
conventional_commits = true
|
||||
# filter out the commits that are not conventional
|
||||
filter_unconventional = true
|
||||
# process each line of a commit as an individual commit
|
||||
split_commits = false
|
||||
# regex for preprocessing the commit messages
|
||||
commit_preprocessors = [
|
||||
# Replace issue numbers
|
||||
#{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"},
|
||||
# Check spelling of the commit with https://github.com/crate-ci/typos
|
||||
# If the spelling is incorrect, it will be automatically fixed.
|
||||
#{ pattern = '.*', replace_command = 'typos --write-changes -' },
|
||||
]
|
||||
# regex for parsing and grouping commits
|
||||
commit_parsers = [
|
||||
{ message = "^feat", group = "<!-- 0 -->🚀 Features" },
|
||||
{ message = "^fix", group = "<!-- 1 -->🐛 Bug Fixes" },
|
||||
{ message = "^doc", group = "<!-- 3 -->📚 Documentation" },
|
||||
{ message = "^perf", group = "<!-- 4 -->⚡ Performance" },
|
||||
{ message = "^refactor", group = "<!-- 2 -->🚜 Refactor" },
|
||||
{ message = "^style.*[Cc]lang-tidy auto fixes", skip = true },
|
||||
{ message = "^style", group = "<!-- 5 -->🎨 Styling" },
|
||||
{ message = "^test", group = "<!-- 6 -->🧪 Testing" },
|
||||
{ message = "^chore\\(release\\): prepare for", skip = true },
|
||||
{ message = "^chore: Commits", skip = true },
|
||||
{ message = "^chore\\(deps.*\\)", skip = true },
|
||||
{ message = "^chore\\(pr\\)", skip = true },
|
||||
{ message = "^chore\\(pull\\)", skip = true },
|
||||
{ message = "^chore|^ci", group = "<!-- 7 -->⚙️ Miscellaneous Tasks" },
|
||||
{ body = ".*security", group = "<!-- 8 -->🛡️ Security" },
|
||||
{ message = "^revert", group = "<!-- 9 -->◀️ Revert" },
|
||||
{ message = ".*", group = "<!-- 10 -->💼 Other" },
|
||||
]
|
||||
# filter out the commits that are not matched by commit parsers
|
||||
filter_commits = false
|
||||
# sort the tags topologically
|
||||
topo_order = false
|
||||
# sort the commits inside sections by oldest/newest order
|
||||
sort_commits = "oldest"
|
||||
|
||||
ignore_tags = "^.*-[b|rc].*"
|
||||
|
||||
[remote.github]
|
||||
owner = "XRPLF"
|
||||
repo = "clio"
|
||||
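Since this `cliff.toml` drives changelog generation, a minimal hedged sketch of exercising it with the `git-cliff` CLI (the tag value is hypothetical, and a GitHub token is only needed so the `remote.github` contributor fields can be resolved):

```bash
# One-time install of the changelog generator
cargo install git-cliff

# Regenerate CHANGELOG.md (per the `output` setting) up to a hypothetical tag
GITHUB_TOKEN=<your token> git cliff --tag 2.5.0
```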
@@ -70,4 +70,12 @@ endif ()
|
||||
# See https://github.com/cpp-best-practices/cppbestpractices/blob/master/02-Use_the_Tools_Available.md#gcc--clang for
|
||||
# the flags description
|
||||
|
||||
if (time_trace)
|
||||
if (is_clang OR is_appleclang)
|
||||
list(APPEND COMPILER_FLAGS -ftime-trace)
|
||||
else ()
|
||||
message(FATAL_ERROR "Clang or AppleClang is required to use `-ftime-trace`")
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
target_compile_options(clio_options INTERFACE ${COMPILER_FLAGS})
|
||||
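Tying this hunk to the rest of the changeset, a hedged sketch of producing a compile-time report (assumes a Clang-based build, e.g. the CI image's `clang` Conan profile, plus the ClangBuildAnalyzer binary that the Dockerfile below installs; paths mirror the build instructions elsewhere in this diff):

```bash
# From the build directory: enable the new option for a Clang build
conan install .. --output-folder . --build missing --settings build_type=Release \
    --profile clang -o time_trace=True

# ...configure and build with cmake as usual, then aggregate the per-TU traces
ClangBuildAnalyzer --all . clio_time_trace.bin   # capture file name is arbitrary
ClangBuildAnalyzer --analyze clio_time_trace.bin
```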
|
||||
@@ -1,6 +1,7 @@
|
||||
from conan import ConanFile
|
||||
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
|
||||
|
||||
|
||||
class Clio(ConanFile):
|
||||
name = 'clio'
|
||||
license = 'ISC'
|
||||
@@ -20,6 +21,7 @@ class Clio(ConanFile):
|
||||
'coverage': [True, False], # build for test coverage report; create custom target `clio_tests-ccov`
|
||||
'lint': [True, False], # run clang-tidy checks during compilation
|
||||
'snapshot': [True, False], # build export/import snapshot tool
|
||||
'time_trace': [True, False] # build using -ftime-trace to create compiler trace reports
|
||||
}
|
||||
|
||||
requires = [
|
||||
@@ -46,7 +48,8 @@ class Clio(ConanFile):
|
||||
'lint': False,
|
||||
'docs': False,
|
||||
'snapshot': False,
|
||||
|
||||
'time_trace': False,
|
||||
|
||||
'xrpl/*:tests': False,
|
||||
'xrpl/*:rocksdb': False,
|
||||
'cassandra-cpp-driver/*:shared': False,
|
||||
@@ -78,11 +81,12 @@ class Clio(ConanFile):
|
||||
|
||||
def layout(self):
|
||||
cmake_layout(self)
|
||||
# Fix this setting to follow the default introduced in Conan 1.48
|
||||
# Fix this setting to follow the default introduced in Conan 1.48
|
||||
# to align with our build instructions.
|
||||
self.folders.generators = 'build/generators'
|
||||
|
||||
generators = 'CMakeDeps'
|
||||
|
||||
def generate(self):
|
||||
tc = CMakeToolchain(self)
|
||||
tc.variables['verbose'] = self.options.verbose
|
||||
@@ -95,6 +99,7 @@ class Clio(ConanFile):
|
||||
tc.variables['packaging'] = self.options.packaging
|
||||
tc.variables['benchmark'] = self.options.benchmark
|
||||
tc.variables['snapshot'] = self.options.snapshot
|
||||
tc.variables['time_trace'] = self.options.time_trace
|
||||
tc.generate()
|
||||
|
||||
def build(self):
|
||||
|
||||
@@ -9,8 +9,9 @@ WORKDIR /root
|
||||
ENV CCACHE_VERSION=4.10.2 \
|
||||
LLVM_TOOLS_VERSION=19 \
|
||||
GH_VERSION=2.40.0 \
|
||||
DOXYGEN_VERSION=1.12.0
|
||||
|
||||
DOXYGEN_VERSION=1.12.0 \
|
||||
CLANG_BUILD_ANALYZER_VERSION=1.6.0
|
||||
|
||||
# Add repositories
|
||||
RUN apt-get -qq update \
|
||||
&& apt-get -qq install -y --no-install-recommends --no-install-suggests gnupg wget curl software-properties-common \
|
||||
@@ -20,9 +21,8 @@ RUN apt-get -qq update \
|
||||
# Install packages
|
||||
RUN apt update -qq \
|
||||
&& apt install -y --no-install-recommends --no-install-suggests python3 python3-pip git git-lfs make ninja-build flex bison jq graphviz \
|
||||
clang-format-${LLVM_TOOLS_VERSION} clang-tidy-${LLVM_TOOLS_VERSION} clang-tools-${LLVM_TOOLS_VERSION} \
|
||||
&& update-alternatives --install /usr/bin/clang-format clang-format /usr/bin/clang-format-${LLVM_TOOLS_VERSION} 100 \
|
||||
&& pip3 install -q --upgrade --no-cache-dir pip && pip3 install -q --no-cache-dir conan==1.62 gcovr cmake cmake-format \
|
||||
clang-tidy-${LLVM_TOOLS_VERSION} clang-tools-${LLVM_TOOLS_VERSION} \
|
||||
&& pip3 install -q --upgrade --no-cache-dir pip && pip3 install -q --no-cache-dir conan==1.62 gcovr cmake==3.31.6 pre-commit \
|
||||
&& apt-get clean && apt remove -y software-properties-common
|
||||
|
||||
# Install gcc-12 and make ldconfig aware of the new libstdc++ location (for gcc)
|
||||
@@ -62,8 +62,14 @@ RUN wget "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN
|
||||
&& cmake --build . --target install \
|
||||
&& rm -rf /tmp/* /var/tmp/*
|
||||
|
||||
# Install ClangBuildAnalyzer
|
||||
RUN wget "https://github.com/aras-p/ClangBuildAnalyzer/releases/download/v${CLANG_BUILD_ANALYZER_VERSION}/ClangBuildAnalyzer-linux" \
|
||||
&& chmod +x ClangBuildAnalyzer-linux \
|
||||
&& mv ClangBuildAnalyzer-linux /usr/bin/ClangBuildAnalyzer \
|
||||
&& rm -rf /tmp/* /var/tmp/*
|
||||
|
||||
# Install gh
|
||||
RUN wget https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
|
||||
RUN wget "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz" \
|
||||
&& tar xf gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
|
||||
&& mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/bin/gh \
|
||||
&& rm -rf /tmp/* /var/tmp/*
|
||||
@@ -72,7 +78,7 @@ WORKDIR /root
|
||||
# Using root by default is not very secure but github checkout action doesn't work with any other user
|
||||
# https://github.com/actions/checkout/issues/956
|
||||
# And Github Actions doc recommends using root
|
||||
# https://docs.github.com/en/actions/creating-actions/dockerfile-support-for-github-actions#user
|
||||
# https://docs.github.com/en/actions/sharing-automations/creating-actions/dockerfile-support-for-github-actions#user
|
||||
|
||||
# Setup conan
|
||||
RUN conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
|
||||
@@ -95,7 +101,7 @@ RUN conan profile new clang --detect \
|
||||
&& conan profile update env.CC=/usr/bin/clang-16 clang \
|
||||
&& conan profile update env.CXX=/usr/bin/clang++-16 clang \
|
||||
&& conan profile update env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS" clang \
|
||||
&& conan profile update "conf.tools.build:compiler_executables={\"c\": \"/usr/bin/clang-16\", \"cpp\": \"/usr/bin/clang++-16\"}" clang
|
||||
&& conan profile update "conf.tools.build:compiler_executables={\"c\": \"/usr/bin/clang-16\", \"cpp\": \"/usr/bin/clang++-16\"}" clang
|
||||
|
||||
RUN echo "include(gcc)" >> .conan/profiles/default
|
||||
|
||||
@@ -4,6 +4,7 @@ This image contains an environment to build [Clio](https://github.com/XRPLF/clio
|
||||
It is used in [Clio Github Actions](https://github.com/XRPLF/clio/actions) but can also be used to compile Clio locally.
|
||||
|
||||
The image is based on Ubuntu 20.04 and contains:
|
||||
|
||||
- clang 16.0.6
|
||||
- gcc 12.3
|
||||
- doxygen 1.12
|
||||
@@ -13,4 +14,4 @@ The image is based on Ubuntu 20.04 and contains:
|
||||
- and some other useful tools
|
||||
|
||||
Conan is set up to build Clio without any additional steps. There are two preset Conan profiles, `clang` and `gcc`, to use the corresponding compiler. By default, Conan is set up to use `gcc`.
|
||||
Sanitizer builds for `ASAN`, `TSAN` and `UBSAN` are enabled via conan profiles for each of the supported compilers. These can be selected using the following pattern (all lowercase): `[compiler].[sanitizer]` (e.g. `--profile gcc.tsan`).
|
||||
Sanitizer builds for `ASAN`, `TSAN` and `UBSAN` are enabled via conan profiles for each of the supported compilers. These can be selected using the following pattern (all lowercase): `[compiler].[sanitizer]` (e.g. `--profile gcc.tsan`).
|
||||
|
||||
@@ -12,12 +12,14 @@ Your configuration file should be mounted under the path `/opt/clio/etc/config.j
|
||||
Clio repository provides an [example](https://github.com/XRPLF/clio/blob/develop/docs/examples/config/example-config.json) of the configuration file.
|
||||
|
||||
Config file recommendations:
|
||||
|
||||
- Set `log_to_console` to `false` if you want to avoid logs being written to `stdout`.
|
||||
- Set `log_directory` to `/opt/clio/log` to store logs in a volume.
|
||||
|
||||
## Usage
|
||||
|
||||
The following command can be used to run Clio in docker (assuming server's port is `51233` in your config):
|
||||
|
||||
```bash
|
||||
docker run -d -v <path to your config.json>:/opt/clio/etc/config.json -v <path to store logs>:/opt/clio/log -p 51233:51233 rippleci/clio
|
||||
```
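To confirm the container came up, a small hedged follow-up (plain Docker commands; the container ID comes from `docker ps`, since the command above does not assign a name):

```bash
docker ps                      # find the container ID
docker logs -f <container id>  # tail Clio's console output
```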
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM ubuntu:focal
|
||||
FROM ubuntu:focal
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG TARGETARCH
|
||||
|
||||
@@ -63,7 +63,7 @@ COPY --from=build /gcc12.deb /
|
||||
|
||||
# Make gcc-12 available but also leave gcc12.deb for others to copy if needed
|
||||
RUN apt update && apt-get install -y binutils libc6-dev \
|
||||
&& dpkg -i /gcc12.deb
|
||||
&& dpkg -i /gcc12.deb
|
||||
|
||||
RUN update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 100 \
|
||||
&& update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++-12 100 \
|
||||
@@ -1,6 +1,6 @@
|
||||
services:
|
||||
clio_develop:
|
||||
image: rippleci/clio_ci:latest
|
||||
image: ghcr.io/xrplf/clio-ci:latest
|
||||
volumes:
|
||||
- clio_develop_conan_data:/root/.conan/data
|
||||
- clio_develop_ccache:/root/.ccache
|
||||
|
||||
@@ -59,4 +59,3 @@ case $1 in
|
||||
esac
|
||||
|
||||
popd > /dev/null
|
||||
|
||||
|
||||
@@ -3,6 +3,8 @@ project(docs)
|
||||
|
||||
include(${CMAKE_CURRENT_SOURCE_DIR}/../cmake/ClioVersion.cmake)
|
||||
|
||||
# cmake-format: off
|
||||
# Generate `docs` target for doxygen documentation
|
||||
# Note: use `cmake --build . --target docs` from your `build` directory to generate the documentation
|
||||
# cmake-format: on
|
||||
include(${CMAKE_CURRENT_SOURCE_DIR}/../cmake/Docs.cmake)
|
||||
|
||||
@@ -34,7 +34,7 @@ FULL_SIDEBAR = NO
|
||||
HTML_HEADER = ${SOURCE}/docs/doxygen-awesome-theme/header.html
|
||||
HTML_EXTRA_STYLESHEET = ${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome.css \
|
||||
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-sidebar-only.css \
|
||||
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-sidebar-only-darkmode-toggle.css
|
||||
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-sidebar-only-darkmode-toggle.css
|
||||
HTML_EXTRA_FILES = ${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-darkmode-toggle.js \
|
||||
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-interactive-toc.js
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ Clio is built with [CMake](https://cmake.org/) and uses [Conan](https://conan.io
|
||||
- [**Optional**] [CCache](https://ccache.dev/): speeds up compilation if you are going to compile Clio often
|
||||
|
||||
| Compiler | Version |
|
||||
|-------------|---------|
|
||||
| ----------- | ------- |
|
||||
| GCC | 12.3 |
|
||||
| Clang | 16 |
|
||||
| Apple Clang | 15 |
|
||||
@@ -23,9 +23,9 @@ Clio does not require anything other than `compiler.cppstd=20` in your (`~/.cona
|
||||
> [!NOTE]
|
||||
> Although Clio is built using C++23, it's required to set `compiler.cppstd=20` for the time being as some of Clio's dependencies are not yet capable of building under C++23.
|
||||
|
||||
> Mac example:
|
||||
**Mac example**:
|
||||
|
||||
```
|
||||
```text
|
||||
[settings]
|
||||
os=Macos
|
||||
os_build=Macos
|
||||
@@ -40,9 +40,9 @@ compiler.cppstd=20
|
||||
tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]
|
||||
```
|
||||
|
||||
> Linux example:
|
||||
**Linux example**:
|
||||
|
||||
```
|
||||
```text
|
||||
[settings]
|
||||
os=Linux
|
||||
os_build=Linux
|
||||
@@ -93,6 +93,8 @@ If successful, `conan install` will find the required packages and `cmake` will
|
||||
> [!TIP]
|
||||
> To generate a Code Coverage report, include `-o coverage=True` in the `conan install` command above, along with `-o tests=True` to enable tests. After running the `cmake` commands, execute `make clio_tests-ccov`. The coverage report will be found at `clio_tests-llvm-cov/index.html`.
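A condensed, hedged sketch of that coverage flow (the flags are the ones named in the tip; the `cmake` configure/build step in between follows the regular build instructions above):

```sh
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o coverage=True
# ...run the usual cmake configure/build commands, then:
make clio_tests-ccov
# The report lands at clio_tests-llvm-cov/index.html
```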
|
||||
|
||||
<!-- markdownlint-disable-line MD028 -->
|
||||
|
||||
> [!NOTE]
|
||||
> If you've built Clio before and the build is now failing, it's likely due to updated dependencies. Try deleting the build folder and then rerunning the Conan and CMake commands mentioned above.
|
||||
|
||||
@@ -104,10 +106,10 @@ To generate the API docs:
|
||||
|
||||
1. First, include `-o docs=True` in the conan install command. For example:
|
||||
|
||||
```sh
|
||||
mkdir build && cd build
|
||||
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False -o docs=True
|
||||
```
|
||||
```sh
|
||||
mkdir build && cd build
|
||||
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False -o docs=True
|
||||
```
|
||||
|
||||
2. Once that has completed successfully, run the `cmake` command and add the `--target docs` option:
|
||||
|
||||
@@ -118,16 +120,16 @@ To generate the API docs:
|
||||
|
||||
3. Go to `build/docs/html` to view the generated files.
|
||||
|
||||
Open the `index.html` file in your browser to see the documentation pages.
|
||||
Open the `index.html` file in your browser to see the documentation pages.
|
||||
|
||||

|
||||

|
||||
|
||||
## Building Clio with Docker
|
||||
|
||||
It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.
|
||||
|
||||
```sh
|
||||
docker run -it rippleci/clio_ci:latest
|
||||
docker run -it ghcr.io/xrplf/clio-ci:latest
|
||||
git clone https://github.com/XRPLF/clio
|
||||
mkdir build && cd build
|
||||
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False
|
||||
@@ -148,39 +150,39 @@ Sometimes, during development, you need to build against a custom version of `li
|
||||
|
||||
1. First, pull/clone the appropriate `rippled` fork and switch to the branch you want to build. For example, the following example uses an in-development build with [XLS-33d Multi-Purpose Tokens](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0033d-multi-purpose-tokens):
|
||||
|
||||
```sh
|
||||
git clone https://github.com/shawnxie999/rippled/
|
||||
cd rippled
|
||||
git switch mpt-1.1
|
||||
```
|
||||
```sh
|
||||
git clone https://github.com/shawnxie999/rippled/
|
||||
cd rippled
|
||||
git switch mpt-1.1
|
||||
```
|
||||
|
||||
2. Export a custom package to your local Conan store using a user/channel:
|
||||
|
||||
```sh
|
||||
conan export . my/feature
|
||||
```
|
||||
```sh
|
||||
conan export . my/feature
|
||||
```
|
||||
|
||||
3. Patch your local Clio build to use the right package.
|
||||
|
||||
Edit `conanfile.py` (from the Clio repository root). Replace the `xrpl` requirement with the custom package version from the previous step. This must also include the current version number from your `rippled` branch. For example:
|
||||
Edit `conanfile.py` (from the Clio repository root). Replace the `xrpl` requirement with the custom package version from the previous step. This must also include the current version number from your `rippled` branch. For example:
|
||||
|
||||
```py
|
||||
# ... (excerpt from conanfile.py)
|
||||
requires = [
|
||||
'boost/1.82.0',
|
||||
'cassandra-cpp-driver/2.17.0',
|
||||
'fmt/10.1.1',
|
||||
'protobuf/3.21.9',
|
||||
'grpc/1.50.1',
|
||||
'openssl/1.1.1u',
|
||||
'xrpl/2.3.0-b1@my/feature', # Update this line
|
||||
'libbacktrace/cci.20210118'
|
||||
]
|
||||
```
|
||||
```py
|
||||
# ... (excerpt from conanfile.py)
|
||||
requires = [
|
||||
'boost/1.82.0',
|
||||
'cassandra-cpp-driver/2.17.0',
|
||||
'fmt/10.1.1',
|
||||
'protobuf/3.21.9',
|
||||
'grpc/1.50.1',
|
||||
'openssl/1.1.1u',
|
||||
'xrpl/2.3.0-b1@my/feature', # Update this line
|
||||
'libbacktrace/cci.20210118'
|
||||
]
|
||||
```
|
||||
|
||||
4. Build Clio as you would have before.
|
||||
|
||||
See [Building Clio](#building-clio) for details.
|
||||
See [Building Clio](#building-clio) for details.
|
||||
|
||||
## Using `clang-tidy` for static analysis
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -18,7 +18,8 @@ Clio needs access to a `rippled` server in order to work. The following configur
|
||||
|
||||
- A port to handle gRPC requests, with the IP(s) of Clio specified in the `secure_gateway` entry
|
||||
|
||||
The example configs of [rippled](https://github.com/XRPLF/rippled/blob/develop/cfg/rippled-example.cfg) and [Clio](../docs/examples/config/example-config.json) are set up in a way that minimal changes are required.
|
||||
The example configs of [rippled](https://github.com/XRPLF/rippled/blob/develop/cfg/rippled-example.cfg) and [Clio](../docs/examples/config/example-config.json) are set up in a way that minimal changes are required. However, if you want to view all configuration keys available in Clio, see [config-description.md](./config-description.md).
|
||||
|
||||
When running locally, the only change needed is to uncomment the `port_grpc` section of the `rippled` config.
|
||||
|
||||
If you're running Clio and `rippled` on separate machines, in addition to uncommenting the `port_grpc` section, a few other steps must be taken:
|
||||
@@ -52,7 +53,7 @@ Here is an example snippet from the config file:
|
||||
|
||||
## SSL
|
||||
|
||||
The parameters `ssl_cert_file` and `ssl_key_file` can also be added to the top level of precedence of our Clio config. The `ssl_cert_file` field specifies the filepath for your SSL cert, while `ssl_key_file` specifies the filepath for your SSL key. It is up to you how to change ownership of these folders for your designated Clio user.
|
||||
The parameters `ssl_cert_file` and `ssl_key_file` can also be added to the top level of precedence of our Clio config. The `ssl_cert_file` field specifies the filepath for your SSL cert, while `ssl_key_file` specifies the filepath for your SSL key. It is up to you how to change ownership of these folders for your designated Clio user.
|
||||
|
||||
Your options include:
|
||||
|
||||
|
||||
@@ -2,146 +2,144 @@
|
||||
* This is an example configuration file. Please do not use without modifying to suit your needs.
|
||||
*/
|
||||
{
|
||||
"database": {
|
||||
"type": "cassandra",
|
||||
"cassandra": {
|
||||
"contact_points": "127.0.0.1",
|
||||
"port": 9042,
|
||||
"keyspace": "clio",
|
||||
"replication_factor": 1,
|
||||
"table_prefix": "",
|
||||
"max_write_requests_outstanding": 25000,
|
||||
"max_read_requests_outstanding": 30000,
|
||||
"threads": 8,
|
||||
//
|
||||
// Advanced options. USE AT OWN RISK:
|
||||
// ---
|
||||
"core_connections_per_host": 1, // Defaults to 1
|
||||
"write_batch_size": 20 // Defaults to 20
|
||||
//
|
||||
// Below options will use defaults from cassandra driver if left unspecified.
|
||||
// See https://docs.datastax.com/en/developer/cpp-driver/2.17/api/struct.CassCluster/ for details.
|
||||
//
|
||||
// "queue_size_io": 2
|
||||
//
|
||||
// ---
|
||||
}
|
||||
},
|
||||
"allow_no_etl": false, // Allow Clio to run without valid ETL source, otherwise Clio will stop if ETL check fails
|
||||
"etl_sources": [
|
||||
{
|
||||
"ip": "127.0.0.1",
|
||||
"ws_port": "6005",
|
||||
"grpc_port": "50051"
|
||||
}
|
||||
],
|
||||
"forwarding": {
|
||||
"cache_timeout": 0.250, // in seconds, could be 0, which means no cache
|
||||
"request_timeout": 10.0 // time for Clio to wait for rippled to reply on a forwarded request (default is 10 seconds)
|
||||
},
|
||||
"rpc": {
|
||||
"cache_timeout": 0.5 // in seconds, could be 0, which means no cache for rpc
|
||||
},
|
||||
"dos_guard": {
|
||||
// Comma-separated list of IPs to exclude from rate limiting
|
||||
"whitelist": [
|
||||
"127.0.0.1"
|
||||
],
|
||||
//
|
||||
// The below values are the default values and are only specified here
|
||||
// for documentation purposes. The rate limiter currently limits
|
||||
// connections and bandwidth per IP. The rate limiter looks at the raw
|
||||
// IP of a client connection, and so requests routed through a load
|
||||
// balancer will all have the same IP and be treated as a single client.
|
||||
//
|
||||
"max_fetches": 1000000, // Max bytes per IP per sweep interval
|
||||
"max_connections": 20, // Max connections per IP
|
||||
"max_requests": 20, // Max connections per IP per sweep interval
|
||||
"sweep_interval": 1 // Time in seconds before resetting max_fetches and max_requests
|
||||
},
|
||||
"server": {
|
||||
"ip": "0.0.0.0",
|
||||
"port": 51233,
|
||||
// Max number of requests to queue up before rejecting further requests.
|
||||
// Defaults to 0, which disables the limit.
|
||||
"max_queue_size": 500,
|
||||
// If request contains header with authorization, Clio will check if it matches the prefix 'Password ' + this value's sha256 hash
|
||||
// If matches, the request will be considered as admin request
|
||||
"admin_password": "xrp",
|
||||
// If local_admin is true, Clio will consider requests come from 127.0.0.1 as admin requests
|
||||
// It's true by default unless admin_password is set,'local_admin' : true and 'admin_password' can not be set at the same time
|
||||
"local_admin": false,
|
||||
"processing_policy": "parallel", // Could be "sequent" or "parallel".
|
||||
// For sequent policy request from one client connection will be processed one by one and the next one will not be read before
|
||||
// the previous one is processed. For parallel policy Clio will take all requests and process them in parallel and
|
||||
// send a reply for each request whenever it is ready.
|
||||
"parallel_requests_limit": 10, // Optional parameter, used only if "processing_strategy" is "parallel". It limits the number of requests for one client connection processed in parallel. Infinite if not specified.
|
||||
// Max number of responses to queue up before sent successfully. If a client's waiting queue is too long, the server will close the connection.
|
||||
"ws_max_sending_queue_size": 1500,
|
||||
"__ng_web_server": false // Use ng web server. This is a temporary setting which will be deleted after switching to ng web server
|
||||
},
|
||||
// Time in seconds for graceful shutdown. Defaults to 10 seconds. Not fully implemented yet.
|
||||
"graceful_period": 10.0,
|
||||
// Overrides log level on a per logging channel.
|
||||
// Defaults to global "log_level" for each unspecified channel.
|
||||
"log_channels": [
|
||||
{
|
||||
"channel": "Backend",
|
||||
"log_level": "fatal"
|
||||
},
|
||||
{
|
||||
"channel": "WebServer",
|
||||
"log_level": "info"
|
||||
},
|
||||
{
|
||||
"channel": "Subscriptions",
|
||||
"log_level": "info"
|
||||
},
|
||||
{
|
||||
"channel": "RPC",
|
||||
"log_level": "error"
|
||||
},
|
||||
{
|
||||
"channel": "ETL",
|
||||
"log_level": "debug"
|
||||
},
|
||||
{
|
||||
"channel": "Performance",
|
||||
"log_level": "trace"
|
||||
}
|
||||
],
|
||||
"cache": {
|
||||
// Configure this to use either "num_diffs", "num_cursors_from_diff", or "num_cursors_from_account". By default, Clio uses "num_diffs".
|
||||
"num_diffs": 32, // Generate the cursors from the latest ledger diff, then use the cursors to partition the ledger to load concurrently. The cursors number is affected by the busyness of the network.
|
||||
// "num_cursors_from_diff": 3200, // Read the cursors from the diff table until we have enough cursors to partition the ledger to load concurrently.
|
||||
// "num_cursors_from_account": 3200, // Read the cursors from the account table until we have enough cursors to partition the ledger to load concurrently.
|
||||
"num_markers": 48, // The number of markers is the number of coroutines to load the cache concurrently.
|
||||
"page_fetch_size": 512, // The number of rows to load for each page.
|
||||
"load": "async" // "sync" to load cache synchronously or "async" to load cache asynchronously or "none"/"no" to turn off the cache.
|
||||
},
|
||||
"prometheus": {
|
||||
"enabled": true,
|
||||
"compress_reply": true
|
||||
},
|
||||
"log_level": "info",
|
||||
// Log format (this is the default format)
|
||||
"log_format": "%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%",
|
||||
"log_to_console": true,
|
||||
// Clio logs to file in the specified directory only if "log_directory" is set
|
||||
// "log_directory": "./clio_log",
|
||||
"log_rotation_size": 2048,
|
||||
"log_directory_max_size": 51200,
|
||||
"log_rotation_hour_interval": 12,
|
||||
"log_tag_style": "uint",
|
||||
"extractor_threads": 8,
|
||||
"read_only": false,
|
||||
// "start_sequence": [integer] the ledger index to start from,
|
||||
// "finish_sequence": [integer] the ledger index to finish at,
|
||||
// "ssl_cert_file" : "/full/path/to/cert.file",
|
||||
// "ssl_key_file" : "/full/path/to/key.file"
|
||||
"api_version": {
|
||||
"min": 1, // Minimum API version supported (could be 1 or 2)
|
||||
"max": 2, // Maximum API version supported (could be 1 or 2, but >= min)
|
||||
"default": 1 // Clio behaves the same as rippled by default
|
||||
"database": {
|
||||
"type": "cassandra",
|
||||
"cassandra": {
|
||||
"contact_points": "127.0.0.1",
|
||||
"port": 9042,
|
||||
"keyspace": "clio",
|
||||
"replication_factor": 1,
|
||||
"table_prefix": "",
|
||||
"max_write_requests_outstanding": 25000,
|
||||
"max_read_requests_outstanding": 30000,
|
||||
"threads": 8,
|
||||
//
|
||||
// Advanced options. USE AT OWN RISK:
|
||||
// ---
|
||||
"core_connections_per_host": 1, // Defaults to 1
|
||||
"write_batch_size": 20 // Defaults to 20
|
||||
//
|
||||
// Below options will use defaults from cassandra driver if left unspecified.
|
||||
// See https://docs.datastax.com/en/developer/cpp-driver/2.17/api/struct.CassCluster/ for details.
|
||||
//
|
||||
// "queue_size_io": 2
|
||||
//
|
||||
// ---
|
||||
}
|
||||
},
|
||||
"allow_no_etl": false, // Allow Clio to run without valid ETL source, otherwise Clio will stop if ETL check fails
|
||||
"etl_sources": [
|
||||
{
|
||||
"ip": "127.0.0.1",
|
||||
"ws_port": "6005",
|
||||
"grpc_port": "50051"
|
||||
}
|
||||
],
|
||||
"forwarding": {
|
||||
"cache_timeout": 0.25, // in seconds, could be 0, which means no cache
|
||||
"request_timeout": 10.0 // time for Clio to wait for rippled to reply on a forwarded request (default is 10 seconds)
|
||||
},
|
||||
"rpc": {
|
||||
"cache_timeout": 0.5 // in seconds, could be 0, which means no cache for rpc
|
||||
},
|
||||
"dos_guard": {
|
||||
// Comma-separated list of IPs to exclude from rate limiting
|
||||
"whitelist": ["127.0.0.1"],
|
||||
//
|
||||
// The below values are the default values and are only specified here
|
||||
// for documentation purposes. The rate limiter currently limits
|
||||
// connections and bandwidth per IP. The rate limiter looks at the raw
|
||||
// IP of a client connection, and so requests routed through a load
|
||||
// balancer will all have the same IP and be treated as a single client.
|
||||
//
|
||||
"max_fetches": 1000000, // Max bytes per IP per sweep interval
|
||||
"max_connections": 20, // Max connections per IP
|
||||
"max_requests": 20, // Max connections per IP per sweep interval
|
||||
"sweep_interval": 1 // Time in seconds before resetting max_fetches and max_requests
|
||||
},
|
||||
"server": {
|
||||
"ip": "0.0.0.0",
|
||||
"port": 51233,
|
||||
// Max number of requests to queue up before rejecting further requests.
|
||||
// Defaults to 0, which disables the limit.
|
||||
"max_queue_size": 500,
|
||||
// If request contains header with authorization, Clio will check if it matches the prefix 'Password ' + this value's sha256 hash
|
||||
// If matches, the request will be considered as admin request
|
||||
"admin_password": "xrp",
|
||||
// If local_admin is true, Clio will consider requests come from 127.0.0.1 as admin requests
|
||||
// It's true by default unless admin_password is set,'local_admin' : true and 'admin_password' can not be set at the same time
|
||||
"local_admin": false,
|
||||
"processing_policy": "parallel", // Could be "sequent" or "parallel".
|
||||
// For sequent policy request from one client connection will be processed one by one and the next one will not be read before
|
||||
// the previous one is processed. For parallel policy Clio will take all requests and process them in parallel and
|
||||
// send a reply for each request whenever it is ready.
|
||||
"parallel_requests_limit": 10, // Optional parameter, used only if "processing_strategy" is "parallel". It limits the number of requests for one client connection processed in parallel. Infinite if not specified.
|
||||
// Max number of responses to queue up before sent successfully. If a client's waiting queue is too long, the server will close the connection.
|
||||
"ws_max_sending_queue_size": 1500,
|
||||
"__ng_web_server": false // Use ng web server. This is a temporary setting which will be deleted after switching to ng web server
|
||||
},
|
||||
// Time in seconds for graceful shutdown. Defaults to 10 seconds. Not fully implemented yet.
|
||||
"graceful_period": 10.0,
|
||||
// Overrides log level on a per logging channel.
|
||||
// Defaults to global "log_level" for each unspecified channel.
|
||||
"log_channels": [
|
||||
{
|
||||
"channel": "Backend",
|
||||
"log_level": "fatal"
|
||||
},
|
||||
{
|
||||
"channel": "WebServer",
|
||||
"log_level": "info"
|
||||
},
|
||||
{
|
||||
"channel": "Subscriptions",
|
||||
"log_level": "info"
|
||||
},
|
||||
{
|
||||
"channel": "RPC",
|
||||
"log_level": "error"
|
||||
},
|
||||
{
|
||||
"channel": "ETL",
|
||||
"log_level": "debug"
|
||||
},
|
||||
{
|
||||
"channel": "Performance",
|
||||
"log_level": "trace"
|
||||
}
|
||||
],
|
||||
"cache": {
|
||||
// Configure this to use either "num_diffs", "num_cursors_from_diff", or "num_cursors_from_account". By default, Clio uses "num_diffs".
|
||||
"num_diffs": 32, // Generate the cursors from the latest ledger diff, then use the cursors to partition the ledger to load concurrently. The cursors number is affected by the busyness of the network.
|
||||
// "num_cursors_from_diff": 3200, // Read the cursors from the diff table until we have enough cursors to partition the ledger to load concurrently.
|
||||
// "num_cursors_from_account": 3200, // Read the cursors from the account table until we have enough cursors to partition the ledger to load concurrently.
|
||||
"num_markers": 48, // The number of markers is the number of coroutines to load the cache concurrently.
|
||||
"page_fetch_size": 512, // The number of rows to load for each page.
|
||||
"load": "async" // "sync" to load cache synchronously or "async" to load cache asynchronously or "none"/"no" to turn off the cache.
|
||||
},
|
||||
"prometheus": {
|
||||
"enabled": true,
|
||||
"compress_reply": true
|
||||
},
|
||||
"log_level": "info",
|
||||
// Log format (this is the default format)
|
||||
"log_format": "%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%",
|
||||
"log_to_console": true,
|
||||
// Clio logs to file in the specified directory only if "log_directory" is set
|
||||
// "log_directory": "./clio_log",
|
||||
"log_rotation_size": 2048,
|
||||
"log_directory_max_size": 51200,
|
||||
"log_rotation_hour_interval": 12,
|
||||
"log_tag_style": "uint",
|
||||
"extractor_threads": 8,
|
||||
"read_only": false,
|
||||
// "start_sequence": [integer] the ledger index to start from,
|
||||
// "finish_sequence": [integer] the ledger index to finish at,
|
||||
// "ssl_cert_file" : "/full/path/to/cert.file",
|
||||
// "ssl_key_file" : "/full/path/to/key.file"
|
||||
"api_version": {
|
||||
"min": 1, // Minimum API version supported (could be 1 or 2)
|
||||
"max": 2, // Maximum API version supported (could be 1 or 2, but >= min)
|
||||
"default": 1 // Clio behaves the same as rippled by default
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,8 +7,9 @@
|
||||
This directory contains an example of docker based infrastructure to collect and visualise metrics from clio.
|
||||
|
||||
The structure of the directory:
|
||||
|
||||
- `compose.yaml`
|
||||
Docker-compose file with Prometheus and Grafana set up.
|
||||
Docker-compose file with Prometheus and Grafana set up.
|
||||
- `prometheus.yaml`
|
||||
Defines metrics collection from Clio and Prometheus itself.
|
||||
Demonstrates how to setup Clio target and Clio's admin authorisation in Prometheus.
|
||||
@@ -24,6 +25,6 @@ The structure of the directory:
|
||||
1. Make sure you have `docker` and `docker-compose` installed.
|
||||
2. Run `docker-compose up -d` from this directory. It will start docker containers with Prometheus and Grafana.
|
||||
3. Open [http://localhost:3000/dashboards](http://localhost:3000/dashboards). Grafana login `admin`, password `grafana`.
|
||||
There will be preconfigured Clio dashboard.
|
||||
There will be preconfigured Clio dashboard.
|
||||
|
||||
If Clio is not running yet launch Clio to see metrics. Some of the metrics may appear only after requests to Clio.
|
||||
|
||||
@@ -6,7 +6,7 @@ services:
|
||||
volumes:
|
||||
- ./prometheus.yaml:/etc/prometheus/prometheus.yml
|
||||
command:
|
||||
- '--config.file=/etc/prometheus/prometheus.yml'
|
||||
- "--config.file=/etc/prometheus/prometheus.yml"
|
||||
grafana:
|
||||
image: grafana/grafana
|
||||
ports:
|
||||
|
||||
@@ -80,9 +80,7 @@
|
||||
"orientation": "auto",
|
||||
"percentChangeColorMode": "standard",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
@@ -161,9 +159,7 @@
|
||||
"orientation": "auto",
|
||||
"percentChangeColorMode": "standard",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
@@ -246,9 +242,7 @@
|
||||
"orientation": "auto",
|
||||
"percentChangeColorMode": "standard",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
@@ -331,9 +325,7 @@
|
||||
"orientation": "auto",
|
||||
"percentChangeColorMode": "standard",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
@@ -1406,7 +1398,7 @@
|
||||
"refId": "B"
|
||||
}
|
||||
],
|
||||
"title": "DB Opperations Error Rate",
|
||||
"title": "DB Operations Error Rate",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
apiVersion: 1
|
||||
|
||||
providers:
|
||||
- name: 'Clio dashboard'
|
||||
- name: "Clio dashboard"
|
||||
# <int> Org id. Default to 1
|
||||
orgId: 1
|
||||
# <string> name of the dashboard folder.
|
||||
folder: ''
|
||||
folder: ""
|
||||
# <string> folder UID. will be automatically generated if not specified
|
||||
folderUid: ''
|
||||
folderUid: ""
|
||||
# <string> provider type. Default to 'file'
|
||||
type: file
|
||||
# <bool> disable dashboard deletion
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
apiVersion: 1
|
||||
|
||||
datasources:
|
||||
- name: Prometheus
|
||||
type: prometheus
|
||||
url: http://prometheus:9090
|
||||
isDefault: true
|
||||
access: proxy
|
||||
- name: Prometheus
|
||||
type: prometheus
|
||||
url: http://prometheus:9090
|
||||
isDefault: true
|
||||
access: proxy
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
scrape_configs:
|
||||
- job_name: clio
|
||||
scrape_interval: 5s
|
||||
scrape_timeout: 5s
|
||||
authorization:
|
||||
type: Password
|
||||
# sha256sum from password `xrp`
|
||||
# use echo -n 'your_password' | shasum -a 256 to get hash
|
||||
credentials: 0e1dcf1ff020cceabf8f4a60a32e814b5b46ee0bb8cd4af5c814e4071bd86a18
|
||||
static_configs:
|
||||
- targets:
|
||||
- host.docker.internal:51233
|
||||
- job_name: prometheus
|
||||
honor_timestamps: true
|
||||
scrape_interval: 15s
|
||||
scrape_timeout: 10s
|
||||
static_configs:
|
||||
- targets:
|
||||
- localhost:9090
|
||||
- job_name: clio
|
||||
scrape_interval: 5s
|
||||
scrape_timeout: 5s
|
||||
authorization:
|
||||
type: Password
|
||||
# sha256sum from password `xrp`
|
||||
# use echo -n 'your_password' | shasum -a 256 to get hash
|
||||
credentials: 0e1dcf1ff020cceabf8f4a60a32e814b5b46ee0bb8cd4af5c814e4071bd86a18
|
||||
static_configs:
|
||||
- targets:
|
||||
- host.docker.internal:51233
|
||||
- job_name: prometheus
|
||||
honor_timestamps: true
|
||||
scrape_interval: 15s
|
||||
scrape_timeout: 10s
|
||||
static_configs:
|
||||
- targets:
|
||||
- localhost:9090
|
||||
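As a worked example of the comment in this scrape config: the `credentials` value is the SHA-256 digest of Clio's plain-text `admin_password` (`xrp` in `example-config.json`), which you can reproduce with:

```sh
echo -n 'xrp' | shasum -a 256
# 0e1dcf1ff020cceabf8f4a60a32e814b5b46ee0bb8cd4af5c814e4071bd86a18  -
```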
|
||||
@@ -8,12 +8,12 @@ The minimum level of severity at which the log message will be outputted by defa
|
||||
|
||||
## `log_format`
|
||||
|
||||
The format of log lines produced by Clio. Defaults to `"%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%"`.
|
||||
The format of log lines produced by Clio. Defaults to `"%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%"`.
|
||||
|
||||
Each of the variables expands like so:
|
||||
|
||||
- `TimeStamp`: The full date and time of the log entry
|
||||
- `SourceLocation`: A partial path to the c++ file and the line number in said file (`source/file/path:linenumber`)
|
||||
- `SourceLocation`: A partial path to the c++ file and the line number in said file (`source/file/path:linenumber`)
|
||||
- `ThreadID`: The ID of the thread the log entry is written from
|
||||
- `Channel`: The channel that this log entry was sent to
|
||||
- `Severity`: The severity (aka log level) the entry was sent at
|
||||
@@ -30,8 +30,8 @@ Each object is of this format:
|
||||
|
||||
```json
|
||||
{
|
||||
"channel": "Backend",
|
||||
"log_level": "fatal"
|
||||
"channel": "Backend",
|
||||
"log_level": "fatal"
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
@@ -3,10 +3,11 @@
|
||||
## Prerequisites
|
||||
|
||||
- Access to a Cassandra cluster or ScyllaDB cluster. Can be local or remote.
|
||||
> [!IMPORTANT]
|
||||
> There are some key considerations when using **ScyllaDB**. By default, Scylla reserves all free RAM on a machine for itself. If you are running `rippled` or other services on the same machine, restrict its memory usage using the `--memory` argument.
|
||||
>
|
||||
> See [ScyllaDB in a Shared Environment](https://docs.scylladb.com/getting-started/scylla-in-a-shared-environment/) to learn more.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> There are some key considerations when using **ScyllaDB**. By default, Scylla reserves all free RAM on a machine for itself. If you are running `rippled` or other services on the same machine, restrict its memory usage using the `--memory` argument.
|
||||
>
|
||||
> See [ScyllaDB in a Shared Environment](https://docs.scylladb.com/getting-started/scylla-in-a-shared-environment/) to learn more.
|
||||
|
||||
- Access to one or more `rippled` nodes. Can be local or remote.
|
||||
|
||||
|
||||
@@ -1,47 +1,60 @@
|
||||
# Troubleshooting Guide
|
||||
|
||||
This guide will help you troubleshoot common issues of Clio.
|
||||
|
||||
## Can't connect to DB
|
||||
|
||||
If you see the error log message `Could not connect to Cassandra: No hosts available`, this means that Clio can't connect to the database. Check the following:
|
||||
|
||||
- Make sure the database is running at the specified address and port.
|
||||
- Make sure the database is accessible from the machine where Clio is running.
|
||||
You can use [cqlsh](https://pypi.org/project/cqlsh/) to check the connection to the database.
|
||||
You can use [cqlsh](https://pypi.org/project/cqlsh/) to check the connection to the database.
|
||||
|
||||
If you would like to run a local ScyllaDB, you can call:
|
||||
|
||||
```sh
|
||||
docker run --rm -p 9042:9042 --name clio-scylla -d scylladb/scylla
|
||||
docker run --rm -p 9042:9042 --name clio-scylla -d scylladb/scylla
|
||||
```
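A hedged sketch of the `cqlsh` connectivity check mentioned above (host and port follow the example config; the `clio` keyspace only exists once Clio has written to the database):

```sh
pip3 install cqlsh

# Should list the available keyspaces if the database is reachable
cqlsh 127.0.0.1 9042 -e 'DESCRIBE KEYSPACES'
```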
|
||||
|
||||
## Check the server status of Clio
|
||||
|
||||
To check if Clio is syncing with rippled:
|
||||
|
||||
```sh
|
||||
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep seq
|
||||
```
|
||||
|
||||
If Clio is syncing with rippled, the `seq` value will be increasing.
|
||||
|
||||
## Clio fails to start
|
||||
|
||||
If you see the error log message `Failed to fetch ETL state from...`, this means the configured rippled node is not reachable. Check the following:
|
||||
|
||||
- Make sure the rippled node is running at the specified address and port.
|
||||
- Make sure the rippled node is accessible from the machine where Clio is running.
|
||||
|
||||
If you would like to run Clio without an avaliable rippled node, you can add below setting to Clio's configuration file:
|
||||
```
|
||||
If you would like to run Clio without an available rippled node, you can add below setting to Clio's configuration file:
|
||||
|
||||
```text
|
||||
"allow_no_etl": true
|
||||
```
|
||||
|
||||
## Clio is not added to secure_gateway in rippled's config
|
||||
|
||||
If you see the warning message `AsyncCallData is_unlimited is false.`, this means that Clio is not added to the `secure_gateway` of `port_grpc` session in the rippled configuration file. It will slow down the sync process. Please add Clio's IP to the `secure_gateway` in the rippled configuration file for both grpc and ws port.
|
||||
|
||||
## Clio is slow
|
||||
|
||||
To speed up the response time, Clio has a cache inside. However, cache can take time to warm up. If you see slow response time, you can firstly check if cache is still loading.
|
||||
You can check the cache status by calling:
|
||||
|
||||
```sh
|
||||
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep is_full
|
||||
curl -v -d '{"method":"server_info", "params":[{}]}' 127.0.0.1:51233|python3 -m json.tool|grep is_enabled
|
||||
```
|
||||
If `is_full` is false, it means the cache is still loading. Normally, the Clio can respond quicker if cache finishs loading. If `is_enabled` is false, it means the cache is disabled in the configuration file or there is data corruption in the database.
|
||||
|
||||
If `is_full` is false, it means the cache is still loading. Normally, the Clio can respond quicker if cache finishes loading. If `is_enabled` is false, it means the cache is disabled in the configuration file or there is data corruption in the database.
|
||||
|
||||
## Receive error message `Too many requests`
|
||||
|
||||
If client sees the error message `Too many requests`, this means that the client is blocked by Clio's DosGuard protection. You may want to add the client's IP to the whitelist in the configuration file, Or update other your DosGuard settings.
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@

# Note: This script is intended to be run from the root of the repository.
#
# Not really a hook, but it should be used to check the completeness of documentation for added code, otherwise CI will come for you.
# It's good to have /tmp as the output so that consecutive runs are fast and no clutter ends up in the repository.

echo "+ Checking documentation..."

@@ -15,12 +15,18 @@ DOCDIR=${TMPDIR}/out

# Check whether doxygen is installed at all
if [ -z "$DOXYGEN" ]; then
    if [[ "${CI}" == "true" ]]; then
        # If we are in CI, we should fail the check
        echo "doxygen not found in CI, please install it"
        exit 1
    fi

    # No hard error if doxygen is not installed yet
    cat <<EOF

                                    WARNING
-----------------------------------------------------------------------------
'doxygen' is required to check documentation.
Please install it for next time.

Your changes may fail to pass CI once pushed.

pre-commit-hooks/fix-local-includes.sh (32 lines, new executable file)
@@ -0,0 +1,32 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Note: This script is intended to be run from the root of the repository.
|
||||
#
|
||||
# This script fixes local includes in the C++ code.
|
||||
|
||||
# paths to fix include statements
|
||||
sources="src tests"
|
||||
|
||||
echo "+ Fixing local includes..."
|
||||
|
||||
function grep_code {
|
||||
grep -l "${1}" ${sources} -r --include \*.hpp --include \*.cpp
|
||||
}
|
||||
|
||||
GNU_SED=$(sed --version 2>&1 | grep -q 'GNU' && echo true || echo false)
|
||||
|
||||
if [[ "$GNU_SED" == "false" ]]; then # macOS sed
|
||||
# make all includes to be <...> style
|
||||
grep_code '#include ".*"' | xargs sed -i '' -E 's|#include "(.*)"|#include <\1>|g'
|
||||
|
||||
# make local includes to be "..." style
|
||||
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | tr '\n' '|' | sed 's/|$//' | sed 's/|/\\|/g')
|
||||
grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i '' -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
|
||||
else
|
||||
# make all includes to be <...> style
|
||||
grep_code '#include ".*"' | xargs sed -i -E 's|#include "(.*)"|#include <\1>|g'
|
||||
|
||||
# make local includes to be "..." style
|
||||
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | paste -sd '|' | sed 's/|/\\|/g')
|
||||
grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
|
||||
fi
|
||||
pre-commit-hooks/lfs/pre-push (3 lines, new executable file)
@@ -0,0 +1,3 @@
|
||||
#!/bin/sh
|
||||
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'pre-push' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
|
||||
git lfs pre-push "$@"
|
||||
pre-commit-hooks/run-go-fmt.sh (14 lines, new executable file)
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Capture and print stdout, since gofmt doesn't use proper exit codes
|
||||
#
|
||||
set -e -o pipefail
|
||||
|
||||
if ! command -v gofmt &> /dev/null ; then
|
||||
echo "gofmt not installed or available in the PATH" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
output="$(gofmt -l -w "$@")"
|
||||
echo "$output"
|
||||
[[ -z "$output" ]]
|
||||
@@ -52,7 +52,3 @@ while read local_ref local_oid remote_ref remote_oid; do
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'pre-push' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; }
|
||||
|
||||
git lfs pre-push "$@"
|
||||
@@ -1,5 +1,6 @@
|
||||
add_subdirectory(util)
|
||||
add_subdirectory(data)
|
||||
add_subdirectory(cluster)
|
||||
add_subdirectory(etl)
|
||||
add_subdirectory(etlng)
|
||||
add_subdirectory(feed)
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
add_library(clio_app)
|
||||
target_sources(clio_app PRIVATE CliArgs.cpp ClioApplication.cpp Stopper.cpp WebHandlers.cpp)
|
||||
|
||||
target_link_libraries(clio_app PUBLIC clio_etl clio_etlng clio_feed clio_web clio_rpc clio_migration)
|
||||
target_link_libraries(
|
||||
clio_app
|
||||
PUBLIC clio_cluster
|
||||
clio_etl
|
||||
clio_etlng
|
||||
clio_feed
|
||||
clio_web
|
||||
clio_rpc
|
||||
clio_migration
|
||||
)
|
||||
|
||||
@@ -21,12 +21,15 @@
|
||||
|
||||
#include "app/Stopper.hpp"
|
||||
#include "app/WebHandlers.hpp"
|
||||
#include "cluster/ClusterCommunicationService.hpp"
|
||||
#include "data/AmendmentCenter.hpp"
|
||||
#include "data/BackendFactory.hpp"
|
||||
#include "data/LedgerCache.hpp"
|
||||
#include "etl/ETLService.hpp"
|
||||
#include "etl/LoadBalancer.hpp"
|
||||
#include "etl/NetworkValidatedLedgers.hpp"
|
||||
#include "etlng/LoadBalancer.hpp"
|
||||
#include "etlng/LoadBalancerInterface.hpp"
|
||||
#include "feed/SubscriptionManager.hpp"
|
||||
#include "migration/MigrationInspectorFactory.hpp"
|
||||
#include "rpc/Counters.hpp"
|
||||
@@ -108,6 +111,9 @@ ClioApplication::run(bool const useNgWebServer)
|
||||
// Interface to the database
|
||||
auto backend = data::makeBackend(config_, cache);
|
||||
|
||||
cluster::ClusterCommunicationService clusterCommunicationService{backend};
|
||||
clusterCommunicationService.run();
|
||||
|
||||
auto const amendmentCenter = std::make_shared<data::AmendmentCenter const>(backend);
|
||||
|
||||
{
|
||||
@@ -130,7 +136,12 @@ ClioApplication::run(bool const useNgWebServer)
|
||||
// ETL uses the balancer to extract data.
|
||||
// The server uses the balancer to forward RPCs to a rippled node.
|
||||
// The balancer itself publishes to streams (transactions_proposed and accounts_proposed)
|
||||
auto balancer = etl::LoadBalancer::makeLoadBalancer(config_, ioc, backend, subscriptions, ledgers);
|
||||
auto balancer = [&] -> std::shared_ptr<etlng::LoadBalancerInterface> {
|
||||
if (config_.get<bool>("__ng_etl"))
|
||||
return etlng::LoadBalancer::makeLoadBalancer(config_, ioc, backend, subscriptions, ledgers);
|
||||
|
||||
return etl::LoadBalancer::makeLoadBalancer(config_, ioc, backend, subscriptions, ledgers);
|
||||
}();
|
||||
|
||||
// ETL is responsible for writing and publishing to streams. In read-only mode, ETL only publishes
|
||||
auto etl = etl::ETLService::makeETLService(config_, ioc, backend, subscriptions, balancer, ledgers);
|
||||
@@ -142,12 +153,12 @@ ClioApplication::run(bool const useNgWebServer)
|
||||
config_, backend, subscriptions, balancer, etl, amendmentCenter, counters
|
||||
);
|
||||
|
||||
using RPCEngineType = rpc::RPCEngine<etl::LoadBalancer, rpc::Counters>;
|
||||
using RPCEngineType = rpc::RPCEngine<rpc::Counters>;
|
||||
auto const rpcEngine =
|
||||
RPCEngineType::makeRPCEngine(config_, backend, balancer, dosGuard, workQueue, counters, handlerProvider);
|
||||
|
||||
if (useNgWebServer or config_.get<bool>("server.__ng_web_server")) {
|
||||
web::ng::RPCServerHandler<RPCEngineType, etl::ETLService> handler{config_, backend, rpcEngine, etl};
|
||||
web::ng::RPCServerHandler<RPCEngineType> handler{config_, backend, rpcEngine, etl};
|
||||
|
||||
auto expectedAdminVerifier = web::makeAdminVerificationStrategy(config_);
|
||||
if (not expectedAdminVerifier.has_value()) {
|
||||
@@ -188,8 +199,7 @@ ClioApplication::run(bool const useNgWebServer)
|
||||
}
|
||||
|
||||
// Init the web server
|
||||
auto handler =
|
||||
std::make_shared<web::RPCServerHandler<RPCEngineType, etl::ETLService>>(config_, backend, rpcEngine, etl);
|
||||
auto handler = std::make_shared<web::RPCServerHandler<RPCEngineType>>(config_, backend, rpcEngine, etl);
|
||||
|
||||
auto const httpServer = web::makeHttpServer(config_, ioc, dosGuard, handler);
|
||||
|
||||
|
||||
@@ -20,8 +20,8 @@
|
||||
#pragma once
|
||||
|
||||
#include "data/BackendInterface.hpp"
|
||||
#include "etl/ETLService.hpp"
|
||||
#include "etl/LoadBalancer.hpp"
|
||||
#include "etlng/ETLServiceInterface.hpp"
|
||||
#include "etlng/LoadBalancerInterface.hpp"
|
||||
#include "feed/SubscriptionManagerInterface.hpp"
|
||||
#include "util/CoroutineGroup.hpp"
|
||||
#include "util/log/Logger.hpp"
|
||||
@@ -74,15 +74,12 @@ public:
|
||||
* @param ioc The io_context to stop.
|
||||
* @return The callback to be called on application stop.
|
||||
*/
|
||||
template <
|
||||
web::ng::SomeServer ServerType,
|
||||
etl::SomeLoadBalancer LoadBalancerType,
|
||||
etl::SomeETLService ETLServiceType>
|
||||
template <web::ng::SomeServer ServerType>
|
||||
static std::function<void(boost::asio::yield_context)>
|
||||
makeOnStopCallback(
|
||||
ServerType& server,
|
||||
LoadBalancerType& balancer,
|
||||
ETLServiceType& etl,
|
||||
etlng::LoadBalancerInterface& balancer,
|
||||
etlng::ETLServiceInterface& etl,
|
||||
feed::SubscriptionManagerInterface& subscriptions,
|
||||
data::BackendInterface& backend,
|
||||
boost::asio::io_context& ioc
|
||||
|
||||
src/cluster/CMakeLists.txt (5 lines, new file)
@@ -0,0 +1,5 @@
|
||||
add_library(clio_cluster)
|
||||
|
||||
target_sources(clio_cluster PRIVATE ClioNode.cpp ClusterCommunicationService.cpp)
|
||||
|
||||
target_link_libraries(clio_cluster PRIVATE clio_util clio_data)
|
||||
src/cluster/ClioNode.cpp (64 lines, new file)
@@ -0,0 +1,64 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of clio: https://github.com/XRPLF/clio
|
||||
Copyright (c) 2025, the clio developers.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#include "cluster/ClioNode.hpp"
|
||||
|
||||
#include "util/TimeUtils.hpp"
|
||||
|
||||
#include <boost/json/conversion.hpp>
|
||||
#include <boost/json/object.hpp>
|
||||
#include <boost/json/value.hpp>
|
||||
#include <boost/uuid/uuid.hpp>
|
||||
|
||||
#include <memory>
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
#include <string_view>
|
||||
|
||||
namespace cluster {
|
||||
|
||||
namespace {
|
||||
|
||||
struct Fields {
|
||||
static constexpr std::string_view const kUPDATE_TIME = "update_time";
|
||||
};
|
||||
|
||||
} // namespace
|
||||
|
||||
void
|
||||
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, ClioNode const& node)
|
||||
{
|
||||
jv = {
|
||||
{Fields::kUPDATE_TIME, util::systemTpToUtcStr(node.updateTime, ClioNode::kTIME_FORMAT)},
|
||||
};
|
||||
}
|
||||
|
||||
ClioNode
|
||||
tag_invoke(boost::json::value_to_tag<ClioNode>, boost::json::value const& jv)
|
||||
{
|
||||
auto const& updateTimeStr = jv.as_object().at(Fields::kUPDATE_TIME).as_string();
|
||||
auto const updateTime = util::systemTpFromUtcStr(std::string(updateTimeStr), ClioNode::kTIME_FORMAT);
|
||||
if (!updateTime.has_value()) {
|
||||
throw std::runtime_error("Failed to parse update time");
|
||||
}
|
||||
|
||||
return ClioNode{.uuid = std::make_shared<boost::uuids::uuid>(), .updateTime = updateTime.value()};
|
||||
}
|
||||
|
||||
} // namespace cluster
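
A minimal sketch (not part of this diff) of how the `tag_invoke` overloads above are picked up by Boost.JSON via ADL; the `main` function and the printed value are illustrative assumptions, and the time string simply follows `ClioNode::kTIME_FORMAT`:

```cpp
#include "cluster/ClioNode.hpp"

#include <boost/json/serialize.hpp>
#include <boost/json/value.hpp>
#include <boost/json/value_from.hpp>
#include <boost/json/value_to.hpp>
#include <boost/uuid/random_generator.hpp>
#include <boost/uuid/uuid.hpp>

#include <chrono>
#include <iostream>
#include <memory>

int
main()
{
    cluster::ClioNode const node{
        .uuid = std::make_shared<boost::uuids::uuid>(boost::uuids::random_generator{}()),
        .updateTime = std::chrono::system_clock::now()
    };

    // Serialization goes through tag_invoke(value_from_tag, ...) above and only
    // stores the update time, e.g. {"update_time":"2025-01-01T00:00:00Z"}.
    auto const jv = boost::json::value_from(node);
    std::cout << boost::json::serialize(jv) << '\n';

    // Deserialization goes through tag_invoke(value_to_tag<ClioNode>, ...).
    // The UUID is not part of the JSON; ClusterCommunicationService fills it in
    // from the database key after parsing.
    auto const restored = boost::json::value_to<cluster::ClioNode>(jv);
    return 0;
}
```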
|
||||
src/cluster/ClioNode.hpp (58 lines, new file)
@@ -0,0 +1,58 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of clio: https://github.com/XRPLF/clio
|
||||
Copyright (c) 2025, the clio developers.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <boost/json/conversion.hpp>
|
||||
#include <boost/json/value.hpp>
|
||||
#include <boost/uuid/uuid.hpp>
|
||||
|
||||
#include <chrono>
|
||||
#include <memory>
|
||||
|
||||
namespace cluster {
|
||||
|
||||
/**
|
||||
* @brief Represents a node in the cluster.
|
||||
*/
|
||||
struct ClioNode {
|
||||
/**
|
||||
* @brief The format of the time to store in the database.
|
||||
*/
|
||||
static constexpr char const* kTIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ";
|
||||
|
||||
// enum class WriterRole {
|
||||
// ReadOnly,
|
||||
// NotWriter,
|
||||
// Writer
|
||||
// };
|
||||
|
||||
std::shared_ptr<boost::uuids::uuid> uuid; ///< The UUID of the node.
|
||||
std::chrono::system_clock::time_point updateTime; ///< The time the data about the node was last updated.
|
||||
|
||||
// WriterRole writerRole;
|
||||
};
|
||||
|
||||
void
|
||||
tag_invoke(boost::json::value_from_tag, boost::json::value& jv, ClioNode const& node);
|
||||
|
||||
ClioNode
|
||||
tag_invoke(boost::json::value_to_tag<ClioNode>, boost::json::value const& jv);
|
||||
|
||||
} // namespace cluster
|
||||
src/cluster/ClusterCommunicationService.cpp (175 lines, new file)
@@ -0,0 +1,175 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of clio: https://github.com/XRPLF/clio
|
||||
Copyright (c) 2025, the clio developers.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#include "cluster/ClusterCommunicationService.hpp"
|
||||
|
||||
#include "cluster/ClioNode.hpp"
|
||||
#include "data/BackendInterface.hpp"
|
||||
#include "util/log/Logger.hpp"
|
||||
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/asio/steady_timer.hpp>
|
||||
#include <boost/json/parse.hpp>
|
||||
#include <boost/json/serialize.hpp>
|
||||
#include <boost/json/value.hpp>
|
||||
#include <boost/json/value_from.hpp>
|
||||
#include <boost/json/value_to.hpp>
|
||||
#include <boost/uuid/random_generator.hpp>
|
||||
#include <boost/uuid/uuid.hpp>
|
||||
|
||||
#include <chrono>
|
||||
#include <ctime>
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
namespace cluster {
|
||||
|
||||
ClusterCommunicationService::ClusterCommunicationService(
|
||||
std::shared_ptr<data::BackendInterface> backend,
|
||||
std::chrono::steady_clock::duration readInterval,
|
||||
std::chrono::steady_clock::duration writeInterval
|
||||
)
|
||||
: backend_(std::move(backend))
|
||||
, readInterval_(readInterval)
|
||||
, writeInterval_(writeInterval)
|
||||
, selfData_{ClioNode{
|
||||
.uuid = std::make_shared<boost::uuids::uuid>(boost::uuids::random_generator{}()),
|
||||
.updateTime = std::chrono::system_clock::time_point{}
|
||||
}}
|
||||
{
|
||||
nodesInClusterMetric_.set(1); // The node always sees itself
|
||||
isHealthy_ = true;
|
||||
}
|
||||
|
||||
void
|
||||
ClusterCommunicationService::run()
|
||||
{
|
||||
boost::asio::spawn(strand_, [this](boost::asio::yield_context yield) {
|
||||
boost::asio::steady_timer timer(yield.get_executor());
|
||||
while (true) {
|
||||
timer.expires_after(readInterval_);
|
||||
timer.async_wait(yield);
|
||||
doRead(yield);
|
||||
}
|
||||
});
|
||||
|
||||
boost::asio::spawn(strand_, [this](boost::asio::yield_context yield) {
|
||||
boost::asio::steady_timer timer(yield.get_executor());
|
||||
while (true) {
|
||||
doWrite();
|
||||
timer.expires_after(writeInterval_);
|
||||
timer.async_wait(yield);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
ClusterCommunicationService::~ClusterCommunicationService()
|
||||
{
|
||||
stop();
|
||||
}
|
||||
|
||||
void
|
||||
ClusterCommunicationService::stop()
|
||||
{
|
||||
if (stopped_)
|
||||
return;
|
||||
|
||||
ctx_.stop();
|
||||
ctx_.join();
|
||||
stopped_ = true;
|
||||
}
|
||||
|
||||
std::shared_ptr<boost::uuids::uuid>
|
||||
ClusterCommunicationService::selfUuid() const
|
||||
{
|
||||
// Uuid never changes so it is safe to copy it without using strand_
|
||||
return selfData_.uuid;
|
||||
}
|
||||
|
||||
ClioNode
|
||||
ClusterCommunicationService::selfData() const
|
||||
{
|
||||
ClioNode result{};
|
||||
boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) { result = selfData_; });
|
||||
return result;
|
||||
}
|
||||
|
||||
std::vector<ClioNode>
|
||||
ClusterCommunicationService::clusterData() const
|
||||
{
|
||||
std::vector<ClioNode> result;
|
||||
boost::asio::spawn(strand_, [this, &result](boost::asio::yield_context) {
|
||||
result = otherNodesData_;
|
||||
result.push_back(selfData_);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
void
|
||||
ClusterCommunicationService::doRead(boost::asio::yield_context yield)
|
||||
{
|
||||
otherNodesData_.clear();
|
||||
|
||||
auto const expectedResult = backend_->fetchClioNodesData(yield);
|
||||
if (!expectedResult.has_value()) {
|
||||
LOG(log_.error()) << "Failed to fetch nodes data";
|
||||
isHealthy_ = false;
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a new vector here to not have partially parsed data in otherNodesData_
|
||||
std::vector<ClioNode> otherNodesData;
|
||||
for (auto const& [uuid, nodeDataStr] : expectedResult.value()) {
|
||||
if (uuid == *selfData_.uuid) {
|
||||
continue;
|
||||
}
|
||||
|
||||
boost::system::error_code errorCode;
|
||||
auto const json = boost::json::parse(nodeDataStr, errorCode);
|
||||
if (errorCode.failed()) {
|
||||
LOG(log_.error()) << "Error parsing json from DB: " << nodeDataStr;
|
||||
isHealthy_ = false;
|
||||
return;
|
||||
}
|
||||
|
||||
auto expectedNodeData = boost::json::try_value_to<ClioNode>(json);
|
||||
if (expectedNodeData.has_error()) {
|
||||
LOG(log_.error()) << "Error converting json to ClioNode: " << json;
|
||||
isHealthy_ = false;
|
||||
return;
|
||||
}
|
||||
*expectedNodeData->uuid = uuid;
|
||||
otherNodesData.push_back(std::move(expectedNodeData).value());
|
||||
}
|
||||
otherNodesData_ = std::move(otherNodesData);
|
||||
nodesInClusterMetric_.set(otherNodesData_.size() + 1);
|
||||
isHealthy_ = true;
|
||||
}
|
||||
|
||||
void
|
||||
ClusterCommunicationService::doWrite()
|
||||
{
|
||||
selfData_.updateTime = std::chrono::system_clock::now();
|
||||
boost::json::value jsonValue{};
|
||||
boost::json::value_from(selfData_, jsonValue);
|
||||
backend_->writeNodeMessage(*selfData_.uuid, boost::json::serialize(jsonValue.as_object()));
|
||||
}
|
||||
|
||||
} // namespace cluster
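
A hedged usage sketch (not from the diff) of how the service is wired up, mirroring the `ClioApplication::run` changes earlier in this comparison; `exampleWiring` and `backend` are placeholders for any `data::BackendInterface` implementation:

```cpp
#include "cluster/ClusterCommunicationService.hpp"
#include "data/BackendInterface.hpp"

#include <memory>
#include <utility>

void
exampleWiring(std::shared_ptr<data::BackendInterface> backend)
{
    // Uses the default kDEFAULT_READ_INTERVAL / kDEFAULT_WRITE_INTERVAL.
    cluster::ClusterCommunicationService service{std::move(backend)};

    // Starts the two coroutines on the internal strand: one periodically writes
    // this node's ClioNode JSON via writeNodeMessage(), the other periodically
    // reads every node's entry via fetchClioNodesData().
    service.run();

    // At any point, other components can ask for a snapshot of the cluster.
    auto const nodes = service.clusterData();  // includes this node's own entry
    auto const self = service.selfUuid();      // stable for the process lifetime

    // stop() is also called from the destructor, so explicit shutdown is optional.
    service.stop();
}
```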
|
||||
src/cluster/ClusterCommunicationService.hpp (141 lines, new file)
@@ -0,0 +1,141 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of clio: https://github.com/XRPLF/clio
|
||||
Copyright (c) 2025, the clio developers.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "cluster/ClioNode.hpp"
|
||||
#include "cluster/ClusterCommunicationServiceInterface.hpp"
|
||||
#include "data/BackendInterface.hpp"
|
||||
#include "util/log/Logger.hpp"
|
||||
#include "util/prometheus/Bool.hpp"
|
||||
#include "util/prometheus/Gauge.hpp"
|
||||
#include "util/prometheus/Prometheus.hpp"
|
||||
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/asio/strand.hpp>
|
||||
#include <boost/asio/thread_pool.hpp>
|
||||
#include <boost/uuid/uuid.hpp>
|
||||
|
||||
#include <chrono>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
|
||||
namespace cluster {
|
||||
|
||||
/**
|
||||
* @brief Service to post and read messages to/from the cluster. It uses a backend to communicate with the cluster.
|
||||
*/
|
||||
class ClusterCommunicationService : public ClusterCommunicationServiceInterface {
|
||||
util::prometheus::GaugeInt& nodesInClusterMetric_ = PrometheusService::gaugeInt(
|
||||
"cluster_nodes_total_number",
|
||||
{},
|
||||
"Total number of nodes this node can detect in the cluster."
|
||||
);
|
||||
util::prometheus::Bool isHealthy_ = PrometheusService::boolMetric(
|
||||
"cluster_communication_is_healthy",
|
||||
{},
|
||||
"Whether cluster communication service is operating healthy (1 - healthy, 0 - we have a problem)"
|
||||
);
|
||||
|
||||
// TODO: Use util::async::CoroExecutionContext after https://github.com/XRPLF/clio/issues/1973 is implemented
|
||||
boost::asio::thread_pool ctx_{1};
|
||||
boost::asio::strand<boost::asio::thread_pool::executor_type> strand_ = boost::asio::make_strand(ctx_);
|
||||
|
||||
util::Logger log_{"ClusterCommunication"};
|
||||
|
||||
std::shared_ptr<data::BackendInterface> backend_;
|
||||
|
||||
std::chrono::steady_clock::duration readInterval_;
|
||||
std::chrono::steady_clock::duration writeInterval_;
|
||||
|
||||
ClioNode selfData_;
|
||||
std::vector<ClioNode> otherNodesData_;
|
||||
|
||||
bool stopped_ = false;
|
||||
|
||||
public:
|
||||
static constexpr std::chrono::milliseconds kDEFAULT_READ_INTERVAL{2100};
|
||||
static constexpr std::chrono::milliseconds kDEFAULT_WRITE_INTERVAL{1200};
|
||||
/**
|
||||
* @brief Construct a new Cluster Communication Service object.
|
||||
*
|
||||
* @param backend The backend to use for communication.
|
||||
* @param readInterval The interval to read messages from the cluster.
|
||||
* @param writeInterval The interval to write messages to the cluster.
|
||||
*/
|
||||
ClusterCommunicationService(
|
||||
std::shared_ptr<data::BackendInterface> backend,
|
||||
std::chrono::steady_clock::duration readInterval = kDEFAULT_READ_INTERVAL,
|
||||
std::chrono::steady_clock::duration writeInterval = kDEFAULT_WRITE_INTERVAL
|
||||
);
|
||||
|
||||
~ClusterCommunicationService() override;
|
||||
|
||||
/**
|
||||
* @brief Start the service.
|
||||
*/
|
||||
void
|
||||
run();
|
||||
|
||||
/**
|
||||
* @brief Stop the service.
|
||||
*/
|
||||
void
|
||||
stop();
|
||||
|
||||
ClusterCommunicationService(ClusterCommunicationService&&) = delete;
|
||||
ClusterCommunicationService(ClusterCommunicationService const&) = delete;
|
||||
ClusterCommunicationService&
|
||||
operator=(ClusterCommunicationService&&) = delete;
|
||||
ClusterCommunicationService&
|
||||
operator=(ClusterCommunicationService const&) = delete;
|
||||
|
||||
/**
|
||||
* @brief Get the UUID of the current node.
|
||||
*
|
||||
* @return The UUID of the current node.
|
||||
*/
|
||||
std::shared_ptr<boost::uuids::uuid>
|
||||
selfUuid() const;
|
||||
|
||||
/**
|
||||
* @brief Get the data of the current node.
|
||||
*
|
||||
* @return The data of the current node.
|
||||
*/
|
||||
ClioNode
|
||||
selfData() const override;
|
||||
|
||||
/**
|
||||
* @brief Get the data of all nodes in the cluster (including self).
|
||||
*
|
||||
* @return The data of all nodes in the cluster.
|
||||
*/
|
||||
std::vector<ClioNode>
|
||||
clusterData() const override;
|
||||
|
||||
private:
|
||||
void
|
||||
doRead(boost::asio::yield_context yield);
|
||||
|
||||
void
|
||||
doWrite();
|
||||
};
|
||||
|
||||
} // namespace cluster
|
||||
src/cluster/ClusterCommunicationServiceInterface.hpp (52 lines, new file)
@@ -0,0 +1,52 @@
|
||||
//------------------------------------------------------------------------------
|
||||
/*
|
||||
This file is part of clio: https://github.com/XRPLF/clio
|
||||
Copyright (c) 2025, the clio developers.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
//==============================================================================
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "cluster/ClioNode.hpp"
|
||||
|
||||
#include <vector>
|
||||
|
||||
namespace cluster {
|
||||
|
||||
/**
|
||||
* @brief Interface for the cluster communication service.
|
||||
*/
|
||||
class ClusterCommunicationServiceInterface {
|
||||
public:
|
||||
virtual ~ClusterCommunicationServiceInterface() = default;
|
||||
|
||||
/**
|
||||
* @brief Get the data of the current node.
|
||||
*
|
||||
* @return The data of the current node.
|
||||
*/
|
||||
[[nodiscard]] virtual ClioNode
|
||||
selfData() const = 0;
|
||||
|
||||
/**
|
||||
* @brief Get the data of all nodes in the cluster (including self).
|
||||
*
|
||||
* @return The data of all nodes in the cluster.
|
||||
*/
|
||||
[[nodiscard]] virtual std::vector<ClioNode>
|
||||
clusterData() const = 0;
|
||||
};
|
||||
|
||||
} // namespace cluster
|
||||
@@ -61,7 +61,7 @@ BackendInterface::finishWrites(std::uint32_t const ledgerSequence)
|
||||
LOG(gLog.debug()) << "Want finish writes for " << ledgerSequence;
|
||||
auto commitRes = doFinishWrites();
|
||||
if (commitRes) {
|
||||
LOG(gLog.debug()) << "Successfully commited. Updating range now to " << ledgerSequence;
|
||||
LOG(gLog.debug()) << "Successfully committed. Updating range now to " << ledgerSequence;
|
||||
updateRange(ledgerSequence);
|
||||
}
|
||||
return commitRes;
|
||||
@@ -246,7 +246,7 @@ BackendInterface::fetchBookOffers(
|
||||
auto end = std::chrono::system_clock::now();
|
||||
LOG(gLog.debug()) << "Fetching " << std::to_string(keys.size()) << " offers took "
|
||||
<< std::to_string(getMillis(mid - begin)) << " milliseconds. Fetching next dir took "
|
||||
<< std::to_string(succMillis) << " milliseonds. Fetched next dir " << std::to_string(numSucc)
|
||||
<< std::to_string(succMillis) << " milliseconds. Fetched next dir " << std::to_string(numSucc)
|
||||
<< " times"
|
||||
<< " Fetching next page of dir took " << std::to_string(pageMillis) << " milliseconds"
|
||||
<< ". num pages = " << std::to_string(numPages) << ". Fetching all objects took "
|
||||
|
||||
@@ -31,6 +31,7 @@
|
||||
#include <boost/json.hpp>
|
||||
#include <boost/json/object.hpp>
|
||||
#include <boost/utility/result_of.hpp>
|
||||
#include <boost/uuid/uuid.hpp>
|
||||
#include <xrpl/basics/base_uint.h>
|
||||
#include <xrpl/protocol/AccountID.h>
|
||||
#include <xrpl/protocol/Fees.h>
|
||||
@@ -68,7 +69,7 @@ public:
|
||||
|
||||
static constexpr std::size_t kDEFAULT_WAIT_BETWEEN_RETRY = 500;
|
||||
/**
|
||||
* @brief A helper function that catches DatabaseTimout exceptions and retries indefinitely.
|
||||
* @brief A helper function that catches DatabaseTimeout exceptions and retries indefinitely.
|
||||
*
|
||||
* @tparam FnType The type of function object to execute
|
||||
* @param func The function object to execute
|
||||
@@ -154,7 +155,7 @@ public:
|
||||
}
|
||||
virtual ~BackendInterface() = default;
|
||||
|
||||
// TODO: Remove this hack once old ETL is removed.
|
||||
// TODO https://github.com/XRPLF/clio/issues/1956: Remove this hack once old ETL is removed.
|
||||
// Cache should not be exposed thru BackendInterface
|
||||
|
||||
/**
|
||||
@@ -397,7 +398,7 @@ public:
|
||||
* @brief Fetches a specific ledger object.
|
||||
*
|
||||
* Currently the real fetch happens in doFetchLedgerObject and fetchLedgerObject attempts to fetch from Cache first
|
||||
* and only calls out to the real DB if a cache miss ocurred.
|
||||
* and only calls out to the real DB if a cache miss occurred.
|
||||
*
|
||||
* @param key The key of the object
|
||||
* @param sequence The ledger sequence to fetch for
|
||||
@@ -511,7 +512,7 @@ public:
|
||||
* @param key The key to fetch for
|
||||
* @param ledgerSequence The ledger sequence to fetch for
|
||||
* @param yield The coroutine context
|
||||
* @return The sucessor on success; nullopt otherwise
|
||||
* @return The successor on success; nullopt otherwise
|
||||
*/
|
||||
std::optional<LedgerObject>
|
||||
fetchSuccessorObject(ripple::uint256 key, std::uint32_t ledgerSequence, boost::asio::yield_context yield) const;
|
||||
@@ -525,7 +526,7 @@ public:
|
||||
* @param key The key to fetch for
|
||||
* @param ledgerSequence The ledger sequence to fetch for
|
||||
* @param yield The coroutine context
|
||||
* @return The sucessor key on success; nullopt otherwise
|
||||
* @return The successor key on success; nullopt otherwise
|
||||
*/
|
||||
std::optional<ripple::uint256>
|
||||
fetchSuccessorKey(ripple::uint256 key, std::uint32_t ledgerSequence, boost::asio::yield_context yield) const;
|
||||
@@ -536,7 +537,7 @@ public:
|
||||
* @param key The key to fetch for
|
||||
* @param ledgerSequence The ledger sequence to fetch for
|
||||
* @param yield The coroutine context
|
||||
* @return The sucessor on success; nullopt otherwise
|
||||
* @return The successor on success; nullopt otherwise
|
||||
*/
|
||||
virtual std::optional<ripple::uint256>
|
||||
doFetchSuccessorKey(ripple::uint256 key, std::uint32_t ledgerSequence, boost::asio::yield_context yield) const = 0;
|
||||
@@ -568,6 +569,15 @@ public:
|
||||
virtual std::optional<std::string>
|
||||
fetchMigratorStatus(std::string const& migratorName, boost::asio::yield_context yield) const = 0;
|
||||
|
||||
/**
|
||||
* @brief Fetches the data of all nodes in the cluster.
|
||||
*
|
||||
* @param yield The coroutine context
|
||||
* @return The data of all nodes in the cluster.
|
||||
*/
|
||||
[[nodiscard]] virtual std::expected<std::vector<std::pair<boost::uuids::uuid, std::string>>, std::string>
|
||||
fetchClioNodesData(boost::asio::yield_context yield) const = 0;
|
||||
|
||||
/**
|
||||
* @brief Synchronously fetches the ledger range from DB.
|
||||
*
|
||||
@@ -648,6 +658,14 @@ public:
|
||||
virtual void
|
||||
writeAccountTransactions(std::vector<AccountTransactionsData> data) = 0;
|
||||
|
||||
/**
|
||||
* @brief Write a new account transaction.
|
||||
*
|
||||
* @param record An object representing the account transaction
|
||||
*/
|
||||
virtual void
|
||||
writeAccountTransaction(AccountTransactionsData record) = 0;
|
||||
|
||||
/**
|
||||
* @brief Write NFTs transactions.
|
||||
*
|
||||
@@ -674,6 +692,15 @@ public:
|
||||
virtual void
|
||||
writeSuccessor(std::string&& key, std::uint32_t seq, std::string&& successor) = 0;
|
||||
|
||||
/**
|
||||
* @brief Write a node message. Used by ClusterCommunicationService
|
||||
*
|
||||
* @param uuid The UUID of the node
|
||||
* @param message The message to write
|
||||
*/
|
||||
virtual void
|
||||
writeNodeMessage(boost::uuids::uuid const& uuid, std::string message) = 0;
|
||||
|
||||
/**
|
||||
* @brief Starts a write transaction with the DB. No-op for cassandra.
|
||||
*
|
||||
|
||||
@@ -36,6 +36,8 @@
|
||||
|
||||
#include <boost/asio/spawn.hpp>
|
||||
#include <boost/json/object.hpp>
|
||||
#include <boost/uuid/string_generator.hpp>
|
||||
#include <boost/uuid/uuid.hpp>
|
||||
#include <cassandra.h>
|
||||
#include <fmt/core.h>
|
||||
#include <xrpl/basics/Blob.h>
|
||||
@@ -46,6 +48,7 @@
|
||||
#include <xrpl/protocol/LedgerHeader.h>
|
||||
#include <xrpl/protocol/nft.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <atomic>
|
||||
#include <chrono>
|
||||
#include <cstddef>
|
||||
@@ -777,7 +780,7 @@ public:
|
||||
|
||||
while (liveAccounts.size() < number) {
|
||||
Statement const statement = lastItem ? schema_->selectAccountFromToken.bind(*lastItem, Limit{pageSize})
|
||||
: schema_->selectAccountFromBegining.bind(Limit{pageSize});
|
||||
: schema_->selectAccountFromBeginning.bind(Limit{pageSize});
|
||||
|
||||
auto const res = executor_.read(yield, statement);
|
||||
if (res) {
|
||||
@@ -877,6 +880,22 @@ public:
|
||||
return {};
|
||||
}
|
||||
|
||||
std::expected<std::vector<std::pair<boost::uuids::uuid, std::string>>, std::string>
|
||||
fetchClioNodesData(boost::asio::yield_context yield) const override
|
||||
{
|
||||
auto const readResult = executor_.read(yield, schema_->selectClioNodesData);
|
||||
if (not readResult)
|
||||
return std::unexpected{readResult.error().message()};
|
||||
|
||||
std::vector<std::pair<boost::uuids::uuid, std::string>> result;
|
||||
|
||||
for (auto [uuid, message] : extract<boost::uuids::uuid, std::string>(*readResult)) {
|
||||
result.emplace_back(uuid, std::move(message));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
void
|
||||
doWriteLedgerObject(std::string&& key, std::uint32_t const seq, std::string&& blob) override
|
||||
{
|
||||
@@ -906,30 +925,42 @@ public:
|
||||
statements.reserve(data.size() * 10); // assume 10 transactions avg
|
||||
|
||||
for (auto& record : data) {
|
||||
std::transform(
|
||||
std::begin(record.accounts),
|
||||
std::end(record.accounts),
|
||||
std::back_inserter(statements),
|
||||
[this, &record](auto&& account) {
|
||||
return schema_->insertAccountTx.bind(
|
||||
std::forward<decltype(account)>(account),
|
||||
std::make_tuple(record.ledgerSequence, record.transactionIndex),
|
||||
record.txHash
|
||||
);
|
||||
}
|
||||
);
|
||||
std::ranges::transform(record.accounts, std::back_inserter(statements), [this, &record](auto&& account) {
|
||||
return schema_->insertAccountTx.bind(
|
||||
std::forward<decltype(account)>(account),
|
||||
std::make_tuple(record.ledgerSequence, record.transactionIndex),
|
||||
record.txHash
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
executor_.write(std::move(statements));
|
||||
}
|
||||
|
||||
void
|
||||
writeAccountTransaction(AccountTransactionsData record) override
|
||||
{
|
||||
std::vector<Statement> statements;
|
||||
statements.reserve(record.accounts.size());
|
||||
|
||||
std::ranges::transform(record.accounts, std::back_inserter(statements), [this, &record](auto&& account) {
|
||||
return schema_->insertAccountTx.bind(
|
||||
std::forward<decltype(account)>(account),
|
||||
std::make_tuple(record.ledgerSequence, record.transactionIndex),
|
||||
record.txHash
|
||||
);
|
||||
});
|
||||
|
||||
executor_.write(std::move(statements));
|
||||
}
|
||||
|
||||
void
|
||||
writeNFTTransactions(std::vector<NFTTransactionsData> const& data) override
|
||||
{
|
||||
std::vector<Statement> statements;
|
||||
statements.reserve(data.size());
|
||||
|
||||
std::transform(std::cbegin(data), std::cend(data), std::back_inserter(statements), [this](auto const& record) {
|
||||
std::ranges::transform(data, std::back_inserter(statements), [this](auto const& record) {
|
||||
return schema_->insertNFTTx.bind(
|
||||
record.tokenID, std::make_tuple(record.ledgerSequence, record.transactionIndex), record.txHash
|
||||
);
|
||||
@@ -999,7 +1030,7 @@ public:
|
||||
std::vector<Statement> statements;
|
||||
statements.reserve(data.size());
|
||||
for (auto [mptId, holder] : data)
|
||||
statements.push_back(schema_->insertMPTHolder.bind(std::move(mptId), std::move(holder)));
|
||||
statements.push_back(schema_->insertMPTHolder.bind(mptId, holder));
|
||||
|
||||
executor_.write(std::move(statements));
|
||||
}
|
||||
@@ -1019,6 +1050,12 @@ public:
|
||||
);
|
||||
}
|
||||
|
||||
void
|
||||
writeNodeMessage(boost::uuids::uuid const& uuid, std::string message) override
|
||||
{
|
||||
executor_.writeSync(schema_->updateClioNodeMessage, data::cassandra::Text{std::move(message)}, uuid);
|
||||
}
|
||||
|
||||
bool
|
||||
isTooBusy() const override
|
||||
{
|
||||
|
||||
@@ -54,7 +54,7 @@ struct AccountTransactionsData {
|
||||
* @param meta The transaction metadata
|
||||
* @param txHash The transaction hash
|
||||
*/
|
||||
AccountTransactionsData(ripple::TxMeta& meta, ripple::uint256 const& txHash)
|
||||
AccountTransactionsData(ripple::TxMeta const& meta, ripple::uint256 const& txHash)
|
||||
: accounts(meta.getAffectedAccounts())
|
||||
, ledgerSequence(meta.getLgrSeq())
|
||||
, transactionIndex(meta.getIndex())
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
#include "data/LedgerCache.hpp"
|
||||
|
||||
#include "data/Types.hpp"
|
||||
#include "etlng/Models.hpp"
|
||||
#include "util/Assert.hpp"
|
||||
|
||||
#include <xrpl/basics/base_uint.h>
|
||||
@@ -62,7 +63,7 @@ LedgerCache::update(std::vector<LedgerObject> const& objs, uint32_t seq, bool is
|
||||
if (seq > latestSeq_) {
|
||||
ASSERT(
|
||||
seq == latestSeq_ + 1 || latestSeq_ == 0,
|
||||
"New sequense must be either next or first. seq = {}, latestSeq_ = {}",
|
||||
"New sequence must be either next or first. seq = {}, latestSeq_ = {}",
|
||||
seq,
|
||||
latestSeq_
|
||||
);
|
||||
@@ -87,6 +88,42 @@ LedgerCache::update(std::vector<LedgerObject> const& objs, uint32_t seq, bool is
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
LedgerCache::update(std::vector<etlng::model::Object> const& objs, uint32_t seq)
|
||||
{
|
||||
if (disabled_)
|
||||
return;
|
||||
|
||||
std::scoped_lock const lck{mtx_};
|
||||
if (seq > latestSeq_) {
|
||||
ASSERT(
|
||||
seq == latestSeq_ + 1 || latestSeq_ == 0,
|
||||
"New sequence must be either next or first. seq = {}, latestSeq_ = {}",
|
||||
seq,
|
||||
latestSeq_
|
||||
);
|
||||
latestSeq_ = seq;
|
||||
}
|
||||
|
||||
deleted_.clear(); // previous update's deletes no longer needed
|
||||
|
||||
for (auto const& obj : objs) {
|
||||
if (!obj.data.empty()) {
|
||||
auto& e = map_[obj.key];
|
||||
if (seq > e.seq)
|
||||
e = {.seq = seq, .blob = obj.data};
|
||||
} else {
|
||||
if (map_.contains(obj.key))
|
||||
deleted_[obj.key] = map_[obj.key];
|
||||
|
||||
map_.erase(obj.key);
|
||||
if (!full_)
|
||||
deletes_.insert(obj.key);
|
||||
}
|
||||
}
|
||||
cv_.notify_all();
|
||||
}
|
||||
|
||||
std::optional<LedgerObject>
|
||||
LedgerCache::getSuccessor(ripple::uint256 const& key, uint32_t seq) const
|
||||
{
|
||||
@@ -139,6 +176,29 @@ LedgerCache::get(ripple::uint256 const& key, uint32_t seq) const
|
||||
return {e->second.blob};
|
||||
}
|
||||
|
||||
std::optional<Blob>
|
||||
LedgerCache::getDeleted(ripple::uint256 const& key, uint32_t seq) const
|
||||
{
|
||||
if (disabled_)
|
||||
return std::nullopt;
|
||||
|
||||
std::shared_lock const lck{mtx_};
|
||||
if (seq > latestSeq_)
|
||||
return std::nullopt;
|
||||
|
||||
++objectReqCounter_.get();
|
||||
|
||||
auto e = deleted_.find(key);
|
||||
if (e == deleted_.end())
|
||||
return std::nullopt;
|
||||
|
||||
if (seq < e->second.seq)
|
||||
return std::nullopt;
|
||||
|
||||
++objectHitCounter_.get();
|
||||
return {e->second.blob};
|
||||
}
|
||||
|
||||
void
|
||||
LedgerCache::setDisabled()
|
||||
{
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
|
||||
#include "data/LedgerCacheInterface.hpp"
|
||||
#include "data/Types.hpp"
|
||||
#include "etlng/Models.hpp"
|
||||
#include "util/prometheus/Bool.hpp"
|
||||
#include "util/prometheus/Counter.hpp"
|
||||
#include "util/prometheus/Label.hpp"
|
||||
@@ -29,7 +30,6 @@
|
||||
#include <xrpl/basics/base_uint.h>
|
||||
#include <xrpl/basics/hardened_hash.h>
|
||||
|
||||
#include <atomic>
|
||||
#include <condition_variable>
|
||||
#include <cstddef>
|
||||
#include <cstdint>
|
||||
@@ -74,6 +74,7 @@ class LedgerCache : public LedgerCacheInterface {
|
||||
)};
|
||||
|
||||
std::map<ripple::uint256, CacheEntry> map_;
|
||||
std::map<ripple::uint256, CacheEntry> deleted_;
|
||||
|
||||
mutable std::shared_mutex mtx_;
|
||||
std::condition_variable_any cv_;
|
||||
@@ -94,11 +95,17 @@ class LedgerCache : public LedgerCacheInterface {
|
||||
|
||||
public:
|
||||
void
|
||||
update(std::vector<LedgerObject> const& objs, uint32_t seq, bool isBackground = false) override;
|
||||
update(std::vector<LedgerObject> const& objs, uint32_t seq, bool isBackground) override;
|
||||
|
||||
void
|
||||
update(std::vector<etlng::model::Object> const& objs, uint32_t seq) override;
|
||||
|
||||
std::optional<Blob>
|
||||
get(ripple::uint256 const& key, uint32_t seq) const override;
|
||||
|
||||
std::optional<Blob>
|
||||
getDeleted(ripple::uint256 const& key, uint32_t seq) const override;
|
||||
|
||||
std::optional<LedgerObject>
|
||||
getSuccessor(ripple::uint256 const& key, uint32_t seq) const override;
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
#pragma once
|
||||
|
||||
#include "data/Types.hpp"
|
||||
#include "etlng/Models.hpp"
|
||||
|
||||
#include <xrpl/basics/base_uint.h>
|
||||
#include <xrpl/basics/hardened_hash.h>
|
||||
@@ -55,6 +56,15 @@ public:
|
||||
virtual void
|
||||
update(std::vector<LedgerObject> const& objs, uint32_t seq, bool isBackground = false) = 0;
|
||||
|
||||
/**
|
||||
* @brief Update the cache with new ledger objects.
|
||||
*
|
||||
* @param objs The ledger objects to update cache with
|
||||
* @param seq The sequence to update cache for
|
||||
*/
|
||||
virtual void
|
||||
update(std::vector<etlng::model::Object> const& objs, uint32_t seq) = 0;
|
||||
|
||||
/**
|
||||
* @brief Fetch a cached object by its key and sequence number.
|
||||
*
|
||||
@@ -65,6 +75,16 @@ public:
|
||||
virtual std::optional<Blob>
|
||||
get(ripple::uint256 const& key, uint32_t seq) const = 0;
|
||||
|
||||
/**
|
||||
* @brief Fetch a recently deleted object by its key and sequence number.
|
||||
*
|
||||
* @param key The key to fetch for
|
||||
* @param seq The sequence to fetch for
|
||||
* @return If found in deleted cache, will return the cached Blob; otherwise nullopt is returned
|
||||
*/
|
||||
virtual std::optional<Blob>
|
||||
getDeleted(ripple::uint256 const& key, uint32_t seq) const = 0;
|
||||
|
||||
/**
|
||||
* @brief Gets a cached successor.
|
||||
*
|
||||
|
||||
@@ -6,7 +6,7 @@ To support additional database types, you can create new classes that implement

## Data Model

The data model used by Clio to read and write ledger data is different from what `rippled` uses. `rippled` uses a novel data structure named [_SHAMap_](https://github.com/ripple/rippled/blob/master/src/ripple/shamap/README.md), which is a combination of a Merkle Tree and a Radix Trie. In a SHAMap, ledger objects are stored in the leaf vertices of the tree. Thus, looking up a record located at the leaf node of the SHAMap executes a tree search, where the path from the root node to the leaf node is the key of the record.

`rippled` nodes can also generate a proof-tree by forming a subtree with all the path nodes and their neighbors, which can then be used to prove the existence of the leaf node data to other `rippled` nodes. In short, the main purpose of the SHAMap data structure is to facilitate the fast validation of data integrity between different decentralized `rippled` nodes.

@@ -22,19 +22,19 @@ There are three main types of data in each XRP Ledger version:

Due to the structural differences of the different types of databases, Clio may choose to represent these data types using a different schema for each unique database type.

### Keywords

**Sequence**: A unique incrementing identification number used to label the different ledger versions.

**Hash**: The SHA512-half (calculate SHA512 and take the first 256 bits) hash of various ledger data like the entire ledger or specific ledger objects.

**Ledger Object**: The [binary-encoded](https://xrpl.org/serialization.html) STObject containing specific data (i.e. metadata, transaction data).

**Metadata**: The data containing [detailed information](https://xrpl.org/transaction-metadata.html#transaction-metadata) of the outcome of a specific transaction, regardless of whether the transaction was successful.

**Transaction data**: The data containing the [full details](https://xrpl.org/transaction-common-fields.html) of a specific transaction.

**Object Index**: The pseudo-random unique identifier of a ledger object, created by hashing the data of the object.

## Cassandra Implementation

@@ -58,11 +58,11 @@ Their schemas and how they work are detailed in the following sections.

### ledger_transactions

```sql
CREATE TABLE clio.ledger_transactions (
    ledger_sequence bigint, # The sequence number of the ledger version
    hash blob,              # Hash of all the transactions on this ledger version
    PRIMARY KEY (ledger_sequence, hash)
) WITH CLUSTERING ORDER BY (hash ASC) ...
```

@@ -70,37 +70,37 @@ This table stores the hashes of all transactions in a given ledger sequence and

### transactions

```sql
CREATE TABLE clio.transactions (
    hash blob PRIMARY KEY,  # The transaction hash
    date bigint,            # Date of the transaction
    ledger_sequence bigint, # The sequence that the transaction was validated
    metadata blob,          # Metadata of the transaction
    transaction blob        # Data of the transaction
) ...
```

This table stores the full transaction and metadata of each ledger version with the transaction hash as the primary key.

To look up all the transactions that were validated in a ledger version with sequence `n`, first get all the transaction hashes in that ledger version by querying `SELECT * FROM ledger_transactions WHERE ledger_sequence = n;`. Then, iterate through the list of hashes and query `SELECT * FROM transactions WHERE hash = one_of_the_hash_from_the_list;` to get the detailed transaction data.

### ledger_hashes

```sql
CREATE TABLE clio.ledger_hashes (
    hash blob PRIMARY KEY, # Hash of entire ledger version's data
    sequence bigint        # The sequence of the ledger version
) ...
```

This table stores the hash of all ledger versions by their sequences.

### ledger_range

```sql
CREATE TABLE clio.ledger_range (
    is_latest boolean PRIMARY KEY, # Whether this sequence is the stopping range
    sequence bigint                # The sequence number of the starting/stopping range
) ...
```

@@ -108,12 +108,12 @@ This table marks the range of ledger versions that is stored on this specific Ca

### objects

```sql
CREATE TABLE clio.objects (
    key blob,        # Object index of the object
    sequence bigint, # The sequence this object was last updated
    object blob,     # Data of the object
    PRIMARY KEY (key, sequence)
) WITH CLUSTERING ORDER BY (sequence DESC) ...
```

@@ -123,10 +123,10 @@ The table is updated when all data for a given ledger sequence has been written

### ledgers

```sql
CREATE TABLE clio.ledgers (
    sequence bigint PRIMARY KEY, # Sequence of the ledger version
    header blob                  # Data of the header
) ...
```

@@ -134,11 +134,11 @@ This table stores the ledger header data of specific ledger versions by their se

### diff

```sql
CREATE TABLE clio.diff (
    seq bigint, # Sequence of the ledger version
    key blob,   # Hash of changes in the ledger version
    PRIMARY KEY (seq, key)
) WITH CLUSTERING ORDER BY (key ASC) ...
```

@@ -146,12 +146,12 @@ This table stores the object index of all the changes in each ledger version.

### account_tx

```sql
CREATE TABLE clio.account_tx (
    account blob,
    seq_idx frozen<tuple<bigint, bigint>>, # Tuple of (ledger_index, transaction_index)
    hash blob,                             # Hash of the transaction
    PRIMARY KEY (account, seq_idx)
) WITH CLUSTERING ORDER BY (seq_idx DESC) ...
```

@@ -159,18 +159,18 @@ This table stores the list of transactions affecting a given account. This inclu
|
||||
|
||||

### successor

```sql
CREATE TABLE clio.successor (
    key blob,   # Object index
    seq bigint, # The sequence at which this ledger object's predecessor and successor were updated
    next blob,  # Index of the next object that existed in this sequence
    PRIMARY KEY (key, seq)
) WITH CLUSTERING ORDER BY (seq ASC) ...
```

This table is the backbone of how ledger object histories are stored in Cassandra. The `successor` table stores the object index of every ledger object that was validated on the XRP network, along with the ledger sequence at which the object was updated.

Because the records for each key are ordered by sequence, Clio can, by tracing through the table with a specific sequence number, recreate a Linked List data structure that represents all the ledger objects existing at that ledger sequence. The special values `0x00...00` and `0xFF...FF` are used to label the _head_ and _tail_ of the Linked List in the successor table.

The diagram below showcases how tracing through the same table with different sequence filters yields different Linked Lists, each representing the past state of the ledger objects at that point. A query like `SELECT * FROM successor WHERE key = ? AND seq <= n ORDER BY seq DESC LIMIT 1;` effectively traces through the successor table and recovers the Linked List of a specific sequence `n`, as sketched below.
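
A minimal sketch of that tracing, with placeholder bind values rather than Clio's actual read code: start at the head marker and keep re-issuing the same bounded query with the `next` value returned by the previous step, until the tail marker comes back.

```sql
-- Illustrative walk of the Linked List as of ledger sequence n.
-- Step 1: start at the head marker (bind the all-zero key 0x00...00).
SELECT next FROM clio.successor
WHERE key = ? AND seq <= ?
ORDER BY seq DESC LIMIT 1;

-- Step 2: re-run the same query with key = the `next` value just returned;
-- every index visited this way exists at sequence n.
-- Stop once `next` comes back as the tail marker 0xFF...FF.
SELECT next FROM clio.successor
WHERE key = ? AND seq <= ?
ORDER BY seq DESC LIMIT 1;
```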
@@ -182,12 +182,12 @@ In each new ledger version with sequence `n`, a ledger object `v` can either be

For all three of these operations, the procedure to update the successor table can be broken down into two steps (the resulting writes are sketched after the list):

1. Trace through the Linked List of the previous sequence to find the ledger object `e` with the greatest object index smaller than or equal to `v`'s index. Save `e`'s `next` value (the index of the next ledger object) as `w`.

2. If `v` is...
   1. Being **created**, add two new records of `seq=n`, with one being `e` pointing to `v`, and `v` pointing to `w` (Linked List insertion operation).
   2. Being **modified**, do nothing.
   3. Being **deleted**, add a record of `seq=n` with `e` pointing to `v`'s `next` value (Linked List deletion operation).
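
The sketch below shows the writes these steps boil down to, using invented bind names (`:e`, `:v`, `:w`, and `:v_next` are object indexes, `:n` is the new sequence); Clio's real ingestion code is C++ and will differ in how it batches and schedules these writes.

```sql
-- Creation of v at sequence n: e now points to v, and v points to w
-- (w is e's previous `next`).
INSERT INTO clio.successor (key, seq, next) VALUES (:e, :n, :v);
INSERT INTO clio.successor (key, seq, next) VALUES (:v, :n, :w);

-- Modification of v: no successor records are written.

-- Deletion of v at sequence n: e skips v and points to v's previous `next`.
INSERT INTO clio.successor (key, seq, next) VALUES (:e, :n, :v_next);
```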
## NFT data model
@@ -195,13 +195,13 @@ In `rippled` NFTs are stored in `NFTokenPage` ledger objects. This object is imp

### nf_tokens

```sql
CREATE TABLE clio.nf_tokens (
    token_id blob,     # The NFT's ID
    sequence bigint,   # Sequence of ledger version
    owner blob,        # The account ID of the owner of this NFT at this ledger
    is_burned boolean, # True if token was burned in this ledger
    PRIMARY KEY (token_id, sequence)
) WITH CLUSTERING ORDER BY (sequence DESC) ...
```
@@ -209,7 +209,7 @@ This table indexes NFT IDs with their owner at a given ledger.

The example query below shows how you could search for the owner of token `N` at ledger `Y` and see whether the token was burned.

```sql
SELECT * FROM nf_tokens
WHERE token_id = N AND sequence <= Y
ORDER BY sequence DESC LIMIT 1;
```
@@ -219,12 +219,12 @@ If the token is burned, the owner field indicates the account that owned the tok

### issuer_nf_tokens_v2

```sql
CREATE TABLE clio.issuer_nf_tokens_v2 (
    issuer blob,   # The NFT issuer's account ID
    taxon bigint,  # The NFT's token taxon
    token_id blob, # The NFT's ID
    PRIMARY KEY (issuer, taxon, token_id)
) WITH CLUSTERING ORDER BY (taxon ASC, token_id ASC) ...
```
@@ -233,12 +233,12 @@ combination. This is useful for determining all the NFTs a specific account issu
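
For instance (illustrative queries, not taken from Clio's code), everything an issuer has minted, or just one taxon of it, is a straightforward partition read:

```sql
-- Illustrative: every NFT ID minted by a given issuer.
SELECT token_id FROM clio.issuer_nf_tokens_v2 WHERE issuer = ?;

-- Illustrative: the same, narrowed to a single taxon.
SELECT token_id FROM clio.issuer_nf_tokens_v2 WHERE issuer = ? AND taxon = ?;
```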

### nf_token_uris

```sql
CREATE TABLE clio.nf_token_uris (
    token_id blob,   # The NFT's ID
    sequence bigint, # Sequence of ledger version
    uri blob,        # The NFT's URI
    PRIMARY KEY (token_id, sequence)
) WITH CLUSTERING ORDER BY (sequence DESC) ...
```
@@ -252,12 +252,12 @@ A given NFT will have only one entry in this table (see caveat below), and will

### nf_token_transactions

```sql
CREATE TABLE clio.nf_token_transactions (
    token_id blob,                 # The NFT's ID
    seq_idx tuple<bigint, bigint>, # Tuple of (ledger_index, transaction_index)
    hash blob,                     # Hash of the transaction
    PRIMARY KEY (token_id, seq_idx)
) WITH CLUSTERING ORDER BY (seq_idx DESC) ...
```
@@ -265,12 +265,12 @@ The `nf_token_transactions` table serves as the NFT counterpart to `account_tx`,

### migrator_status

```sql
CREATE TABLE clio.migrator_status (
    migrator_name TEXT, # The name of the migrator
    status TEXT,        # The status of the migrator
    PRIMARY KEY (migrator_name)
)
```

The `migrator_status` table stores the status of each migrator in this database. If a migrator's status is `migrated`, it means this database has finished data migration for that migrator.
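
As a small, hedged example (the migrator name here is made up), checking whether a particular migration has completed is a single-row read:

```sql
-- Illustrative: look up the status of a hypothetical migrator.
SELECT status FROM clio.migrator_status WHERE migrator_name = 'example_migrator';
```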