Compare commits


128 Commits

Author SHA1 Message Date
Alex Kremer
ee8a9f5ed0 Upgrade libxrpl to 2.2.0-b1 (#1237)
Fixes #1236
2024-03-05 23:46:35 +00:00
Sergey Kuznetsov
8dbdb9d8e3 Fix documentation comment (#1234) 2024-03-05 19:25:38 +00:00
Alex Kremer
644a1fdb43 Attempt at gh-pages deploy (#1233)
For #889
2024-03-05 18:13:22 +00:00
Sergey Kuznetsov
58a1833cf2 Add forwarding cache (#1204)
Fixes #51.
2024-03-05 18:09:29 +00:00
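
As a rough illustration of the idea behind a forwarding cache, here is a minimal TTL-keyed sketch in C++ (hypothetical; names and structure are invented for illustration, not taken from #1204):

#include <chrono>
#include <mutex>
#include <optional>
#include <string>
#include <unordered_map>

// Hypothetical sketch: remember forwarded responses for a short time so the
// same question is not re-asked of rippled on every request.
class ForwardingCache {
    using Clock = std::chrono::steady_clock;
    struct Entry {
        std::string response;
        Clock::time_point expiresAt;
    };

    std::unordered_map<std::string, Entry> cache_;
    std::chrono::milliseconds ttl_;
    std::mutex mutex_;

public:
    explicit ForwardingCache(std::chrono::milliseconds ttl) : ttl_{ttl} {}

    std::optional<std::string> get(std::string const& request) {
        std::lock_guard lock{mutex_};
        auto it = cache_.find(request);
        if (it == cache_.end() || Clock::now() >= it->second.expiresAt)
            return std::nullopt;  // miss or expired
        return it->second.response;
    }

    void put(std::string request, std::string response) {
        std::lock_guard lock{mutex_};
        cache_[std::move(request)] = {std::move(response), Clock::now() + ttl_};
    }
};
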
Alex Kremer
dc8d1658e3 Build docs and upload to gh-pages (#1232)
For #889
2024-03-05 17:08:35 +00:00
Alex Kremer
73d427c1cb Automatically detect missing doxygen comments (#1226)
Fixes #1216
2024-03-05 12:37:16 +00:00
github-actions[bot]
c7b637b3f3 [CI] clang-tidy auto fixes (#1228)
Co-authored-by: kuznetsss <kuznetsss@users.noreply.github.com>
2024-03-04 09:16:08 +00:00
Alex Kremer
51150d8474 Add doxygen to CI docker image (#1225)
For #1216
2024-03-01 17:00:27 +00:00
Alex Kremer
a74970b81e Add missing doxygen comments (#1223)
Fixes #1218
2024-03-01 15:58:18 +00:00
Sergey Kuznetsov
b3e63b2491 Add a note about pre-commit hook dependencies (#1220) 2024-02-28 16:32:32 +00:00
cyan317
a7f61c3e68 remove _ from public member (#1217) 2024-02-27 15:56:01 +00:00
Alex Kremer
862fc48924 Use doxygen-awesome theme (#1214)
For #889
2024-02-27 15:22:44 +00:00
Sergey Kuznetsov
98ebc92bff Add cmake-format to precommit hook (#1215)
Fixes #1067.
2024-02-27 11:40:23 +00:00
Sergey Kuznetsov
e98e74d768 Add cmake-format to docker and mac runners (#1213)
For #1067
2024-02-26 23:08:31 +00:00
Sergey Kuznetsov
c94f55b7eb Add doc for Random and TerminationHandler (#1206)
Fixes #1010
2024-02-26 23:07:05 +00:00
Sergey Kuznetsov
0f5da4414c Turn on macOS runners in CI (#1185) 2024-02-23 13:22:03 +00:00
Alex Kremer
33700e3305 Update issue templates (#1202)
Fixes #1203
2024-02-22 13:39:35 +00:00
Alex Kremer
a7a1a724e2 Fix logo backdrop and alignment (#1198) 2024-02-21 15:17:08 +00:00
github-actions[bot]
656ab286b6 [CI] clang-tidy auto fixes (#1197)
Fixes #1196
2024-02-21 13:50:46 +00:00
Alex Kremer
190b5c6a37 Rewrite cache loader to async framework (#1193)
Fixes #1188
2024-02-20 19:24:49 +00:00
Maria Shodunke
27fe35a2d1 Cleanup location of docs and example files (#1195) 2024-02-20 18:40:03 +00:00
Sergey Kuznetsov
62f55a7dce Refactor source (#1171)
For #51
2024-02-20 14:07:27 +00:00
Maria Shodunke
26d663c0be Split README into separate files (#1191)
Fixes #1059
2024-02-20 11:52:32 +00:00
github-actions[bot]
9b0dab602f [CI] clang-tidy auto fixes (#1190) 2024-02-16 10:21:59 +00:00
Alex Kremer
97a63db51d Basic execution context framework (#1004)
Fixes #1187
2024-02-15 20:36:00 +00:00
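
For readers unfamiliar with the term, an execution context is essentially a task queue plus the worker threads that drain it. A toy C++ sketch (hypothetical, far simpler than the framework introduced in #1004):

#include <condition_variable>
#include <cstddef>
#include <functional>
#include <mutex>
#include <queue>
#include <thread>
#include <vector>

// Toy sketch of an execution context: tasks are posted to a queue and run on
// a fixed pool of worker threads. A real framework adds strands, timers,
// cancellation, etc.
class ExecutionContext {
    std::queue<std::function<void()>> tasks_;
    std::vector<std::thread> workers_;
    std::mutex mutex_;
    std::condition_variable cv_;
    bool stopping_ = false;

public:
    explicit ExecutionContext(std::size_t numThreads) {
        for (std::size_t i = 0; i < numThreads; ++i)
            workers_.emplace_back([this] { run(); });
    }

    ~ExecutionContext() {
        {
            std::lock_guard lock{mutex_};
            stopping_ = true;
        }
        cv_.notify_all();
        for (auto& worker : workers_)
            worker.join();
    }

    void post(std::function<void()> task) {
        {
            std::lock_guard lock{mutex_};
            tasks_.push(std::move(task));
        }
        cv_.notify_one();
    }

private:
    void run() {
        while (true) {
            std::function<void()> task;
            {
                std::unique_lock lock{mutex_};
                cv_.wait(lock, [this] { return stopping_ || !tasks_.empty(); });
                if (stopping_ && tasks_.empty())
                    return;
                task = std::move(tasks_.front());
                tasks_.pop();
            }
            task();  // run outside the lock so tasks may post more work
        }
    }
};
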
Sergey Kuznetsov
75c6ad5c8d Improve Prometheus section in Readme (#1182) 2024-02-15 17:32:46 +00:00
github-actions[bot]
52d6d2c54f [CI] clang-tidy auto fixes (#1177)
Co-authored-by: kuznetsss <kuznetsss@users.noreply.github.com>
2024-02-15 09:10:54 +00:00
cyan317
b89cdb26f2 Use json value_to<string> to do the string convert (#1172)
Fix #953
2024-02-14 13:26:00 +00:00
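
The boost::json conversion mentioned in the title looks roughly like this (a minimal standalone example, not the actual patch):

#include <boost/json.hpp>
#include <iostream>
#include <string>

int main() {
    // Parse a JSON document and convert a field via value_to<std::string>
    // instead of manually serializing the underlying json value.
    auto const jv = boost::json::parse(R"({"status": "success"})");
    auto const status = boost::json::value_to<std::string>(jv.at("status"));
    std::cout << status << '\n';  // prints: success
}
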
Alex Kremer
cce695c570 Rename all detail to impl (#1168)
Fixes #1084
2024-02-12 11:54:41 +00:00
Alex Kremer
cea9c41a88 Upload codecov in separate workflow (#1166)
Fixes #1165
2024-02-08 16:20:54 +00:00
Sergey Kuznetsov
8575f786a8 Comment out macOS CI (#1164) 2024-02-07 15:57:50 +00:00
Alex Kremer
08b02c64cb Fix amm_info amounts in output map to user input (#1162)
Fixes #1156
2024-02-06 21:12:57 +00:00
Alex Kremer
b358649cf9 Add missing include (#1161)
Fixes #1160
2024-02-06 12:35:35 +00:00
github-actions[bot]
6bd72355db [CI] clang-tidy auto fixes (#1159)
Fixes #1158
2024-02-06 12:08:44 +00:00
Alex Kremer
a1699d7484 Rename headers to .hpp (#1154)
Fixes #1153
2024-02-05 13:10:50 +00:00
Sergey Kuznetsov
957aadd25a Requests library (#1140)
For #51.

First part of improving forwarding: a library for easy async requests.
2024-02-05 11:35:10 +00:00
Alex Kremer
8f89a5913d Fix paging bug in range deletion tool (#1150) 2024-02-02 16:29:17 +00:00
github-actions[bot]
ecfe5e84e5 [CI] clang-tidy auto fixes (#1152)
Co-authored-by: kuznetsss <kuznetsss@users.noreply.github.com>
2024-02-02 09:34:08 +00:00
Alex Kremer
03c0940649 Fix most includes in headers (#1149)
Fixes #1146
2024-02-01 12:49:11 +00:00
cyan317
dc5aacfe39 Side chain ledgerentry (#1144)
Fix #861
2024-02-01 09:12:24 +00:00
Alex Kremer
3fda74e3f7 Cassandra data removal tool (#1142)
Fixes #1143
2024-01-30 13:27:42 +00:00
Santiago Reig
df27c4e629 Forward server_state to rippled (#1135)
Fixes #1138.
2024-01-25 15:56:31 +00:00
cyan317
37ee74c293 Fix bookbase (#1139)
Fix #1137
2024-01-25 14:01:01 +00:00
Bronek Kozicki
ec335176bb Fixes for gcc 13 (#1128)
* Add include <cstdint> where needed
* Add .devcontainer to .gitignore
* Document gcc-13 compilation fix for rocksdb
2024-01-25 12:01:11 +00:00
cyan317
ab33b26ec4 Fix ETL race condition problem (#1132)
Wait for the previous publish to finish before switching to writer.
2024-01-24 16:55:08 +00:00
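
The fix described is a classic wait-for-completion pattern. A hypothetical C++ sketch (all names invented for illustration):

#include <condition_variable>
#include <mutex>

// Hypothetical sketch of the fix described above: the ETL source must not
// switch into writer mode while a publish is still in flight.
class PublishGate {
    std::mutex mutex_;
    std::condition_variable cv_;
    bool publishing_ = false;

public:
    void beginPublish() {
        std::lock_guard lock{mutex_};
        publishing_ = true;
    }

    void endPublish() {
        {
            std::lock_guard lock{mutex_};
            publishing_ = false;
        }
        cv_.notify_all();
    }

    // Called before switching to writer; blocks until any ongoing publish ends.
    void waitUntilIdle() {
        std::unique_lock lock{mutex_};
        cv_.wait(lock, [this] { return !publishing_; });
    }
};
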
cyan317
28c8fa2a9a Ledger entry type filter for account_objects and ledger_data (#1116)
Fix #1109
2024-01-17 17:29:59 +00:00
Alex Kremer
12bbed194c Rerun clang tidy on fix merge (#1124) 2024-01-16 18:14:09 +00:00
Alex Kremer
1fa09006f8 Trigger clang-tidy restart via git commands (#1123) 2024-01-16 16:44:35 +00:00
Alex Kremer
e3b6fc4bd4 [CI] clang-tidy auto fixes (manual) (#1122)
Fixes #1121
2024-01-16 16:13:52 +00:00
Alex Kremer
34594ff8c0 [CI] clang-tidy auto fixes (FAKE4) (#1120) 2024-01-16 14:49:28 +00:00
Alex Kremer
40eeb57920 Add permission for actions (#1119) 2024-01-16 14:44:04 +00:00
Alex Kremer
3eb36c049c [CI] clang-tidy auto fixes (FAKE3) (#1118) 2024-01-16 14:32:40 +00:00
Alex Kremer
81602e8ae7 Change to running workflow via gh (#1117) 2024-01-16 14:27:21 +00:00
Alex Kremer
0cef9e0620 [CI] clang-tidy auto fixes (FAKE2) (#1115) 2024-01-16 12:50:07 +00:00
Alex Kremer
81d1b30607 Use contains syntax to grep for title (#1114) 2024-01-16 12:46:02 +00:00
Alex Kremer
923d021c83 [CI] clang-tidy auto fixes (FAKE) (#1113) 2024-01-16 12:36:29 +00:00
Alex Kremer
cd2b09ffb7 Use contains syntax to grep for label (#1112) 2024-01-16 12:28:48 +00:00
github-actions[bot]
3c62a1f42c [CI] clang-tidy auto fixes (#1111)
Co-authored-by: kuznetsss <kuznetsss@users.noreply.github.com>
2024-01-16 09:19:57 +00:00
cyan317
f97e0690c8 Account tx type improvement (#1108)
Fix #1090
2024-01-16 09:18:47 +00:00
Alex Kremer
eeaccbabd9 Add attempt at auto rerun functionality (#1105)
Attempting to refactor clang-tidy as an action and reuse it from two workflows.
2024-01-15 19:48:59 +00:00
Alex Kremer
13d2d4e2ca Enable DB tests via ScyllaDB service (#1103)
Fixes #1092
2024-01-15 12:09:00 +00:00
cyan317
350a45e7e2 Fix unstable unittest (#1102)
Properly mock wsbase
2024-01-15 12:06:14 +00:00
Alex Kremer
ce86572274 Fix forwarded flag placement (#1101)
Fixes #1091
2024-01-12 14:02:50 +00:00
github-actions[bot]
ac97788db8 [CI] clang-tidy auto fixes (#1099)
Fixes #1098. Fixes #1100.

---------

Co-authored-by: kuznetsss <kuznetsss@users.noreply.github.com>
Co-authored-by: Sergey Kuznetsov <skuznetsov@ripple.com>
2024-01-12 12:29:51 +00:00
Alex Kremer
2893492569 Remove legacy hook (#1097) 2024-01-11 17:49:00 +00:00
Alex Kremer
b63e98bda0 Update libxrpl to 2.0.0 (#1096) 2024-01-11 16:36:39 +00:00
Alex Kremer
f4df5c2185 Implement amm_info handler (#1060)
Fixes #283
2024-01-11 15:57:53 +00:00
github-actions[bot]
93d5c12b14 [CI] clang-tidy auto fixes (#1094)
Co-authored-by: kuznetsss <kuznetsss@users.noreply.github.com>
2024-01-11 09:37:54 +00:00
cyan317
2514b7986e Fix unstable test (#1089) 2024-01-10 16:56:57 +00:00
cyan317
d30e63d49a add api_version to response (#1088)
Fix #1020
2024-01-09 15:53:09 +00:00
github-actions[bot]
61f1e0853d [CI] clang-tidy auto fixes (#1086)
Co-authored-by: kuznetsss <kuznetsss@users.noreply.github.com>
2024-01-09 09:35:42 +00:00
cyan317
eb1831c489 New subscription manager (#1071)
Fix #886
2024-01-08 14:45:57 +00:00
Shi Cheng
07bd4b0760 upload clio_server artifact (#1083) 2024-01-08 10:49:53 +00:00
Alex Kremer
e26a1e37b5 Improve batching code (#1079)
Fixes #1077
2024-01-05 15:44:30 +00:00
Sergey Kuznetsov
e89640bcfb Add debug cache to ci (#1078)
Fixes #1066
2024-01-05 10:59:26 +00:00
github-actions[bot]
ae135759ef [CI] clang-tidy auto fixes (#1081)
Co-authored-by: kuznetsss <kuznetsss@users.noreply.github.com>
2024-01-05 09:31:56 +00:00
Alex Kremer
28188aa0f9 Add batching to writes (#1076)
Fixes #1077
2024-01-04 15:17:15 +00:00
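
Write batching generally means buffering statements and submitting them together once a threshold is reached, saving one network round-trip per statement. A simplified sketch (hypothetical, not the actual Cassandra backend code):

#include <cstddef>
#include <functional>
#include <string>
#include <utility>
#include <vector>

// Hypothetical sketch: collect write statements and hand them to a submit
// callback in batches rather than one at a time.
class BatchWriter {
    std::vector<std::string> pending_;
    std::size_t batchSize_;
    std::function<void(std::vector<std::string>)> submit_;

public:
    BatchWriter(std::size_t batchSize, std::function<void(std::vector<std::string>)> submit)
        : batchSize_{batchSize}, submit_{std::move(submit)} {}

    void write(std::string statement) {
        pending_.push_back(std::move(statement));
        if (pending_.size() >= batchSize_)
            flush();
    }

    void flush() {
        if (!pending_.empty())
            submit_(std::exchange(pending_, {}));
    }
};
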
Sergey Kuznetsov
af485a0634 Add gcovr to CI docker image (#1072)
For #1066
2024-01-03 16:53:26 +00:00
github-actions[bot]
b609298870 [CI] clang-tidy auto fixes (#1070)
Co-authored-by: kuznetsss <kuznetsss@users.noreply.github.com>
2024-01-03 08:53:47 +00:00
Alex Kremer
d077093a8d Simplify backend mock access for unittests (#1062) 2024-01-02 13:35:57 +00:00
Alex Kremer
781f3b3c48 Bump libxrpl version to 2.0.0-rc6 (#1061)
Fixes #1063
2023-12-23 20:28:07 +00:00
Bronek Kozicki
a8bae96ad4 Add coverage_report target (#1058) 2023-12-21 15:08:32 +00:00
Bronek Kozicki
fe9649d872 Fix c++20 requires syntax (#1057) 2023-12-19 20:52:53 +00:00
Sergey Kuznetsov
431b5f5ab8 Add ccache mention in docs (#1055) 2023-12-18 16:43:15 +00:00
Sergey Kuznetsov
b1dc2775fb Remove exception text from error sending (#1048)
Fixes #1037
2023-12-13 16:30:16 +00:00
Elliot Lee
dd35a7cfd2 Update CONTRIBUTING.md (#1047) 2023-12-13 15:47:07 +00:00
github-actions[bot]
a9d685d5c0 [CI] clang-tidy auto fixes (#1046)
Fixes #1045.
2023-12-13 15:08:53 +00:00
Sergey Kuznetsov
6065d324b5 Remove push-to-fork in clang-tidy workflow 2023-12-13 14:23:21 +00:00
Sergey Kuznetsov
fe7b5fe18f Another try to sign commit in CI (#1043) 2023-12-13 13:54:28 +00:00
Sergey Kuznetsov
1c663988f5 Use different token to sign commits (#1041)
For #884
2023-12-13 13:23:24 +00:00
Sergey Kuznetsov
d11d566121 Fix wrong image (#1040)
For #884
2023-12-13 12:49:44 +00:00
Sergey Kuznetsov
a467cb2526 Add signing clang-tidy commit (#1036)
Fixes #884
2023-12-12 18:04:40 +00:00
Sergey Kuznetsov
f62e36dc94 Add status to readme (#1035)
For #844
2023-12-12 17:07:51 +00:00
Sergey Kuznetsov
d933ce2a29 Use clio_ci docker image (#1033)
Fixes #884
2023-12-12 16:03:08 +00:00
Sergey Kuznetsov
db751e3807 Make root default user in CI image (#1034)
For #884
2023-12-12 14:05:30 +00:00
Sergey Kuznetsov
3c4a8f0cfb Add conan setup into image (#1032)
For #884
2023-12-12 12:00:57 +00:00
Sergey Kuznetsov
397ce97175 Fix docker publish (#1027)
Fixes docker build for #884
2023-12-11 17:08:42 +00:00
Sergey Kuznetsov
ac6ad13f6c Fix release notes (#1022)
Fixes release notes for #884
2023-12-11 15:52:36 +00:00
Sergey Kuznetsov
7d1d1749bc Another fix of clang-tidy workflow (#1026)
Another fix for clang-tidy nightly check for #884
2023-12-11 15:11:30 +00:00
Sergey Kuznetsov
acf359d631 Fix permissions issue for clang-tidy (#1023)
Fixes issue creation for clang-tidy nightly checks for #884
2023-12-11 11:53:22 +00:00
Sergey Kuznetsov
a34e107b86 Add nightly builds (#1013)
Partially fixes #884.
Adds:
- Docker image for CI on Linux
- Nightly builds without cache and releases
- Nightly clang-tidy checks
- Fix typos in .clang-tidy
2023-12-08 18:22:22 +00:00
cyan317
b886586de3 Unify ledger_index type (#1019)
Fix #1014
2023-12-08 14:20:40 +00:00
cyan317
a57abb15a3 Fix example json format (#1018) 2023-12-05 12:45:01 +00:00
cyan317
c87586a265 Fix compiler error: header missing (#1016) 2023-12-04 13:45:48 +00:00
cyan317
8172670c93 Add close_time_iso to transaction stream (#1012)
Fix #1011
2023-11-30 13:32:50 +00:00
Sergey Kuznetsov
3fdcd3315b Make assert write to both log file and cerr (#1009) 2023-11-30 10:33:52 +00:00
cyan317
dd018f1c5e Fix ledger close_time_iso (#1008)
Fix #1007
2023-11-29 18:04:12 +00:00
Sergey Kuznetsov
c2b462da75 Fix paste on mac (#1006) 2023-11-29 15:41:45 +00:00
Sergey Kuznetsov
252920ec57 Fix CI 2023-11-29 15:24:50 +00:00
Sergey Kuznetsov
9ef6801c55 Fix git hook 2023-11-29 15:24:50 +00:00
Sergey Kuznetsov
24c562fa2a Add hostname resolving to dosguard (#1000)
Fixes #983.

Cassandra, ETL sources, and the cache already support hostname resolving.

Also added config to show missing includes by clangd.
2023-11-29 15:13:40 +00:00
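
Hostname resolution on top of Boost.Asio, which Clio already uses, can be sketched as follows (simplified illustration with error handling omitted; not the actual DOS guard code):

#include <boost/asio/io_context.hpp>
#include <boost/asio/ip/tcp.hpp>
#include <string>
#include <vector>

// Simplified sketch: resolve a configured hostname to IP addresses so a
// whitelist can match incoming connections by address.
std::vector<std::string>
resolveHostname(std::string const& host)
{
    boost::asio::io_context ctx;
    boost::asio::ip::tcp::resolver resolver{ctx};

    std::vector<std::string> addresses;
    // The service name is irrelevant here; we only collect addresses.
    for (auto const& entry : resolver.resolve(host, "https"))
        addresses.push_back(entry.endpoint().address().to_string());
    return addresses;
}
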
Sergey Kuznetsov
35f119a268 Switch to llvm 17 tools (#1002)
Fixes #952
2023-11-28 20:09:58 +00:00
Sergey Kuznetsov
1be368dcaf Fix wrong assert (#1003) 2023-11-28 14:06:17 +00:00
cyan317
a5fbb01299 fix (#999)
Fix #985
2023-11-24 16:01:27 +00:00
Sergey Kuznetsov
3b75d88a35 Add server_definitions to forwarding set (#996)
Fixes #942
2023-11-22 16:21:03 +00:00
cyan317
f0224581a5 Fix nfts_by_issuer's DB issue (#997)
Fix #988
2023-11-22 15:55:46 +00:00
Sergey Kuznetsov
b998473673 Add compression and histogram metric type for Prometheus (#987)
Fixes #932
Also fixes #966

Decided not to add the Summary type because it provides the same functionality as Histogram but performs more calculations on the client side (the Clio side). See https://prometheus.io/docs/practices/histograms for a detailed comparison.
2023-11-22 12:55:06 +00:00
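
To illustrate the trade-off: a histogram only bumps per-bucket counters on each observation and leaves quantile math to the Prometheus server, while a Summary would have to compute quantiles inside Clio. A minimal hand-rolled histogram sketch (illustrative only, not Clio's metrics API):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative sketch of a Prometheus-style histogram: each observation bumps
// the counter of the first bucket whose upper bound covers the value; the
// cumulative counts Prometheus expects can be derived at scrape time.
class Histogram {
    std::vector<double> bounds_;         // upper bounds, sorted ascending
    std::vector<std::uint64_t> counts_;  // one counter per bucket, plus "+Inf"
    double sum_ = 0.0;

public:
    explicit Histogram(std::vector<double> bounds)
        : bounds_{std::move(bounds)}, counts_(bounds_.size() + 1, 0) {}

    void observe(double value) {
        auto it = std::lower_bound(bounds_.begin(), bounds_.end(), value);
        ++counts_[static_cast<std::size_t>(it - bounds_.begin())];
        sum_ += value;
    }

    double sum() const { return sum_; }
    std::vector<std::uint64_t> const& bucketCounts() const { return counts_; }
};
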
Sergey Kuznetsov
8ebe2d6a80 Add assertion that terminates clio (#994)
Fixes #893.

Also added termination handler to print backtrace on crash, so fixes #929.
2023-11-21 13:06:04 +00:00
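
A termination handler that prints a backtrace can be sketched as follows on glibc systems (illustrative only; the actual implementation lives in #994):

#include <cstdlib>
#include <exception>
#include <execinfo.h>  // glibc-specific backtrace support
#include <unistd.h>

// Illustrative sketch (glibc-only): install a std::terminate handler that
// dumps a raw backtrace before aborting, so crashes leave a trail in logs.
void terminationHandler() {
    void* frames[64];
    int const count = backtrace(frames, 64);
    backtrace_symbols_fd(frames, count, STDERR_FILENO);
    std::abort();
}

int main() {
    std::set_terminate(terminationHandler);
    // ... run the application; any unhandled exception or failed assertion
    // that ends in std::terminate() now prints a backtrace first.
}
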
Sergey Kuznetsov
3bab90ca7a Comment out gcc-only checks (#995) 2023-11-21 09:53:08 +00:00
cyan317
74660aebf1 binary (#993)
Fix #984
2023-11-20 17:53:34 +00:00
cyan317
db08de466a Unify json (#992)
Fix #962
2023-11-20 13:09:28 +00:00
Alex Kremer
1bacad9e49 Update xrpl version to 2.0.0-rc1 (#990)
Fixes #989
2023-11-15 19:40:38 +00:00
cyan317
ca16858878 Add DeliverMax for Tx streams (#980) 2023-11-13 13:29:36 +00:00
cyan317
feae85782c DeliverMax alias of Payment tx (#979)
Fix #973
2023-11-09 13:35:08 +00:00
cyan317
b016c1d7ba Fix lowercase ctid (#977)
Fix #963
2023-11-07 16:10:12 +00:00
Sergey Kuznetsov
0597a9d685 Add amm type to account objects (#975)
Fixes #834
2023-11-03 13:54:54 +00:00
cyan317
05bea6a971 add amm filter (#972)
Fix #968
2023-11-03 13:12:36 +00:00
cyan317
fa660ef400 Implement DID (#967)
Fix #918
2023-11-03 09:40:40 +00:00
531 changed files with 40705 additions and 13866 deletions

.clang-format

@@ -33,12 +33,13 @@ DisableFormat: false
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: true
ForEachMacros: [ Q_FOREACH, BOOST_FOREACH ]
IncludeBlocks: Regroup
IncludeCategories:
- Regex: '^<(BeastConfig)'
Priority: 0
- Regex: '^<(ripple)/'
- Regex: '^".*"$'
Priority: 1
- Regex: '^<.*\.(h|hpp)>$'
Priority: 2
- Regex: '^<(boost)/'
- Regex: '^<.*>$'
Priority: 3
- Regex: '.*'
Priority: 4

.clang-tidy

@@ -7,6 +7,7 @@ Checks: '-*,
bugprone-copy-constructor-init,
bugprone-dangling-handle,
bugprone-dynamic-static-initializers,
bugprone-empty-catch,
bugprone-fold-init-type,
bugprone-forward-declaration-namespace,
bugprone-inaccurate-erase,
@@ -20,11 +21,15 @@ Checks: '-*,
bugprone-misplaced-pointer-arithmetic-in-alloc,
bugprone-misplaced-widening-cast,
bugprone-move-forwarding-reference,
bugprone-multiple-new-in-one-expression,
bugprone-multiple-statement-macro,
bugprone-no-escape,
bugprone-non-zero-enum-to-bool-conversion,
bugprone-parent-virtual-call,
bugprone-posix-return,
bugprone-redundant-branch-condition,
bugprone-reserved-identifier,
bugprone-unused-return-value,
bugprone-shared-ptr-array-mismatch,
bugprone-signal-handler,
bugprone-signed-char-misuse,
@@ -45,6 +50,7 @@ Checks: '-*,
bugprone-suspicious-semicolon,
bugprone-suspicious-string-compare,
bugprone-swapped-arguments,
bugprone-switch-missing-default-case,
bugprone-terminating-continue,
bugprone-throw-keyword-missing,
bugprone-too-small-loop-variable,
@@ -52,18 +58,23 @@ Checks: '-*,
bugprone-undelegated-constructor,
bugprone-unhandled-exception-at-new,
bugprone-unhandled-self-assignment,
bugprone-unique-ptr-array-mismatch,
bugprone-unsafe-functions,
bugprone-unused-raii,
bugprone-unused-return-value,
bugprone-use-after-move,
bugprone-virtual-near-miss,
cppcoreguidelines-init-variables,
cppcoreguidelines-prefer-member-initializer,
cppcoreguidelines-misleading-capture-default-by-value,
cppcoreguidelines-pro-type-member-init,
cppcoreguidelines-pro-type-static-cast-downcast,
cppcoreguidelines-rvalue-reference-param-not-moved,
cppcoreguidelines-use-default-member-init,
cppcoreguidelines-virtual-class-destructor,
llvm-namespace-comment,
misc-const-correctness,
misc-definitions-in-headers,
misc-header-include-cycle,
misc-include-cleaner,
misc-misplaced-const,
misc-redundant-expression,
misc-static-assert,
@@ -75,6 +86,7 @@ Checks: '-*,
modernize-make-shared,
modernize-make-unique,
modernize-pass-by-value,
modernize-type-traits,
modernize-use-emplace,
modernize-use-equals-default,
modernize-use-equals-delete,
@@ -88,7 +100,6 @@ Checks: '-*,
performance-move-constructor-init,
performance-no-automatic-move,
performance-trivially-destructible,
readability-avoid-const-params-in-decls,
readability-braces-around-statements,
readability-const-return-type,
readability-container-contains,
@@ -112,7 +123,10 @@ Checks: '-*,
CheckOptions:
readability-braces-around-statements.ShortStatementLines: 2
bugprone-unsafe-functions.ReportMoreUnsafeFunctions: true
bugprone-unused-return-value.CheckedReturnTypes: ::std::error_code;::std::error_condition;::std::errc;::std::expected
misc-include-cleaner.IgnoreHeaders: '.*/(detail|impl)/.*'
HeaderFilterRegex: '^.*/(src|unitests)/.*\.(h|hpp)$'
HeaderFilterRegex: '^.*/(src|unittests)/.*\.(h|hpp)$'
WarningsAsErrors: '*'

.clangd (new file)

@@ -0,0 +1,5 @@
Diagnostics:
UnusedIncludes: Strict
MissingIncludes: Strict
Includes:
IgnoreHeader: ".*/(detail|impl)/.*"

.cmake-format.yaml (new file)

@@ -0,0 +1,245 @@
_help_parse: Options affecting listfile parsing
parse:
_help_additional_commands:
- Specify structure for custom cmake functions
additional_commands:
foo:
flags:
- BAR
- BAZ
kwargs:
HEADERS: '*'
SOURCES: '*'
DEPENDS: '*'
_help_override_spec:
- Override configurations per-command where available
override_spec: {}
_help_vartags:
- Specify variable tags.
vartags: []
_help_proptags:
- Specify property tags.
proptags: []
_help_format: Options affecting formatting.
format:
_help_disable:
- Disable formatting entirely, making cmake-format a no-op
disable: false
_help_line_width:
- How wide to allow formatted cmake files
line_width: 120
_help_tab_size:
- How many spaces to tab for indent
tab_size: 2
_help_use_tabchars:
- If true, lines are indented using tab characters (utf-8
- 0x09) instead of <tab_size> space characters (utf-8 0x20).
- In cases where the layout would require a fractional tab
- character, the behavior of the fractional indentation is
- governed by <fractional_tab_policy>
use_tabchars: false
_help_fractional_tab_policy:
- If <use_tabchars> is True, then the value of this variable
- indicates how fractional indentions are handled during
- whitespace replacement. If set to 'use-space', fractional
- indentation is left as spaces (utf-8 0x20). If set to
- '`round-up` fractional indentation is replaced with a single'
- tab character (utf-8 0x09) effectively shifting the column
- to the next tabstop
fractional_tab_policy: use-space
_help_max_subgroups_hwrap:
- If an argument group contains more than this many sub-groups
- (parg or kwarg groups) then force it to a vertical layout.
max_subgroups_hwrap: 4
_help_max_pargs_hwrap:
- If a positional argument group contains more than this many
- arguments, then force it to a vertical layout.
max_pargs_hwrap: 6
_help_max_rows_cmdline:
- If a cmdline positional group consumes more than this many
- lines without nesting, then invalidate the layout (and nest)
max_rows_cmdline: 2
_help_separate_ctrl_name_with_space:
- If true, separate flow control names from their parentheses
- with a space
separate_ctrl_name_with_space: true
_help_separate_fn_name_with_space:
- If true, separate function names from parentheses with a
- space
separate_fn_name_with_space: false
_help_dangle_parens:
- If a statement is wrapped to more than one line, than dangle
- the closing parenthesis on its own line.
dangle_parens: true
_help_dangle_align:
- If the trailing parenthesis must be 'dangled' on its on
- 'line, then align it to this reference: `prefix`: the start'
- 'of the statement, `prefix-indent`: the start of the'
- 'statement, plus one indentation level, `child`: align to'
- the column of the arguments
dangle_align: prefix
_help_min_prefix_chars:
- If the statement spelling length (including space and
- parenthesis) is smaller than this amount, then force reject
- nested layouts.
min_prefix_chars: 4
_help_max_prefix_chars:
- If the statement spelling length (including space and
- parenthesis) is larger than the tab width by more than this
- amount, then force reject un-nested layouts.
max_prefix_chars: 10
_help_max_lines_hwrap:
- If a candidate layout is wrapped horizontally but it exceeds
- this many lines, then reject the layout.
max_lines_hwrap: 2
_help_line_ending:
- What style line endings to use in the output.
line_ending: unix
_help_command_case:
- Format command names consistently as 'lower' or 'upper' case
command_case: canonical
_help_keyword_case:
- Format keywords consistently as 'lower' or 'upper' case
keyword_case: unchanged
_help_always_wrap:
- A list of command names which should always be wrapped
always_wrap: []
_help_enable_sort:
- If true, the argument lists which are known to be sortable
- will be sorted lexicographically
enable_sort: true
_help_autosort:
- If true, the parsers may infer whether or not an argument
- list is sortable (without annotation).
autosort: true
_help_require_valid_layout:
- By default, if cmake-format cannot successfully fit
- everything into the desired linewidth it will apply the
- last, most aggressive attempt that it made. If this flag is
- True, however, cmake-format will print error, exit with non-
- zero status code, and write-out nothing
require_valid_layout: false
_help_layout_passes:
- A dictionary mapping layout nodes to a list of wrap
- decisions. See the documentation for more information.
layout_passes: {}
_help_markup: Options affecting comment reflow and formatting.
markup:
_help_bullet_char:
- What character to use for bulleted lists
bullet_char: '*'
_help_enum_char:
- What character to use as punctuation after numerals in an
- enumerated list
enum_char: .
_help_first_comment_is_literal:
- If comment markup is enabled, don't reflow the first comment
- block in each listfile. Use this to preserve formatting of
- your copyright/license statements.
first_comment_is_literal: false
_help_literal_comment_pattern:
- If comment markup is enabled, don't reflow any comment block
- which matches this (regex) pattern. Default is `None`
- (disabled).
literal_comment_pattern: null
_help_fence_pattern:
- Regular expression to match preformat fences in comments
- default= ``r'^\s*([`~]{3}[`~]*)(.*)$'``
fence_pattern: ^\s*([`~]{3}[`~]*)(.*)$
_help_ruler_pattern:
- Regular expression to match rulers in comments default=
- '``r''^\s*[^\w\s]{3}.*[^\w\s]{3}$''``'
ruler_pattern: ^\s*[^\w\s]{3}.*[^\w\s]{3}$
_help_explicit_trailing_pattern:
- If a comment line matches starts with this pattern then it
- is explicitly a trailing comment for the preceding
- argument. Default is '#<'
explicit_trailing_pattern: '#<'
_help_hashruler_min_length:
- If a comment line starts with at least this many consecutive
- hash characters, then don't lstrip() them off. This allows
- for lazy hash rulers where the first hash char is not
- separated by space
hashruler_min_length: 10
_help_canonicalize_hashrulers:
- If true, then insert a space between the first hash char and
- remaining hash chars in a hash ruler, and normalize its
- length to fill the column
canonicalize_hashrulers: true
_help_enable_markup:
- enable comment markup parsing and reflow
enable_markup: true
_help_lint: Options affecting the linter
lint:
_help_disabled_codes:
- a list of lint codes to disable
disabled_codes: []
_help_function_pattern:
- regular expression pattern describing valid function names
function_pattern: '[0-9a-z_]+'
_help_macro_pattern:
- regular expression pattern describing valid macro names
macro_pattern: '[0-9A-Z_]+'
_help_global_var_pattern:
- regular expression pattern describing valid names for
- variables with global (cache) scope
global_var_pattern: '[A-Z][0-9A-Z_]+'
_help_internal_var_pattern:
- regular expression pattern describing valid names for
- variables with global scope (but internal semantic)
internal_var_pattern: _[A-Z][0-9A-Z_]+
_help_local_var_pattern:
- regular expression pattern describing valid names for
- variables with local scope
local_var_pattern: '[a-z][a-z0-9_]+'
_help_private_var_pattern:
- regular expression pattern describing valid names for
- private directory variables
private_var_pattern: _[0-9a-z_]+
_help_public_var_pattern:
- regular expression pattern describing valid names for public
- directory variables
public_var_pattern: '[A-Z][0-9A-Z_]+'
_help_argument_var_pattern:
- regular expression pattern describing valid names for
- function/macro arguments and loop variables.
argument_var_pattern: '[a-z][a-z0-9_]+'
_help_keyword_pattern:
- regular expression pattern describing valid names for
- keywords used in functions or macros
keyword_pattern: '[A-Z][0-9A-Z_]+'
_help_max_conditionals_custom_parser:
- In the heuristic for C0201, how many conditionals to match
- within a loop before considering the loop a parser.
max_conditionals_custom_parser: 2
_help_min_statement_spacing:
- Require at least this many newlines between statements
min_statement_spacing: 1
_help_max_statement_spacing:
- Require no more than this many newlines between statements
max_statement_spacing: 2
max_returns: 6
max_branches: 12
max_arguments: 5
max_localvars: 15
max_statements: 50
_help_encode: Options affecting file encoding
encode:
_help_emit_byteorder_mark:
- If true, emit the unicode byte-order mark (BOM) at the start
- of the file
emit_byteorder_mark: false
_help_input_encoding:
- Specify the encoding of the input file. Defaults to utf-8
input_encoding: utf-8
_help_output_encoding:
- Specify the encoding of the output file. Defaults to utf-8.
- Note that cmake only claims to support utf-8 so be careful
- when using anything else
output_encoding: utf-8
_help_misc: Miscellaneous configurations options.
misc:
_help_per_command:
- A dictionary containing any per-command configuration
- overrides. Currently only `command_case` is supported.
per_command: {}

.codecov.yml (new file)

@@ -0,0 +1,11 @@
coverage:
status:
project:
default:
target: 50%
threshold: 2%
patch:
default:
target: 20% # Need to bump this number https://docs.codecov.com/docs/commit-status#patch-status
threshold: 2%

.githooks/check-docs (new executable file)

@@ -0,0 +1,62 @@
#!/bin/bash
# Note: This script is intended to be run from the root of the repository.
#
# Not really a hook but should be used to check the completeness of documentation for added code, otherwise CI will come for you.
# It's good to have /tmp as the output so that consecutive runs are fast without cluttering the repository.
echo "+ Checking documentation..."
ROOT=$(pwd)
DOXYGEN=$(command -v doxygen)
TMPDIR=${ROOT}/.cache/doxygen
TMPFILE=${TMPDIR}/docs.log
DOCDIR=${TMPDIR}/out
if [ -z "$DOXYGEN" ]; then
# No hard error if doxygen is not installed yet
cat <<EOF
WARNING
-----------------------------------------------------------------------------
'doxygen' is required to check documentation.
Please install it for next time. For the time being it's on CI.
-----------------------------------------------------------------------------
EOF
exit 0
fi
mkdir -p ${DOCDIR} > /dev/null 2>&1
pushd ${DOCDIR} > /dev/null 2>&1
cat ${ROOT}/docs/Doxyfile | \
sed \
-e "s/\${LINT}/YES/" \
-e "s!\${SOURCE}!${ROOT}!" \
-e "s/\${USE_DOT}/NO/" \
-e "s/\${EXCLUDES}/impl/" \
| ${DOXYGEN} - 2> ${TMPFILE} 1> /dev/null
# We don't want to check for default values and typedefs as well as for member variables
OUT=$(cat ${TMPFILE} \
| grep -v "=default" \
| grep -v "\(variable\)" \
| grep -v "\(typedef\)")
rm -rf ${TMPFILE} > /dev/null 2>&1
popd > /dev/null 2>&1
if [[ ! -z "$OUT" ]]; then
cat <<EOF
ERROR
-----------------------------------------------------------------------------
Found issues with documentation:
$OUT
-----------------------------------------------------------------------------
EOF
exit 2
fi

.githooks/check-format (new executable file)

@@ -0,0 +1,99 @@
#!/bin/bash
# Note: This script is intended to be run from the root of the repository.
#
# This script checks the format of the code and cmake files.
# In many cases it will automatically fix the issues and abort the commit.
echo "+ Checking code format..."
# paths to check and re-format
sources="src unittests"
formatter="clang-format -i"
version=$($formatter --version | grep -o '[0-9\.]*')
if [[ "17.0.0" > "$version" ]]; then
cat <<EOF
ERROR
-----------------------------------------------------------------------------
A minimum of version 17 of `which clang-format` is required.
Your version is $version.
Please fix paths and run again.
-----------------------------------------------------------------------------
EOF
exit 3
fi
# check there is no .h headers, only .hpp
wrong_headers=$(find $sources -name "*.h" | sed 's/^/ - /')
if [[ ! -z "$wrong_headers" ]]; then
cat <<EOF
ERROR
-----------------------------------------------------------------------------
Found .h headers in the source code. Please rename them to .hpp:
$wrong_headers
-----------------------------------------------------------------------------
EOF
exit 2
fi
if ! command -v cmake-format &> /dev/null; then
cat <<EOF
ERROR
-----------------------------------------------------------------------------
'cmake-format' is required to run this script.
Please install it and run again.
-----------------------------------------------------------------------------
EOF
exit 3
fi
function grep_code {
grep -l "${1}" ${sources} -r --include \*.hpp --include \*.cpp
}
if [[ "$OSTYPE" == "darwin"* ]]; then
# make all includes to be <...> style
grep_code '#include ".*"' | xargs sed -i '' -E 's|#include "(.*)"|#include <\1>|g'
# make local includes to be "..." style
main_src_dirs=$(find ./src -maxdepth 1 -type d -exec basename {} \; | tr '\n' '|' | sed 's/|$//' | sed 's/|/\\|/g')
grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i '' -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
else
# make all includes to be <...> style
grep_code '#include ".*"' | xargs sed -i -E 's|#include "(.*)"|#include <\1>|g'
# make local includes to be "..." style
main_src_dirs=$(find ./src -type d -maxdepth 1 -exec basename {} \; | paste -sd '|' | sed 's/|/\\|/g')
grep_code "#include <\($main_src_dirs\)/.*>" | xargs sed -i -E "s|#include <(($main_src_dirs)/.*)>|#include \"\1\"|g"
fi
cmake_dirs=$(echo CMake $sources)
cmake_files=$(find $cmake_dirs -type f \( -name "CMakeLists.txt" -o -name "*.cmake" \))
cmake_files=$(echo $cmake_files ./CMakeLists.txt)
first=$(git diff $sources $cmake_files)
find $sources -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.ipp' \) -print0 | xargs -0 $formatter
cmake-format -i $cmake_files
second=$(git diff $sources $cmake_files)
changes=$(diff <(echo "$first") <(echo "$second") | wc -l | sed -e 's/^[[:space:]]*//')
if [ "$changes" != "0" ]; then
cat <<\EOF
WARNING
-----------------------------------------------------------------------------
Automatically re-formatted code with 'clang-format' - commit was aborted.
Please manually add any updated files and commit again.
-----------------------------------------------------------------------------
EOF
exit 1
fi


@@ -1,20 +0,0 @@
#!/bin/bash
# Pushing a release branch requires an annotated tag at the released commit
branch=$(git rev-parse --abbrev-ref HEAD)
if [[ $branch =~ master ]]; then
# check if HEAD commit is tagged
if ! git describe --exact-match HEAD; then
echo "Commits to master must be tagged"
exit 1
fi
elif [[ $branch =~ release/* ]]; then
IFS=/ read -r branch rel_ver <<< ${branch}
tag=$(git describe --tags --abbrev=0)
if [[ "${rel_ver}" != "${tag}" ]]; then
echo "release/${rel_ver} branches must have annotated tag ${rel_ver}"
echo "git tag -am\"${rel_ver}\" ${rel_ver}"
exit 1
fi
fi


@@ -1,42 +1,6 @@
#!/bin/bash
exec 1>&2
# This script is intended to be run from the root of the repository.
# paths to check and re-format
sources="src unittests"
formatter="clang-format -i"
version=$($formatter --version | grep -o '[0-9\.]*')
if [[ "16.0.0" > "$version" ]]; then
cat <<EOF
ERROR
-----------------------------------------------------------------------------
A minimum of version 16 of `clang-format` is required.
Your version is $version.
Please fix paths and run again.
-----------------------------------------------------------------------------
EOF
exit 2
fi
first=$(git diff $sources)
find $sources -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 $formatter
second=$(git diff $sources)
changes=$(diff <(echo "$first") <(echo "$second") | wc -l | sed -e 's/^[[:space:]]*//')
if [ "$changes" != "0" ]; then
cat <<\EOF
WARNING
-----------------------------------------------------------------------------
Automatically re-formatted code with `clang-format` - commit was aborted.
Please manually add any updated files and commit again.
-----------------------------------------------------------------------------
EOF
exit 1
fi
.githooks/ensure_release_tag
source .githooks/check-format
source .githooks/check-docs

.github/ISSUE_TEMPLATE/bug_report.md (new file)

@@ -0,0 +1,30 @@
---
name: Bug report
about: Create a report to help us improve
title: "[Title with short description] (Version: [Clio version])"
labels: bug
assignees: ''
---
<!-- Please search existing issues to avoid creating duplicates. -->
## Issue Description
<!-- Provide a summary for your issue/bug. -->
## Steps to Reproduce
<!-- List in detail the exact steps to reproduce the unexpected behavior of the software. -->
## Expected Result
<!-- Explain in detail what behavior you expected to happen. -->
## Actual Result
<!-- Explain in detail what behavior actually happened. -->
## Environment
<!-- Please describe your environment setup (such as Ubuntu 20.04.2 with Boost 1.82). -->
<!-- Please use the version returned by './clio_server --version' as the version number -->
## Supporting Files
<!-- If you have supporting files such as a log, feel free to post a link here using Github Gist. -->
<!-- Consider adding configuration files with private information removed via Github Gist. -->

.github/ISSUE_TEMPLATE/feature_request.md (new file)

@@ -0,0 +1,22 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[Title with short description] (Version: [Clio version])"
labels: enhancement
assignees: ''
---
<!-- Please search existing issues to avoid creating duplicates. -->
## Summary
<!-- Provide a summary to the feature request -->
## Motivation
<!-- Why do we need this feature? -->
## Solution
<!-- What is the solution? -->
## Paths Not Taken
<!-- What other alternatives have been considered? -->

.github/ISSUE_TEMPLATE/question.md (new file)

@@ -0,0 +1,17 @@
---
name: Question
about: A question in form of an issue
title: "[Title with short description] (Version: Clio version)"
labels: question
assignees: ''
---
<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Consider starting a [discussion](https://github.com/XRPLF/clio/discussions) instead. -->
## Question
<!-- Your question -->
## Paths Not Taken
<!-- If applicable, what other alternatives have been considered? -->

.github/actions/build_clio/action.yml

@@ -1,37 +1,18 @@
name: Build clio
description: Build clio in build directory
inputs:
conan_profile:
description: Conan profile name
required: true
default: default
conan_cache_hit:
description: Whether conan cache has been downloaded
required: true
target:
description: Build target name
default: all
runs:
using: composite
steps:
- name: Get number of threads on mac
id: mac_threads
if: ${{ runner.os == 'macOS' }}
shell: bash
run: echo "num=$(($(sysctl -n hw.logicalcpu) - 2))" >> $GITHUB_OUTPUT
- name: Get number of threads on Linux
id: linux_threads
if: ${{ runner.os == 'Linux' }}
shell: bash
run: echo "num=$(($(nproc) - 2))" >> $GITHUB_OUTPUT
- name: Get number of threads
uses: ./.github/actions/get_number_of_threads
id: number_of_threads
- name: Build Clio
shell: bash
env:
BUILD_OPTION: "${{ inputs.conan_cache_hit == 'true' && 'missing' || '' }}"
LINT: "${{ runner.os == 'Linux' && 'True' || 'False' }}"
run: |
mkdir -p build
cd build
threads_num=${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}
conan install .. -of . -b $BUILD_OPTION -s build_type=Release -o clio:tests=True -o clio:lint=$LINT --profile ${{ inputs.conan_profile }}
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release .. -G Ninja
cmake --build . --parallel $threads_num
cmake --build . --parallel ${{ steps.number_of_threads.outputs.threads_number }} --target ${{ inputs.target }}

.github/actions/clang_format/action.yml (deleted)

@@ -1,27 +0,0 @@
name: Check format
description: Check format using clang-format-16
runs:
using: composite
steps:
- name: Add llvm repo
run: |
echo 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-16 main' | sudo tee -a /etc/apt/sources.list
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
shell: bash
- name: Install packages
run: |
sudo apt update -qq
sudo apt install -y jq clang-format-16
shell: bash
- name: Run formatter
run: |
find src unittests -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-16 -i
shell: bash
- name: Check for differences
id: assert
shell: bash
run: |
git diff --color --exit-code | tee "clang-format.patch"

.github/actions/code_coverage/action.yml (new file)

@@ -0,0 +1,21 @@
name: Generate code coverage report
description: Run tests, generate code coverage report and upload it to codecov.io
runs:
using: composite
steps:
- name: Run tests
shell: bash
run: |
build/clio_tests --backend_host=scylladb
- name: Run gcovr
shell: bash
run: |
gcovr -e unittests --xml build/coverage_report.xml -j8 --exclude-throw-branches
- name: Archive coverage report
uses: actions/upload-artifact@v3
with:
name: coverage-report.xml
path: build/coverage_report.xml
retention-days: 30

.github/actions/generate/action.yml (new file)

@@ -0,0 +1,41 @@
name: Run conan and cmake
description: Run conan and cmake
inputs:
conan_profile:
description: Conan profile name
required: true
conan_cache_hit:
description: Whether conan cache has been downloaded
required: true
default: 'false'
build_type:
description: Build type for third-party libraries and clio. Could be 'Release', 'Debug'
required: true
default: 'Release'
code_coverage:
description: Whether conan's coverage option should be on or not
required: true
default: 'false'
runs:
using: composite
steps:
- name: Create build directory
shell: bash
run: mkdir -p build
- name: Run conan
shell: bash
env:
BUILD_OPTION: "${{ inputs.conan_cache_hit == 'true' && 'missing' || '' }}"
CODE_COVERAGE: "${{ inputs.code_coverage == 'true' && 'True' || 'False' }}"
run: |
cd build
conan install .. -of . -b $BUILD_OPTION -s build_type=${{ inputs.build_type }} -o clio:tests=True -o clio:lint=False -o clio:coverage="${CODE_COVERAGE}" --profile ${{ inputs.conan_profile }}
- name: Run cmake
shell: bash
env:
BUILD_TYPE: "${{ inputs.build_type }}"
run: |
cd build
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=${{ inputs.build_type }} ${{ inputs.extra_cmake_args }} .. -G Ninja

.github/actions/get_number_of_threads/action.yml (new file)

@@ -0,0 +1,26 @@
name: Get number of threads
description: Determines number of threads to use on macOS and Linux
outputs:
threads_number:
description: Number of threads to use
value: ${{ steps.number_of_threads_export.outputs.num }}
runs:
using: composite
steps:
- name: Get number of threads on mac
id: mac_threads
if: ${{ runner.os == 'macOS' }}
shell: bash
run: echo "num=$(($(sysctl -n hw.logicalcpu) - 2))" >> $GITHUB_OUTPUT
- name: Get number of threads on Linux
id: linux_threads
if: ${{ runner.os == 'Linux' }}
shell: bash
run: echo "num=$(($(nproc) - 2))" >> $GITHUB_OUTPUT
- name: Export output variable
shell: bash
id: number_of_threads_export
run: |
echo "num=${{ steps.mac_threads.outputs.num || steps.linux_threads.outputs.num }}" >> $GITHUB_OUTPUT

.github/actions/prepare_runner/action.yml (new file)

@@ -0,0 +1,50 @@
name: Prepare runner
description: Install packages, set environment variables, create directories
inputs:
disable_ccache:
description: Whether ccache should be disabled
required: true
runs:
using: composite
steps:
- name: Install packages on mac
if: ${{ runner.os == 'macOS' }}
shell: bash
run: |
brew install llvm@14 pkg-config ninja bison cmake ccache jq gh conan@1
if ! command -v conan &> /dev/null; then
echo "/opt/homebrew/opt/conan@1/bin" >> $GITHUB_PATH
fi
- name: Fix git permissions on Linux
if: ${{ runner.os == 'Linux' }}
shell: bash
run: git config --global --add safe.directory $PWD
- name: Set env variables for macOS
if: ${{ runner.os == 'macOS' }}
shell: bash
run: |
echo "CCACHE_DIR=${{ github.workspace }}/.ccache" >> $GITHUB_ENV
echo "CONAN_USER_HOME=${{ github.workspace }}" >> $GITHUB_ENV
- name: Set env variables for Linux
if: ${{ runner.os == 'Linux' }}
shell: bash
run: |
echo "CCACHE_DIR=/root/.ccache" >> $GITHUB_ENV
echo "CONAN_USER_HOME=/root/" >> $GITHUB_ENV
- name: Set CCACHE_DISABLE=1
if: ${{ inputs.disable_ccache == 'true' }}
shell: bash
run: |
echo "CCACHE_DISABLE=1" >> $GITHUB_ENV
- name: Create directories
shell: bash
run: |
mkdir -p $CCACHE_DIR
mkdir -p $CONAN_USER_HOME/.conan

.github/actions/restore_cache/action.yml

@@ -7,6 +7,14 @@ inputs:
ccache_dir:
description: Path to .ccache directory
required: true
build_type:
description: Current build type (e.g. Release, Debug)
required: true
default: Release
code_coverage:
description: Whether code coverage is on
required: true
default: 'false'
outputs:
conan_hash:
description: Hash to use as a part of conan cache key
@@ -40,11 +48,12 @@ runs:
id: conan_cache
with:
path: ${{ inputs.conan_dir }}/data
key: clio-conan_data-${{ runner.os }}-develop-${{ steps.conan_hash.outputs.hash }}
key: clio-conan_data-${{ runner.os }}-${{ inputs.build_type }}-develop-${{ steps.conan_hash.outputs.hash }}
- name: Restore ccache cache
uses: actions/cache/restore@v3
id: ccache_cache
if: ${{ env.CCACHE_DISABLE != '1' }}
with:
path: ${{ inputs.ccache_dir }}
key: clio-ccache-${{ runner.os }}-develop-${{ steps.git_common_ancestor.outputs.commit }}
key: clio-ccache-${{ runner.os }}-${{ inputs.build_type }}${{ inputs.code_coverage == 'true' && '-code_coverage' || '' }}-develop-${{ steps.git_common_ancestor.outputs.commit }}

.github/actions/save_cache/action.yml

@@ -16,6 +16,16 @@ inputs:
ccache_cache_hit:
description: Whether conan cache has been downloaded
required: true
ccache_cache_miss_rate:
description: How many cache misses happened
build_type:
description: Current build type (e.g. Release, Debug)
required: true
default: Release
code_coverage:
description: Whether code coverage is on
required: true
default: 'false'
runs:
using: composite
steps:
@@ -34,13 +44,13 @@ runs:
uses: actions/cache/save@v3
with:
path: ${{ inputs.conan_dir }}/data
key: clio-conan_data-${{ runner.os }}-develop-${{ inputs.conan_hash }}
key: clio-conan_data-${{ runner.os }}-${{ inputs.build_type }}-develop-${{ inputs.conan_hash }}
- name: Save ccache cache
if: ${{ inputs.ccache_cache_hit != 'true' }}
if: ${{ inputs.ccache_cache_hit != 'true' || inputs.ccache_cache_miss_rate == '100.0' }}
uses: actions/cache/save@v3
with:
path: ${{ inputs.ccache_dir }}
key: clio-ccache-${{ runner.os }}-develop-${{ steps.git_common_ancestor.outputs.commit }}
key: clio-ccache-${{ runner.os }}-${{ inputs.build_type }}${{ inputs.code_coverage == 'true' && '-code_coverage' || '' }}-develop-${{ steps.git_common_ancestor.outputs.commit }}

.github/actions/setup_conan/action.yml

@@ -11,19 +11,15 @@ runs:
if: ${{ runner.os == 'macOS' }}
shell: bash
env:
CONAN_PROFILE: clio_clang_14
CONAN_PROFILE: clio_apple_clang_15
id: conan_setup_mac
run: |
echo "Creating $CONAN_PROFILE conan profile";
clang_path="$(brew --prefix llvm@14)/bin/clang"
clang_cxx_path="$(brew --prefix llvm@14)/bin/clang++"
conan profile new $CONAN_PROFILE --detect --force
conan profile update settings.compiler=clang $CONAN_PROFILE
conan profile update settings.compiler.version=14 $CONAN_PROFILE
conan profile update settings.compiler.libcxx=libc++ $CONAN_PROFILE
conan profile update settings.compiler.cppstd=20 $CONAN_PROFILE
conan profile update "conf.tools.build:compiler_executables={\"c\": \"$clang_path\", \"cpp\": \"$clang_cxx_path\"}" $CONAN_PROFILE
conan profile update env.CC="$clang_path" $CONAN_PROFILE
conan profile update env.CXX="$clang_cxx_path" $CONAN_PROFILE
conan profile update env.CXXFLAGS=-DBOOST_ASIO_DISABLE_CONCEPTS $CONAN_PROFILE
conan profile update "conf.tools.build:cxxflags+=[\"-DBOOST_ASIO_DISABLE_CONCEPTS\"]" $CONAN_PROFILE
echo "created_conan_profile=$CONAN_PROFILE" >> $GITHUB_OUTPUT
- name: On linux
@@ -31,9 +27,6 @@ runs:
shell: bash
id: conan_setup_linux
run: |
conan profile new default --detect
conan profile update settings.compiler.cppstd=20 default
conan profile update settings.compiler.libcxx=libstdc++11 default
echo "created_conan_profile=default" >> $GITHUB_OUTPUT
- name: Export output variable


@@ -1,4 +1,4 @@
name: Build Clio
name: Build
on:
push:
branches: [master, release/*, develop]
@@ -7,29 +7,75 @@ on:
workflow_dispatch:
jobs:
lint:
name: Lint
check_format:
name: Check format
runs-on: ubuntu-20.04
container:
image: rippleci/clio_ci:latest
steps:
- uses: actions/checkout@v3
- name: Run clang-format
uses: ./.github/actions/clang_format
build_mac:
name: Build macOS
needs: lint
runs-on: [self-hosted, macOS]
env:
CCACHE_DIR: ${{ github.workspace }}/.ccache
CONAN_USER_HOME: ${{ github.workspace }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install packages
- uses: actions/checkout@v4
- name: Run formatters
id: run_formatters
run: |
brew install llvm@14 pkg-config ninja bison cmake ccache jq
./.githooks/check-format
shell: bash
check_docs:
name: Check documentation
runs-on: ubuntu-20.04
container:
image: rippleci/clio_ci:latest
steps:
- uses: actions/checkout@v4
- name: Run linter
id: run_linter
run: |
./.githooks/check-docs
shell: bash
build:
name: Build
needs:
- check_format
- check_docs
strategy:
fail-fast: false
matrix:
include:
- os: heavy
container:
image: rippleci/clio_ci:latest
build_type: Release
code_coverage: false
- os: heavy
container:
image: rippleci/clio_ci:latest
build_type: Debug
code_coverage: true
- os: macos14
build_type: Release
code_coverage: false
runs-on: [self-hosted, "${{ matrix.os }}"]
container: ${{ matrix.container }}
services:
scylladb:
image: ${{ (matrix.code_coverage) && 'scylladb/scylla' || '' }}
options: >-
--health-cmd "cqlsh -e 'describe cluster'"
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prepare runner
uses: ./.github/actions/prepare_runner
with:
disable_ccache: false
- name: Setup conan
uses: ./.github/actions/setup_conan
@@ -41,88 +87,44 @@ jobs:
with:
conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
ccache_dir: ${{ env.CCACHE_DIR }}
build_type: ${{ matrix.build_type }}
code_coverage: ${{ matrix.code_coverage }}
- name: Build Clio
uses: ./.github/actions/build_clio
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ steps.conan.outputs.conan_profile }}
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
- name: Strip tests
run: strip build/clio_tests
- name: Upload clio_tests
uses: actions/upload-artifact@v3
with:
name: clio_tests_mac
path: build/clio_tests
- name: Save cache
uses: ./.github/actions/save_cache
with:
conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
conan_hash: ${{ steps.restore_cache.outputs.conan_hash }}
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
ccache_dir: ${{ env.CCACHE_DIR }}
ccache_cache_hit: ${{ steps.restore_cache.outputs.ccache_cache_hit }}
build_linux:
name: Build linux
needs: lint
runs-on: [self-hosted, Linux]
container:
image: conanio/gcc11:1.61.0
options: --user root
env:
CCACHE_DIR: /root/.ccache
CONAN_USER_HOME: /root/
steps:
- name: Get Clio
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Add llvm repo
run: |
echo 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-16 main' >> /etc/apt/sources.list
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
- name: Install packages
run: |
apt update -qq
apt install -y jq clang-tidy-16
- name: Install ccache
run: |
wget https://github.com/ccache/ccache/releases/download/v4.8.3/ccache-4.8.3-linux-x86_64.tar.xz
tar xf ./ccache-4.8.3-linux-x86_64.tar.xz
mv ./ccache-4.8.3-linux-x86_64/ccache /usr/bin/ccache
- name: Fix git permissions
run: git config --global --add safe.directory $PWD
- name: Setup conan
uses: ./.github/actions/setup_conan
- name: Restore cache
uses: ./.github/actions/restore_cache
id: restore_cache
with:
conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
ccache_dir: ${{ env.CCACHE_DIR }}
build_type: ${{ matrix.build_type }}
code_coverage: ${{ matrix.code_coverage }}
- name: Build Clio
uses: ./.github/actions/build_clio
with:
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
- name: Show ccache's statistics
shell: bash
id: ccache_stats
run: |
ccache -s > /tmp/ccache.stats
miss_rate=$(cat /tmp/ccache.stats | grep 'Misses' | head -n1 | sed 's/.*(\(.*\)%).*/\1/')
echo "miss_rate=${miss_rate}" >> $GITHUB_OUTPUT
cat /tmp/ccache.stats
- name: Strip tests
if: ${{ !matrix.code_coverage }}
run: strip build/clio_tests
- name: Upload clio_tests
- name: Upload clio_server
uses: actions/upload-artifact@v3
with:
name: clio_tests_linux
name: clio_server_${{ runner.os }}_${{ matrix.build_type }}
path: build/clio_server
- name: Upload clio_tests
if: ${{ !matrix.code_coverage }}
uses: actions/upload-artifact@v3
with:
name: clio_tests_${{ runner.os }}
path: build/clio_tests
- name: Save cache
@@ -133,26 +135,41 @@ jobs:
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
ccache_dir: ${{ env.CCACHE_DIR }}
ccache_cache_hit: ${{ steps.restore_cache.outputs.ccache_cache_hit }}
ccache_cache_miss_rate: ${{ steps.ccache_stats.outputs.miss_rate }}
build_type: ${{ matrix.build_type }}
code_coverage: ${{ matrix.code_coverage }}
# TODO: This is not part of the build process but it is the easiest way to do it here.
# It will be refactored in https://github.com/XRPLF/clio/issues/1075
- name: Run code coverage
if: ${{ matrix.code_coverage }}
uses: ./.github/actions/code_coverage
upload_coverage_report:
name: Codecov
needs: build
uses: ./.github/workflows/upload_coverage_report.yml
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
test:
name: Run Tests
needs: build
strategy:
fail-fast: false
matrix:
include:
- os: heavy
container:
image: rippleci/clio_ci:latest
- os: macos14
runs-on: [self-hosted, "${{ matrix.os }}"]
container: ${{ matrix.container }}
test_mac:
needs: build_mac
runs-on: [self-hosted, macOS]
steps:
- uses: actions/download-artifact@v3
with:
name: clio_tests_mac
- name: Run clio_tests
run: |
chmod +x ./clio_tests
./clio_tests --gtest_filter="-BackendCassandraBaseTest*:BackendCassandraTest*:BackendCassandraFactoryTestWithDB*"
test_linux:
needs: build_linux
runs-on: [self-hosted, x-heavy]
steps:
- uses: actions/download-artifact@v3
with:
name: clio_tests_linux
name: clio_tests_${{ runner.os }}
- name: Run clio_tests
run: |
chmod +x ./clio_tests

.github/workflows/clang-tidy.yml (new file)

@@ -0,0 +1,117 @@
name: Clang-tidy check
on:
schedule:
- cron: "0 6 * * 1-5"
workflow_dispatch:
pull_request:
branches: [develop]
paths:
- .clang_tidy
- .github/workflows/clang-tidy.yml
workflow_call:
jobs:
clang_tidy:
runs-on: [self-hosted, Linux]
container:
image: rippleci/clio_ci:latest
permissions:
contents: write
issues: write
pull-requests: write
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prepare runner
uses: ./.github/actions/prepare_runner
with:
disable_ccache: true
- name: Setup conan
uses: ./.github/actions/setup_conan
id: conan
- name: Restore cache
uses: ./.github/actions/restore_cache
id: restore_cache
with:
conan_dir: ${{ env.CONAN_USER_HOME }}/.conan
ccache_dir: ${{ env.CCACHE_DIR }}
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ steps.conan.outputs.conan_profile }}
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
build_type: Release
- name: Get number of threads
uses: ./.github/actions/get_number_of_threads
id: number_of_threads
- name: Run clang-tidy
continue-on-error: true
shell: bash
id: run_clang_tidy
run: |
run-clang-tidy-17 -p build -j ${{ steps.number_of_threads.outputs.threads_number }} -fix -quiet 1>output.txt
- name: Check format
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
continue-on-error: true
shell: bash
run: ./.githooks/check-format
- name: Print issues found
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
shell: bash
run: |
sed -i '/error\||/!d' ./output.txt
cat output.txt
rm output.txt
- name: Create an issue
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
id: create_issue
shell: bash
env:
GH_TOKEN: ${{ github.token }}
run: |
echo -e 'Clang-tidy found issues in the code:\n' > issue.md
echo -e "List of the issues found: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/" >> issue.md
gh issue create --assignee 'cindyyan317,godexsoft,kuznetsss' --label bug --title 'Clang-tidy found bugs in code🐛' --body-file ./issue.md > create_issue.log
created_issue=$(cat create_issue.log | sed 's|.*/||')
echo "created_issue=$created_issue" >> $GITHUB_OUTPUT
rm create_issue.log issue.md
- uses: crazy-max/ghaction-import-gpg@v5
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
with:
gpg_private_key: ${{ secrets.ACTIONS_GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.ACTIONS_GPG_PASSPHRASE }}
git_user_signingkey: true
git_commit_gpgsign: true
- name: Create PR with fixes
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
uses: peter-evans/create-pull-request@v5
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
with:
commit-message: "[CI] clang-tidy auto fixes"
committer: Clio CI <skuznetsov@ripple.com>
branch: "clang_tidy/autofix"
branch-suffix: timestamp
delete-branch: true
title: "[CI] clang-tidy auto fixes"
body: "Fixes #${{ steps.create_issue.outputs.created_issue }}. Please review and commit clang-tidy fixes."
reviewers: "cindyyan317,godexsoft,kuznetsss"
- name: Fail the job
if: ${{ steps.run_clang_tidy.outcome != 'success' }}
shell: bash
run: exit 1


@@ -0,0 +1,29 @@
name: Restart clang-tidy workflow
on:
push:
branches: [develop]
workflow_dispatch:
jobs:
restart_clang_tidy:
runs-on: ubuntu-20.04
permissions:
actions: write
steps:
- uses: actions/checkout@v4
- name: Check last commit matches clang-tidy auto fixes
id: check
shell: bash
run: |
passed=$(if [[ $(git log -1 --pretty=format:%s | grep '\[CI\] clang-tidy auto fixes') ]]; then echo 'true' ; else echo 'false' ; fi)
echo "passed=$passed" >> $GITHUB_OUTPUT
- name: Run clang-tidy workflow
if: ${{ contains(steps.check.outputs.passed, 'true') }}
shell: bash
env:
GH_TOKEN: ${{ github.token }}
GH_REPO: ${{ github.repository }}
run: gh workflow run clang-tidy.yml

.github/workflows/docs.yml (new file)

@@ -0,0 +1,47 @@
name: Documentation
on:
push:
branches: [release/*, develop]
workflow_dispatch:
permissions:
contents: read
pages: write
id-token: write
concurrency:
group: "pages"
cancel-in-progress: true
jobs:
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-20.04
continue-on-error: true
container:
image: rippleci/clio_ci:latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Build docs
run: |
mkdir -p build_docs && cd build_docs
cmake ../docs && cmake --build . --target docs
- name: Setup Pages
uses: actions/configure-pages@v3
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: build_docs/html
name: docs-develop # TODO: use x.y.z for `release/x.y.z` branches and `develop` for latest dev docs
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4
with:
artifact_name: docs-develop

.github/workflows/nightly.yml (new file)

@@ -0,0 +1,138 @@
name: Nightly release
on:
schedule:
- cron: '0 5 * * 1-5'
workflow_dispatch:
jobs:
build:
name: Build clio
strategy:
fail-fast: false
matrix:
include:
- os: macos14
build_type: Release
- os: heavy
build_type: Release
container:
image: rippleci/clio_ci:latest
- os: heavy
build_type: Debug
container:
image: rippleci/clio_ci:latest
runs-on: [self-hosted, "${{ matrix.os }}"]
container: ${{ matrix.container }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Prepare runner
uses: ./.github/actions/prepare_runner
with:
disable_ccache: true
- name: Setup conan
uses: ./.github/actions/setup_conan
id: conan
- name: Run conan and cmake
uses: ./.github/actions/generate
with:
conan_profile: ${{ steps.conan.outputs.conan_profile }}
conan_cache_hit: ${{ steps.restore_cache.outputs.conan_cache_hit }}
build_type: ${{ matrix.build_type }}
- name: Build Clio
uses: ./.github/actions/build_clio
- name: Strip tests
run: strip build/clio_tests
- name: Upload clio_tests
uses: actions/upload-artifact@v3
with:
name: clio_tests_${{ runner.os }}_${{ matrix.build_type }}
path: build/clio_tests
- name: Compress clio_server
shell: bash
run: |
cd build
tar czf ./clio_server_${{ runner.os }}_${{ matrix.build_type }}.tar.gz ./clio_server
- name: Upload clio_server
uses: actions/upload-artifact@v3
with:
name: clio_server_${{ runner.os }}_${{ matrix.build_type }}
path: build/clio_server_${{ runner.os }}_${{ matrix.build_type }}.tar.gz
run_tests:
needs: build
strategy:
fail-fast: false
matrix:
include:
- os: macos14
build_type: Release
- os: heavy
build_type: Release
- os: heavy
build_type: Debug
runs-on: [self-hosted, "${{ matrix.os }}"]
steps:
- uses: actions/download-artifact@v3
with:
name: clio_tests_${{ runner.os }}_${{ matrix.build_type }}
- name: Run clio_tests
run: |
chmod +x ./clio_tests
./clio_tests --gtest_filter="-BackendCassandraBaseTest*:BackendCassandraTest*:BackendCassandraFactoryTestWithDB*"
nightly_release:
needs: run_tests
runs-on: ubuntu-20.04
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v3
with:
path: nightly_release
- name: Prepare files
shell: bash
run: |
cp ${{ github.workspace }}/.github/workflows/nightly_notes.md "${RUNNER_TEMP}/nightly_notes.md"
cd nightly_release
rm -r clio_tests*
for d in $(ls); do
archive_name=$(ls $d)
mv ${d}/${archive_name} ./
rm -r $d
sha256sum ./$archive_name > ./${archive_name}.sha256sum
cat ./$archive_name.sha256sum >> "${RUNNER_TEMP}/nightly_notes.md"
done
echo '```' >> "${RUNNER_TEMP}/nightly_notes.md"
- name: Remove current nightly release and nightly tag
shell: bash
run: |
gh release delete nightly --yes || true
git push origin :nightly || true
- name: Publish nightly release
shell: bash
run: |
gh release create nightly --prerelease --title "Clio development (nightly) build" \
--target $GITHUB_SHA --notes-file "${RUNNER_TEMP}/nightly_notes.md" \
./nightly_release/clio_server*
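Since every archive gets a matching .sha256sum entry above, a downloaded nightly can be verified before unpacking. A minimal sketch, assuming a Linux Release build is wanted (names follow the clio_server_<os>_<build_type> pattern used by this workflow):

    gh release download nightly --repo XRPLF/clio --pattern 'clio_server_Linux_Release*'
    sha256sum -c clio_server_Linux_Release.tar.gz.sha256sum
    tar xzf clio_server_Linux_Release.tar.gz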

.github/workflows/nightly_notes.md (new file, 6 lines)

@@ -0,0 +1,6 @@
> **Note:** Please remember that this is a development release and it is not recommended for production use.
Changelog (including previous releases): https://github.com/XRPLF/clio/commits/nightly
## SHA256 checksums
```

.github/workflows/update_docker_ci.yml (new file, 40 lines)

@@ -0,0 +1,40 @@
name: Update CI docker image
on:
push:
branches: [develop]
paths:
- 'docker/ci/**'
- .github/workflows/update_docker_ci.yml
workflow_dispatch:
jobs:
build_and_push:
name: Build and push docker image
runs-on: ubuntu-20.04
steps:
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_PW }}
- uses: actions/checkout@v4
- uses: docker/setup-qemu-action@v3
- uses: docker/setup-buildx-action@v3
- uses: docker/metadata-action@v5
id: meta
with:
images: rippleci/clio_ci
tags: |
type=raw,value=latest
type=raw,value=gcc_11
type=raw,value=${{ env.GITHUB_SHA }}
- name: Build and push
uses: docker/build-push-action@v5
with:
context: ${{ github.workspace }}/docker/ci
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
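The metadata step tags each pushed image three ways, so consumers can either track latest or pin a build. For example (the SHA tag is whatever commit triggered the push):

    docker pull rippleci/clio_ci:latest
    docker pull rippleci/clio_ci:gcc_11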


@@ -0,0 +1,35 @@
name: Upload report
on:
workflow_dispatch:
workflow_call:
secrets:
CODECOV_TOKEN:
required: true
jobs:
upload_report:
name: Upload report
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Download report artifact
uses: actions/download-artifact@v3
with:
name: coverage-report.xml
path: build
- name: Upload coverage report
if: ${{ hashFiles('build/coverage_report.xml') != '' }}
uses: wandalen/wretry.action@v1.3.0
with:
action: codecov/codecov-action@v3
with: |
files: build/coverage_report.xml
fail_ci_if_error: false
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
attempt_limit: 5
attempt_delay: 10000

.gitignore (2 lines changed)

@@ -1,9 +1,11 @@
*clio*.log
/build*/
.devcontainer
.build
.cache
.vscode
.python-version
.DS_Store
CMakeUserPresets.json
config.json
src/main/impl/Build.cpp


@@ -17,7 +17,9 @@
*/
//==============================================================================
#include <main/Build.h>
#include "main/Build.hpp"
#include <string>
namespace Build {
static constexpr char versionString[] = "@VERSION@";


@@ -1,5 +1,5 @@
find_program (CCACHE_PATH "ccache")
find_program(CCACHE_PATH "ccache")
if (CCACHE_PATH)
set (CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PATH}")
message (STATUS "Using ccache: ${CCACHE_PATH}")
set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PATH}")
message(STATUS "Using ccache: ${CCACHE_PATH}")
endif ()
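With the launcher set, ccache wraps every compiler invocation transparently. A quick way to confirm it is actually being hit, assuming ccache is installed:

    ccache --zero-stats
    cmake --build build --parallel
    ccache --show-stats   # cache hit counters should now be non-zero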


@@ -1,42 +1,42 @@
if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 14)
message (FATAL_ERROR "Clang 14+ required for building clio")
message(FATAL_ERROR "Clang 14+ required for building clio")
endif ()
set (is_clang TRUE)
set(is_clang TRUE)
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang")
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 14)
message (FATAL_ERROR "AppleClang 14+ required for building clio")
message(FATAL_ERROR "AppleClang 14+ required for building clio")
endif ()
set (is_appleclang TRUE)
set(is_appleclang TRUE)
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 11)
message (FATAL_ERROR "GCC 11+ required for building clio")
message(FATAL_ERROR "GCC 11+ required for building clio")
endif ()
set (is_gcc TRUE)
set(is_gcc TRUE)
else ()
message (FATAL_ERROR "Supported compilers: AppleClang 14+, Clang 14+, GCC 11+")
message(FATAL_ERROR "Supported compilers: AppleClang 14+, Clang 14+, GCC 11+")
endif ()
if (san)
string (TOLOWER ${san} san)
set (SAN_FLAG "-fsanitize=${san}")
set (SAN_LIB "")
string(TOLOWER ${san} san)
set(SAN_FLAG "-fsanitize=${san}")
set(SAN_LIB "")
if (is_gcc)
if (san STREQUAL "address")
set (SAN_LIB "asan")
set(SAN_LIB "asan")
elseif (san STREQUAL "thread")
set (SAN_LIB "tsan")
set(SAN_LIB "tsan")
elseif (san STREQUAL "memory")
set (SAN_LIB "msan")
set(SAN_LIB "msan")
elseif (san STREQUAL "undefined")
set (SAN_LIB "ubsan")
set(SAN_LIB "ubsan")
endif ()
endif ()
set (_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
set (CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
CHECK_CXX_COMPILER_FLAG (${SAN_FLAG} COMPILER_SUPPORTS_SAN)
set (CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
set(_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
set(CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
check_cxx_compiler_flag(${SAN_FLAG} COMPILER_SUPPORTS_SAN)
set(CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
if (NOT COMPILER_SUPPORTS_SAN)
message (FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
endif ()
endif ()
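The san cache variable above injects -fsanitize=<name> only after check_cxx_compiler_flag confirms compiler support. A minimal local sketch, assuming dependencies are already set up as usual:

    cmake -Dsan=address -DCMAKE_BUILD_TYPE=Debug ..   # also accepts: thread, memory, undefined
    cmake --build . --parallel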


@@ -2,30 +2,32 @@ if (lint)
# Find clang-tidy binary
if (DEFINED ENV{CLIO_CLANG_TIDY_BIN})
set (_CLANG_TIDY_BIN $ENV{CLIO_CLANG_TIDY_BIN})
set(_CLANG_TIDY_BIN $ENV{CLIO_CLANG_TIDY_BIN})
if ((NOT EXISTS ${_CLANG_TIDY_BIN}) OR IS_DIRECTORY ${_CLANG_TIDY_BIN})
message (FATAL_ERROR "$ENV{CLIO_CLANG_TIDY_BIN} no such file. Check CLIO_CLANG_TIDY_BIN env variable")
message(FATAL_ERROR "$ENV{CLIO_CLANG_TIDY_BIN} no such file. Check CLIO_CLANG_TIDY_BIN env variable")
endif ()
message (STATUS "Using clang-tidy from CLIO_CLANG_TIDY_BIN")
message(STATUS "Using clang-tidy from CLIO_CLANG_TIDY_BIN")
else ()
find_program (_CLANG_TIDY_BIN NAMES "clang-tidy-16" "clang-tidy" REQUIRED)
find_program(_CLANG_TIDY_BIN NAMES "clang-tidy-17" "clang-tidy" REQUIRED)
endif ()
if (NOT _CLANG_TIDY_BIN)
message (FATAL_ERROR
"clang-tidy binary not found. Please set the CLIO_CLANG_TIDY_BIN environment variable or install clang-tidy.")
message(
FATAL_ERROR
"clang-tidy binary not found. Please set the CLIO_CLANG_TIDY_BIN environment variable or install clang-tidy."
)
endif ()
# Support for https://github.com/matus-chochlik/ctcache
find_program (CLANG_TIDY_CACHE_PATH NAMES "clang-tidy-cache")
find_program(CLANG_TIDY_CACHE_PATH NAMES "clang-tidy-cache")
if (CLANG_TIDY_CACHE_PATH)
set (_CLANG_TIDY_CMD
"${CLANG_TIDY_CACHE_PATH};${_CLANG_TIDY_BIN}"
CACHE STRING "A combined command to run clang-tidy with caching wrapper")
set(_CLANG_TIDY_CMD "${CLANG_TIDY_CACHE_PATH};${_CLANG_TIDY_BIN}"
CACHE STRING "A combined command to run clang-tidy with caching wrapper"
)
else ()
set(_CLANG_TIDY_CMD "${_CLANG_TIDY_BIN}")
endif ()
set (CMAKE_CXX_CLANG_TIDY "${_CLANG_TIDY_CMD};--quiet")
message (STATUS "Using clang-tidy: ${CMAKE_CXX_CLANG_TIDY}")
set(CMAKE_CXX_CLANG_TIDY "${_CLANG_TIDY_CMD};--quiet")
message(STATUS "Using clang-tidy: ${CMAKE_CXX_CLANG_TIDY}")
endif ()
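find_program now prefers clang-tidy-17, and CLIO_CLANG_TIDY_BIN still overrides the lookup entirely. A minimal sketch, assuming clang-tidy 17 lives outside PATH (the path below is hypothetical):

    export CLIO_CLANG_TIDY_BIN=/opt/llvm-17/bin/clang-tidy
    cmake -Dlint=TRUE ..
    cmake --build . --parallel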


@@ -2,38 +2,44 @@
write version to source
#]===================================================================]
find_package (Git REQUIRED)
find_package(Git REQUIRED)
set (GIT_COMMAND rev-parse --short HEAD)
execute_process (COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND}
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE REV OUTPUT_STRIP_TRAILING_WHITESPACE)
set(GIT_COMMAND rev-parse --short HEAD)
execute_process(
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE REV
OUTPUT_STRIP_TRAILING_WHITESPACE
)
set (GIT_COMMAND branch --show-current)
execute_process (COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND}
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE BRANCH OUTPUT_STRIP_TRAILING_WHITESPACE)
set(GIT_COMMAND branch --show-current)
execute_process(
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE BRANCH
OUTPUT_STRIP_TRAILING_WHITESPACE
)
if (BRANCH STREQUAL "")
set (BRANCH "dev")
set(BRANCH "dev")
endif ()
if (NOT (BRANCH MATCHES master OR BRANCH MATCHES release/*)) # for develop and any other branch name YYYYMMDDHHMMSS-<branch>-<git-rev>
execute_process (COMMAND date +%Y%m%d%H%M%S OUTPUT_VARIABLE DATE OUTPUT_STRIP_TRAILING_WHITESPACE)
set (VERSION "${DATE}-${BRANCH}-${REV}")
if (NOT (BRANCH MATCHES master OR BRANCH MATCHES release/*)) # for develop and any other branch name
# YYYYMMDDHHMMSS-<branch>-<git-rev>
execute_process(COMMAND date +%Y%m%d%H%M%S OUTPUT_VARIABLE DATE OUTPUT_STRIP_TRAILING_WHITESPACE)
set(VERSION "${DATE}-${BRANCH}-${REV}")
set(DOC_CLIO_VERSION "develop")
else ()
set (GIT_COMMAND describe --tags)
execute_process (COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND}
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE TAG_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE)
set (VERSION "${TAG_VERSION}-${REV}")
set(GIT_COMMAND describe --tags)
execute_process(
COMMAND ${GIT_EXECUTABLE} ${GIT_COMMAND} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_VARIABLE TAG_VERSION
OUTPUT_STRIP_TRAILING_WHITESPACE
)
set(VERSION "${TAG_VERSION}-${REV}")
set(DOC_CLIO_VERSION "${TAG_VERSION}")
endif ()
if (CMAKE_BUILD_TYPE MATCHES Debug)
set (VERSION "${VERSION}+DEBUG")
set(VERSION "${VERSION}+DEBUG")
endif ()
message (STATUS "Build version: ${VERSION}")
set (clio_version "${VERSION}")
message(STATUS "Build version: ${VERSION}")
set(clio_version "${VERSION}")
configure_file (CMake/Build.cpp.in ${CMAKE_SOURCE_DIR}/src/main/impl/Build.cpp)
configure_file(${CMAKE_CURRENT_LIST_DIR}/Build.cpp.in ${CMAKE_CURRENT_LIST_DIR}/../src/main/impl/Build.cpp)
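With these changes the computed version also feeds the new DOC_CLIO_VERSION variable. On develop or any feature branch the result follows the timestamped pattern, e.g. a hypothetical 20240305174500-develop-ab12cd3, while master and release/* builds use the latest tag plus revision, e.g. 2.0.0-ab12cd3.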

CMake/CodeCoverage.cmake (new file, 361 lines)

@@ -0,0 +1,361 @@
# Copyright (c) 2012 - 2017, Lars Bilke All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
#
# 1. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
#
# 1. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# CHANGES:
#
# 2012-01-31, Lars Bilke - Enable Code Coverage
#
# 2013-09-17, Joakim Söderberg - Added support for Clang. - Some additional usage instructions.
#
# 2016-02-03, Lars Bilke - Refactored functions to use named parameters
#
# 2017-06-02, Lars Bilke - Merged with modified version from github.com/ufz/ogs
#
# 2019-05-06, Anatolii Kurotych - Remove unnecessary --coverage flag
#
# 2019-12-13, FeRD (Frank Dana) - Deprecate COVERAGE_LCOVR_EXCLUDES and COVERAGE_GCOVR_EXCLUDES lists in favor of
# tool-agnostic COVERAGE_EXCLUDES variable, or EXCLUDE setup arguments. - CMake 3.4+: All excludes can be specified
# relative to BASE_DIRECTORY - All setup functions: accept BASE_DIRECTORY, EXCLUDE list - Set lcov basedir with -b
# argument - Add automatic --demangle-cpp in lcovr, if 'c++filt' is available (can be overridden with NO_DEMANGLE option
# in setup_target_for_coverage_lcovr().) - Delete output dir, .info file on 'make clean' - Remove Python detection,
# since version mismatches will break gcovr - Minor cleanup (lowercase function names, update examples...)
#
# 2019-12-19, FeRD (Frank Dana) - Rename Lcov outputs, make filtered file canonical, fix cleanup for targets
#
# 2020-01-19, Bob Apthorpe - Added gfortran support
#
# 2020-02-17, FeRD (Frank Dana) - Make all add_custom_target()s VERBATIM to auto-escape wildcard characters in EXCLUDEs,
# and remove manual escaping from gcovr targets
#
# 2021-01-19, Robin Mueller - Add CODE_COVERAGE_VERBOSE option which will allow to print out commands which are run -
# Added the option for users to set the GCOVR_ADDITIONAL_ARGS variable to supply additional flags to the gcovr command
#
# 2020-05-04, Michael Davis - Add -fprofile-abs-path to make gcno files contain absolute paths - Fix BASE_DIRECTORY not
# working when defined - Change BYPRODUCT from folder to index.html to stop ninja from complaining about double defines
#
# 2021-05-10, Martin Stump - Check if the generator is multi-config before warning about non-Debug builds
#
# 2022-02-22, Marko Wehle - Change gcovr output from -o <filename> for --xml <filename> and --html <filename> output
# respectively. This will allow for Multiple Output Formats at the same time by making use of GCOVR_ADDITIONAL_ARGS,
# e.g. GCOVR_ADDITIONAL_ARGS "--txt".
#
# 2022-09-28, Sebastian Mueller - fix append_coverage_compiler_flags_to_target to correctly add flags - replace
# "-fprofile-arcs -ftest-coverage" with "--coverage" (equivalent)
#
# 2023-12-15, Bronek Kozicki - remove setup_target_for_coverage_lcov (slow) and setup_target_for_coverage_fastcov (no
# support for Clang) - fix Clang support by adding find_program( ... llvm-cov ) - add Apple Clang support by adding
# execute_process( COMMAND xcrun -f llvm-cov ... ) - add CODE_COVERAGE_GCOV_TOOL to explicitly select gcov tool and
# disable find_program - replace both functions setup_target_for_coverage_gcovr_* with single
# setup_target_for_coverage_gcovr - add support for all gcovr output formats
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
#
# 1. Add the following line to your CMakeLists.txt (best inside an if-condition using a CMake option() to enable it just
# optionally): include(CodeCoverage)
#
# 1. Append necessary compiler flags for all supported source files: append_coverage_compiler_flags() Or for specific
# target: append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
#
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
#
# 1. If you need to exclude additional directories from the report, specify them using full paths in the
# COVERAGE_EXCLUDES variable before calling setup_target_for_coverage_*(). Example: set(COVERAGE_EXCLUDES
# '${PROJECT_SOURCE_DIR}/src/dir1/*'
# '/path/to/my/src/dir2/*') Or, use the EXCLUDE argument to setup_target_for_coverage_*(). Example:
# setup_target_for_coverage_gcovr( NAME coverage EXECUTABLE testrunner EXCLUDE "${PROJECT_SOURCE_DIR}/src/dir1/*"
# "/path/to/my/src/dir2/*")
#
# 4.a NOTE: With CMake 3.4+, COVERAGE_EXCLUDES or EXCLUDE can also be set relative to the BASE_DIRECTORY (default:
# PROJECT_SOURCE_DIR) Example: set(COVERAGE_EXCLUDES "dir1/*") setup_target_for_coverage_gcovr( NAME coverage EXECUTABLE
# testrunner FORMAT html-details BASE_DIRECTORY "${PROJECT_SOURCE_DIR}/src" EXCLUDE "dir2/*")
#
# 4.b If you need to pass specific options to gcovr, specify them in GCOVR_ADDITIONAL_ARGS variable. Example: set
# (GCOVR_ADDITIONAL_ARGS --exclude-throw-branches --exclude-noncode-lines -s) setup_target_for_coverage_gcovr( NAME
# coverage EXECUTABLE testrunner EXCLUDE "src/dir1" "src/dir2")
#
# 1. Use the functions described below to create a custom make target which runs your test executable and produces a code
# coverage report.
#
# 1. Build a Debug build: cmake -DCMAKE_BUILD_TYPE=Debug .. && make && make my_coverage_target
include(CMakeParseArguments)
option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)
# Check prereqs
find_program(GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)
if (DEFINED CODE_COVERAGE_GCOV_TOOL)
set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
elseif (DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if (APPLE)
execute_process(COMMAND xcrun -f llvm-cov OUTPUT_VARIABLE LLVMCOV_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()
find_program(LLVMCOV_PATH llvm-cov)
endif ()
if (LLVMCOV_PATH)
set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
endif ()
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
find_program(GCOV_PATH gcov)
set(GCOV_TOOL "${GCOV_PATH}")
endif ()
# Check supported compiler (Clang, GNU and Flang)
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
foreach (LANG ${LANGUAGES})
if ("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
if ("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
endif ()
elseif (NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU" AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES
"(LLVM)?[Ff]lang"
)
message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
endif ()
endforeach ()
set(COVERAGE_COMPILER_FLAGS "-g --coverage" CACHE INTERNAL "")
if (CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
include(CheckCXXCompilerFlag)
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
if (HAVE_cxx_fprofile_abs_path)
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif ()
include(CheckCCompilerFlag)
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
if (HAVE_c_fprofile_abs_path)
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
endif ()
endif ()
set(CMAKE_Fortran_FLAGS_COVERAGE ${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the Fortran compiler during coverage builds." FORCE
)
set(CMAKE_CXX_FLAGS_COVERAGE ${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the C++ compiler during coverage builds." FORCE
)
set(CMAKE_C_FLAGS_COVERAGE ${COVERAGE_COMPILER_FLAGS}
CACHE STRING "Flags used by the C compiler during coverage builds." FORCE
)
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE "" CACHE STRING "Flags used for linking binaries during coverage builds." FORCE)
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE ""
CACHE STRING "Flags used by the shared libraries linker during coverage builds." FORCE
)
mark_as_advanced(
CMAKE_Fortran_FLAGS_COVERAGE CMAKE_CXX_FLAGS_COVERAGE CMAKE_C_FLAGS_COVERAGE CMAKE_EXE_LINKER_FLAGS_COVERAGE
CMAKE_SHARED_LINKER_FLAGS_COVERAGE
)
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if (NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif () # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
if (CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
link_libraries(gcov)
endif ()
# Defines a target for running and collecting code coverage information. Builds dependencies, runs the given
# executable and outputs reports. NOTE! The executable should always have a ZERO as exit code otherwise the coverage
# generation will not complete.
#
# setup_target_for_coverage_gcovr(
#     NAME ctest_coverage                    # New target name
#     EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
#     DEPENDENCIES executable_target         # Dependencies to build first
#     BASE_DIRECTORY "../"                   # Base directory for report (defaults to PROJECT_SOURCE_DIR)
#     FORMAT "cobertura"                     # Output format, one of: xml cobertura sonarqube json-summary
#                                            # json-details coveralls csv txt html-single html-nested html-details
#                                            # (xml is an alias to cobertura; if no format is set, defaults to xml)
#     EXCLUDE "src/dir1/*" "src/dir2/*"      # Patterns to exclude (can be relative to BASE_DIRECTORY, with CMake 3.4+)
# )
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the GCOVR command.
function (setup_target_for_coverage_gcovr)
set(options NONE)
set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if (NOT GCOV_TOOL)
message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...")
endif ()
if (NOT GCOVR_PATH)
message(FATAL_ERROR "Could not find gcovr tool! Aborting...")
endif ()
# Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
if (DEFINED Coverage_BASE_DIRECTORY)
get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
else ()
set(BASEDIR ${PROJECT_SOURCE_DIR})
endif ()
if (NOT DEFINED Coverage_FORMAT)
set(Coverage_FORMAT xml)
endif ()
if ("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
else ()
if ((Coverage_FORMAT STREQUAL "html-details") OR (Coverage_FORMAT STREQUAL "html-nested"))
set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
elseif (Coverage_FORMAT STREQUAL "html-single")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
elseif ((Coverage_FORMAT STREQUAL "json-summary") OR (Coverage_FORMAT STREQUAL "json-details")
OR (Coverage_FORMAT STREQUAL "coveralls")
)
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
elseif (Coverage_FORMAT STREQUAL "txt")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt)
elseif (Coverage_FORMAT STREQUAL "csv")
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv)
else ()
set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml)
endif ()
endif ()
if ((Coverage_FORMAT STREQUAL "cobertura") OR (Coverage_FORMAT STREQUAL "xml"))
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}")
list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty)
set(Coverage_FORMAT cobertura) # overwrite xml
elseif (Coverage_FORMAT STREQUAL "sonarqube")
list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}")
elseif (Coverage_FORMAT STREQUAL "json-summary")
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}")
list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty)
elseif (Coverage_FORMAT STREQUAL "json-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}")
list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty)
elseif (Coverage_FORMAT STREQUAL "coveralls")
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}")
list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty)
elseif (Coverage_FORMAT STREQUAL "csv")
list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}")
elseif (Coverage_FORMAT STREQUAL "txt")
list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}")
elseif (Coverage_FORMAT STREQUAL "html-single")
list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained)
elseif (Coverage_FORMAT STREQUAL "html-nested")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}")
elseif (Coverage_FORMAT STREQUAL "html-details")
list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}")
else ()
message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...")
endif ()
# Collect excludes (CMake 3.4+: Also compute absolute paths)
set(GCOVR_EXCLUDES "")
foreach (EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
if (CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
endif ()
list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
endforeach ()
list(REMOVE_DUPLICATES GCOVR_EXCLUDES)
# Combine excludes to several -e arguments
set(GCOVR_EXCLUDE_ARGS "")
foreach (EXCLUDE ${GCOVR_EXCLUDES})
list(APPEND GCOVR_EXCLUDE_ARGS "-e")
list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
endforeach ()
# Set up commands which will be run to generate coverage data
# Run tests
set(GCOVR_EXEC_TESTS_CMD ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS})
# Create folder
if (DEFINED GCOVR_CREATE_FOLDER)
set(GCOVR_FOLDER_CMD ${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
else ()
set(GCOVR_FOLDER_CMD echo) # dummy
endif ()
# Running gcovr
set(GCOVR_CMD
${GCOVR_PATH}
--gcov-executable
${GCOV_TOOL}
--gcov-ignore-parse-errors=negative_hits.warn_once_per_file
-r
${BASEDIR}
${GCOVR_ADDITIONAL_ARGS}
${GCOVR_EXCLUDE_ARGS}
--object-directory=${PROJECT_BINARY_DIR}
)
if (CODE_COVERAGE_VERBOSE)
message(STATUS "Executed command report")
message(STATUS "Command to run tests: ")
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
if (NOT GCOVR_FOLDER_CMD STREQUAL "echo")
message(STATUS "Command to create a folder: ")
string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
endif ()
message(STATUS "Command to generate gcovr coverage data: ")
string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}")
message(STATUS "${GCOVR_CMD_SPACED}")
endif ()
add_custom_target(
${Coverage_NAME}
COMMAND ${GCOVR_EXEC_TESTS_CMD}
COMMAND ${GCOVR_FOLDER_CMD}
COMMAND ${GCOVR_CMD}
BYPRODUCTS ${GCOVR_OUTPUT_FILE}
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
DEPENDS ${Coverage_DEPENDENCIES}
VERBATIM # Protect arguments to commands
COMMENT "Running gcovr to produce code coverage report."
)
# Show info where to find the report
add_custom_command(
TARGET ${Coverage_NAME} POST_BUILD COMMAND ;
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
)
endfunction () # setup_target_for_coverage_gcovr
function (append_coverage_compiler_flags)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
endfunction () # append_coverage_compiler_flags
# Setup coverage for specific library
function (append_coverage_compiler_flags_to_target name)
separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
target_compile_options(${name} PRIVATE ${_flag_list})
if (CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
target_link_libraries(${name} PRIVATE gcov)
endif ()
endfunction ()
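A minimal sketch of driving the resulting coverage target locally, assuming gcovr is installed and using the coverage_report target and cache variables wired up in CMakeLists.txt below:

    cmake -Dtests=TRUE -Dcoverage=TRUE -DCMAKE_BUILD_TYPE=Debug -DCODE_COVERAGE_REPORT_FORMAT=html-details ..
    cmake --build . --target coverage_report
    # for html-details, the report lands in <build>/coverage_report/index.html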


@@ -1,125 +0,0 @@
# call add_coverage(module_name) to add coverage targets for the given module
function (add_coverage module)
if ("${CMAKE_C_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang"
OR "${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
message ("[Coverage] Building with llvm Code Coverage Tools")
# Using llvm gcov; llvm is installed by xcode
set (LLVM_COV_PATH /Library/Developer/CommandLineTools/usr/bin)
if (NOT EXISTS ${LLVM_COV_PATH}/llvm-cov)
message (FATAL_ERROR "llvm-cov not found! Aborting.")
endif ()
# set Flags
target_compile_options (${module} PRIVATE
-fprofile-instr-generate
-fcoverage-mapping)
target_link_options (${module} PUBLIC
-fprofile-instr-generate
-fcoverage-mapping)
target_compile_options (clio PRIVATE
-fprofile-instr-generate
-fcoverage-mapping)
target_link_options (clio PUBLIC
-fprofile-instr-generate
-fcoverage-mapping)
# llvm-cov
add_custom_target (${module}-ccov-preprocessing
COMMAND LLVM_PROFILE_FILE=${module}.profraw $<TARGET_FILE:${module}>
COMMAND ${LLVM_COV_PATH}/llvm-profdata merge -sparse ${module}.profraw -o
${module}.profdata
DEPENDS ${module})
add_custom_target (${module}-ccov-show
COMMAND ${LLVM_COV_PATH}/llvm-cov show $<TARGET_FILE:${module}>
-instr-profile=${module}.profdata -show-line-counts-or-regions
DEPENDS ${module}-ccov-preprocessing)
# add summary for CI parse
add_custom_target (${module}-ccov-report
COMMAND
${LLVM_COV_PATH}/llvm-cov report $<TARGET_FILE:${module}>
-instr-profile=${module}.profdata
-ignore-filename-regex=".*_makefiles|.*unittests|.*_deps"
-show-region-summary=false
DEPENDS ${module}-ccov-preprocessing)
# exclude libs and unittests self
add_custom_target (${module}-ccov
COMMAND
${LLVM_COV_PATH}/llvm-cov show $<TARGET_FILE:${module}>
-instr-profile=${module}.profdata -show-line-counts-or-regions
-output-dir=${module}-llvm-cov -format="html"
-ignore-filename-regex=".*_makefiles|.*unittests|.*_deps" > /dev/null 2>&1
DEPENDS ${module}-ccov-preprocessing)
add_custom_command (
TARGET ${module}-ccov
POST_BUILD
COMMENT
"Open ${module}-llvm-cov/index.html in your browser to view the coverage report."
)
elseif ("${CMAKE_C_COMPILER_ID}" MATCHES "GNU" OR "${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
message ("[Coverage] Building with Gcc Code Coverage Tools")
find_program (GCOV_PATH gcov)
if (NOT GCOV_PATH)
message (FATAL_ERROR "gcov not found! Aborting...")
endif () # NOT GCOV_PATH
find_program (GCOVR_PATH gcovr)
if (NOT GCOVR_PATH)
message (FATAL_ERROR "gcovr not found! Aborting...")
endif () # NOT GCOVR_PATH
set (COV_OUTPUT_PATH ${module}-gcc-cov)
target_compile_options (${module} PRIVATE -fprofile-arcs -ftest-coverage
-fPIC)
target_link_libraries (${module} PRIVATE gcov)
target_compile_options (clio PRIVATE -fprofile-arcs -ftest-coverage
-fPIC)
target_link_libraries (clio PRIVATE gcov)
# this target is used for CI as well; it generates the summary out.xml which is sent
# to a github action to generate markdown that we can paste into comments or the
# readme
add_custom_target (${module}-ccov
COMMAND ${module} ${TEST_PARAMETER}
COMMAND rm -rf ${COV_OUTPUT_PATH}
COMMAND mkdir ${COV_OUTPUT_PATH}
COMMAND
gcovr -r ${CMAKE_SOURCE_DIR} --object-directory=${PROJECT_BINARY_DIR} -x
${COV_OUTPUT_PATH}/out.xml --exclude='${CMAKE_SOURCE_DIR}/unittests/'
--exclude='${PROJECT_BINARY_DIR}/'
COMMAND
gcovr -r ${CMAKE_SOURCE_DIR} --object-directory=${PROJECT_BINARY_DIR}
--html ${COV_OUTPUT_PATH}/report.html
--exclude='${CMAKE_SOURCE_DIR}/unittests/'
--exclude='${PROJECT_BINARY_DIR}/'
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
COMMENT "Running gcovr to produce Cobertura code coverage report.")
# generate the detail report
add_custom_target (${module}-ccov-report
COMMAND ${module} ${TEST_PARAMETER}
COMMAND rm -rf ${COV_OUTPUT_PATH}
COMMAND mkdir ${COV_OUTPUT_PATH}
COMMAND
gcovr -r ${CMAKE_SOURCE_DIR} --object-directory=${PROJECT_BINARY_DIR}
--html-details ${COV_OUTPUT_PATH}/index.html
--exclude='${CMAKE_SOURCE_DIR}/unittests/'
--exclude='${PROJECT_BINARY_DIR}/'
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
COMMENT "Running gcovr to produce Cobertura code coverage report.")
add_custom_command (
TARGET ${module}-ccov-report
POST_BUILD
COMMENT
"Open ${COV_OUTPUT_PATH}/index.html in your browser to view the coverage report."
)
else ()
message (FATAL_ERROR "Complier not support yet")
endif ()
endfunction ()


@@ -1,11 +1,20 @@
find_package (Doxygen REQUIRED)
find_package(Doxygen REQUIRED)
set (DOXYGEN_IN ${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile)
set (DOXYGEN_OUT ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile)
# See Doxyfile for these settings:
set(SOURCE ${CMAKE_CURRENT_SOURCE_DIR}/..)
set(USE_DOT "YES")
set(LINT "NO")
set(EXCLUDES "")
# ---
configure_file (${DOXYGEN_IN} ${DOXYGEN_OUT} @ONLY)
add_custom_target (docs
COMMAND ${DOXYGEN_EXECUTABLE} ${DOXYGEN_OUT}
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "Generating API documentation with Doxygen"
VERBATIM)
set(DOXYGEN_IN ${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile)
set(DOXYGEN_OUT ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile)
configure_file(${DOXYGEN_IN} ${DOXYGEN_OUT})
add_custom_target(
docs
COMMAND ${DOXYGEN_EXECUTABLE} ${DOXYGEN_OUT}
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "Generating API documentation with Doxygen"
VERBATIM
)
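The docs target can be driven locally the same way the Documentation workflow above does it, assuming doxygen (and dot, while USE_DOT stays YES) is installed:

    mkdir -p build_docs && cd build_docs
    cmake ../docs && cmake --build . --target docs   # output lands in build_docs/html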


@@ -1,45 +1,38 @@
set(COMPILER_FLAGS
-Wall
-Wcast-align
-Wdouble-promotion
-Wextra
-Werror
-Wformat=2
-Wimplicit-fallthrough
-Wmisleading-indentation
-Wno-narrowing
-Wno-deprecated-declarations
-Wno-dangling-else
-Wno-unused-but-set-variable
-Wnon-virtual-dtor
-Wnull-dereference
-Wold-style-cast
-pedantic
-Wpedantic
-Wunused
-Wall
-Wcast-align
-Wdouble-promotion
-Wextra
-Werror
-Wformat=2
-Wimplicit-fallthrough
-Wmisleading-indentation
-Wno-narrowing
-Wno-deprecated-declarations
-Wno-dangling-else
-Wno-unused-but-set-variable
-Wnon-virtual-dtor
-Wnull-dereference
-Wold-style-cast
-pedantic
-Wpedantic
-Wunused
)
if (is_gcc AND NOT lint)
list(APPEND COMPILER_FLAGS
-Wduplicated-branches
-Wduplicated-cond
-Wlogical-op
-Wuseless-cast
)
endif ()
# TODO: reenable when we change CI #884 if (is_gcc AND NOT lint) list(APPEND COMPILER_FLAGS -Wduplicated-branches
# -Wduplicated-cond -Wlogical-op -Wuseless-cast ) endif ()
if (is_clang)
list(APPEND COMPILER_FLAGS
-Wshadow # gcc is too aggressive with shadowing https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78147
list(APPEND COMPILER_FLAGS -Wshadow # gcc is too aggressive with shadowing
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78147
)
endif ()
if (is_appleclang)
list(APPEND COMPILER_FLAGS
-Wreorder-init-list
)
list(APPEND COMPILER_FLAGS -Wreorder-init-list)
endif ()
# See https://github.com/cpp-best-practices/cppbestpractices/blob/master/02-Use_the_Tools_Available.md#gcc--clang for the flags description
# See https://github.com/cpp-best-practices/cppbestpractices/blob/master/02-Use_the_Tools_Available.md#gcc--clang for
# the flags description
target_compile_options (clio PUBLIC ${COMPILER_FLAGS})
target_compile_options(clio PUBLIC ${COMPILER_FLAGS})


@@ -1,11 +1,11 @@
include (CheckIncludeFileCXX)
include(CheckIncludeFileCXX)
check_include_file_cxx ("source_location" SOURCE_LOCATION_AVAILABLE)
if (SOURCE_LOCATION_AVAILABLE)
target_compile_definitions (clio PUBLIC "HAS_SOURCE_LOCATION")
check_include_file_cxx("source_location" SOURCE_LOCATION_AVAILABLE)
if (SOURCE_LOCATION_AVAILABLE)
target_compile_definitions(clio PUBLIC "HAS_SOURCE_LOCATION")
endif ()
check_include_file_cxx ("experimental/source_location" EXPERIMENTAL_SOURCE_LOCATION_AVAILABLE)
if (EXPERIMENTAL_SOURCE_LOCATION_AVAILABLE)
target_compile_definitions (clio PUBLIC "HAS_EXPERIMENTAL_SOURCE_LOCATION")
check_include_file_cxx("experimental/source_location" EXPERIMENTAL_SOURCE_LOCATION_AVAILABLE)
if (EXPERIMENTAL_SOURCE_LOCATION_AVAILABLE)
target_compile_definitions(clio PUBLIC "HAS_EXPERIMENTAL_SOURCE_LOCATION")
endif ()


@@ -1,11 +1,4 @@
set (Boost_USE_STATIC_LIBS ON)
set (Boost_USE_STATIC_RUNTIME ON)
set(Boost_USE_STATIC_LIBS ON)
set(Boost_USE_STATIC_RUNTIME ON)
find_package (Boost 1.82 REQUIRED
COMPONENTS
program_options
coroutine
system
log
log_setup
)
find_package(Boost 1.82 REQUIRED COMPONENTS program_options coroutine system log log_setup)


@@ -1,5 +1,3 @@
find_package (OpenSSL 1.1.1 REQUIRED)
find_package(OpenSSL 1.1.1 REQUIRED)
set_target_properties (OpenSSL::SSL PROPERTIES
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
)
set_target_properties(OpenSSL::SSL PROPERTIES INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2)


@@ -1,2 +1,2 @@
set (THREADS_PREFER_PTHREAD_FLAG ON)
find_package (Threads)
set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads)


@@ -1 +1 @@
find_package (cassandra-cpp-driver REQUIRED)
find_package(cassandra-cpp-driver REQUIRED)

CMake/deps/gbench.cmake (new file, 1 line)

@@ -0,0 +1 @@
find_package(benchmark REQUIRED)
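This pairs with the new benchmark option in CMakeLists.txt below. A minimal sketch, assuming find_package can resolve google-benchmark:

    cmake -Dbenchmark=TRUE ..
    cmake --build . --target clio_benchmarks
    ./clio_benchmarks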


@@ -1,4 +1,4 @@
find_package (GTest REQUIRED)
find_package(GTest REQUIRED)
enable_testing ()
include (GoogleTest)
enable_testing()
include(GoogleTest)


@@ -0,0 +1,3 @@
target_compile_definitions(clio PUBLIC BOOST_STACKTRACE_LINK)
target_compile_definitions(clio PUBLIC BOOST_STACKTRACE_USE_BACKTRACE)
find_package(libbacktrace REQUIRED)


@@ -1 +1 @@
find_package (fmt REQUIRED)
find_package(fmt REQUIRED)


@@ -1 +1 @@
find_package (xrpl REQUIRED)
find_package(xrpl REQUIRED)


@@ -1,14 +1,13 @@
set (CLIO_INSTALL_DIR "/opt/clio")
set (CMAKE_INSTALL_PREFIX ${CLIO_INSTALL_DIR})
set(CLIO_INSTALL_DIR "/opt/clio")
set(CMAKE_INSTALL_PREFIX ${CLIO_INSTALL_DIR})
install (TARGETS clio_server DESTINATION bin)
install(TARGETS clio_server DESTINATION bin)
file (READ example-config.json config)
string (REGEX REPLACE "./clio_log" "/var/log/clio/" config "${config}")
file (WRITE ${CMAKE_BINARY_DIR}/install-config.json "${config}")
install (FILES ${CMAKE_BINARY_DIR}/install-config.json DESTINATION etc RENAME config.json)
file(READ docs/examples/config/example-config.json config)
string(REGEX REPLACE "./clio_log" "/var/log/clio/" config "${config}")
file(WRITE ${CMAKE_BINARY_DIR}/install-config.json "${config}")
install(FILES ${CMAKE_BINARY_DIR}/install-config.json DESTINATION etc RENAME config.json)
configure_file ("${CMAKE_SOURCE_DIR}/CMake/install/clio.service.in" "${CMAKE_BINARY_DIR}/clio.service")
install (FILES "${CMAKE_BINARY_DIR}/clio.service" DESTINATION /lib/systemd/system)
configure_file("${CMAKE_SOURCE_DIR}/CMake/install/clio.service.in" "${CMAKE_BINARY_DIR}/clio.service")
install(FILES "${CMAKE_BINARY_DIR}/clio.service" DESTINATION /lib/systemd/system)
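Taken together, these rules place everything under the /opt/clio prefix plus the systemd unit. A minimal sketch, assuming the install rules are included in your configuration:

    sudo cmake --install build
    # -> /opt/clio/bin/clio_server, /opt/clio/etc/config.json, /lib/systemd/system/clio.service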


@@ -1,291 +1,466 @@
cmake_minimum_required(VERSION 3.16.3)
project(clio)
# =========================== Options ====================================== #
option(verbose "Verbose build" FALSE)
option(tests "Build tests" FALSE)
option(benchmark "Build benchmarks" FALSE)
option(docs "Generate doxygen docs" FALSE)
option(coverage "Build test coverage report" FALSE)
option(packaging "Create distribution packages" FALSE)
option(lint "Run clang-tidy checks during compilation" FALSE)
# ========================================================================== #
# Options #
# ========================================================================== #
option (verbose "Verbose build" FALSE)
option (tests "Build tests" FALSE)
option (docs "Generate doxygen docs" FALSE)
option (coverage "Build test coverage report" FALSE)
option (packaging "Create distribution packages" FALSE)
option (lint "Run clang-tidy checks during compilation" FALSE)
# ========================================================================== #
set (san "" CACHE STRING "Add sanitizer instrumentation")
set (CMAKE_EXPORT_COMPILE_COMMANDS TRUE)
set_property (CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
set(san "" CACHE STRING "Add sanitizer instrumentation")
set(CMAKE_EXPORT_COMPILE_COMMANDS TRUE)
set_property(CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
# ========================================================================== #
# Include required modules
include (CMake/Ccache.cmake)
include (CheckCXXCompilerFlag)
include (CMake/ClangTidy.cmake)
include(CMake/Ccache.cmake)
include(CheckCXXCompilerFlag)
include(CMake/ClangTidy.cmake)
# Set coverage build options
if (tests AND coverage)
include(CMake/CodeCoverage.cmake)
append_coverage_compiler_flags()
endif ()
if (verbose)
set (CMAKE_VERBOSE_MAKEFILE TRUE)
set(CMAKE_VERBOSE_MAKEFILE TRUE)
endif ()
if (packaging)
add_definitions (-DPKG=1)
add_definitions(-DPKG=1)
endif ()
add_library (clio)
add_library(clio)
# Clio tweaks and checks
include (CMake/CheckCompiler.cmake)
include (CMake/Settings.cmake)
include (CMake/ClioVersion.cmake)
include (CMake/SourceLocation.cmake)
include(CMake/CheckCompiler.cmake)
include(CMake/Settings.cmake)
include(CMake/ClioVersion.cmake)
include(CMake/SourceLocation.cmake)
# Clio deps
include (CMake/deps/libxrpl.cmake)
include (CMake/deps/Boost.cmake)
include (CMake/deps/OpenSSL.cmake)
include (CMake/deps/Threads.cmake)
include (CMake/deps/libfmt.cmake)
include (CMake/deps/cassandra.cmake)
include(CMake/deps/libxrpl.cmake)
include(CMake/deps/Boost.cmake)
include(CMake/deps/OpenSSL.cmake)
include(CMake/deps/Threads.cmake)
include(CMake/deps/libfmt.cmake)
include(CMake/deps/cassandra.cmake)
include(CMake/deps/libbacktrace.cmake)
# TODO: Include directory will be wrong when installed.
target_include_directories (clio PUBLIC src)
target_compile_features (clio PUBLIC cxx_std_20)
target_include_directories(clio PUBLIC src)
target_compile_features(clio PUBLIC cxx_std_20)
target_link_libraries (clio
target_link_libraries(
clio
PUBLIC Boost::boost
PUBLIC Boost::coroutine
PUBLIC Boost::program_options
PUBLIC Boost::system
PUBLIC Boost::log
PUBLIC Boost::log_setup
PUBLIC Boost::stacktrace_backtrace
PUBLIC cassandra-cpp-driver::cassandra-cpp-driver
PUBLIC fmt::fmt
PUBLIC OpenSSL::Crypto
PUBLIC OpenSSL::SSL
PUBLIC xrpl::libxrpl
PUBLIC dl
PUBLIC libbacktrace::libbacktrace
INTERFACE Threads::Threads
)
if (is_gcc)
# FIXME: needed on gcc for now
target_compile_definitions (clio PUBLIC BOOST_ASIO_DISABLE_CONCEPTS)
target_compile_definitions(clio PUBLIC BOOST_ASIO_DISABLE_CONCEPTS)
endif ()
target_sources (clio PRIVATE
## Main
src/main/impl/Build.cpp
## Backend
src/data/BackendCounters.cpp
src/data/BackendInterface.cpp
src/data/LedgerCache.cpp
src/data/cassandra/impl/Future.cpp
src/data/cassandra/impl/Cluster.cpp
src/data/cassandra/impl/Batch.cpp
src/data/cassandra/impl/Result.cpp
src/data/cassandra/impl/Tuple.cpp
src/data/cassandra/impl/SslContext.cpp
src/data/cassandra/Handle.cpp
src/data/cassandra/SettingsProvider.cpp
## ETL
src/etl/Source.cpp
src/etl/ProbingSource.cpp
src/etl/NFTHelpers.cpp
src/etl/ETLService.cpp
src/etl/ETLState.cpp
src/etl/LoadBalancer.cpp
src/etl/impl/ForwardCache.cpp
## Feed
src/feed/SubscriptionManager.cpp
## Web
src/web/impl/AdminVerificationStrategy.cpp
src/web/IntervalSweepHandler.cpp
## RPC
src/rpc/Errors.cpp
src/rpc/Factories.cpp
src/rpc/RPCHelpers.cpp
src/rpc/Counters.cpp
src/rpc/WorkQueue.cpp
src/rpc/common/Specs.cpp
src/rpc/common/Validators.cpp
src/rpc/common/MetaProcessors.cpp
src/rpc/common/impl/APIVersionParser.cpp
src/rpc/common/impl/HandlerProvider.cpp
## RPC handlers
src/rpc/handlers/AccountChannels.cpp
src/rpc/handlers/AccountCurrencies.cpp
src/rpc/handlers/AccountInfo.cpp
src/rpc/handlers/AccountLines.cpp
src/rpc/handlers/AccountNFTs.cpp
src/rpc/handlers/AccountObjects.cpp
src/rpc/handlers/AccountOffers.cpp
src/rpc/handlers/AccountTx.cpp
src/rpc/handlers/BookChanges.cpp
src/rpc/handlers/BookOffers.cpp
src/rpc/handlers/DepositAuthorized.cpp
src/rpc/handlers/GatewayBalances.cpp
src/rpc/handlers/Ledger.cpp
src/rpc/handlers/LedgerData.cpp
src/rpc/handlers/LedgerEntry.cpp
src/rpc/handlers/LedgerRange.cpp
src/rpc/handlers/NFTsByIssuer.cpp
src/rpc/handlers/NFTBuyOffers.cpp
src/rpc/handlers/NFTHistory.cpp
src/rpc/handlers/NFTInfo.cpp
src/rpc/handlers/NFTOffersCommon.cpp
src/rpc/handlers/NFTSellOffers.cpp
src/rpc/handlers/NoRippleCheck.cpp
src/rpc/handlers/Random.cpp
src/rpc/handlers/TransactionEntry.cpp
## Util
src/util/config/Config.cpp
src/util/log/Logger.cpp
src/util/prometheus/Http.cpp
src/util/prometheus/Label.cpp
src/util/prometheus/Metrics.cpp
src/util/prometheus/Prometheus.cpp
src/util/Random.cpp
src/util/Taggable.cpp)
target_sources(
clio
PRIVATE # Main
src/main/impl/Build.cpp
# Backend
src/data/BackendCounters.cpp
src/data/BackendInterface.cpp
src/data/LedgerCache.cpp
src/data/cassandra/impl/Future.cpp
src/data/cassandra/impl/Cluster.cpp
src/data/cassandra/impl/Batch.cpp
src/data/cassandra/impl/Result.cpp
src/data/cassandra/impl/Tuple.cpp
src/data/cassandra/impl/SslContext.cpp
src/data/cassandra/Handle.cpp
src/data/cassandra/SettingsProvider.cpp
# ETL
src/etl/NFTHelpers.cpp
src/etl/ETLService.cpp
src/etl/ETLState.cpp
src/etl/LoadBalancer.cpp
src/etl/CacheLoaderSettings.cpp
src/etl/Source.cpp
src/etl/impl/ForwardingCache.cpp
src/etl/impl/ForwardingSource.cpp
src/etl/impl/GrpcSource.cpp
src/etl/impl/SubscriptionSource.cpp
# Feed
src/feed/SubscriptionManager.cpp
src/feed/impl/TransactionFeed.cpp
src/feed/impl/LedgerFeed.cpp
src/feed/impl/ProposedTransactionFeed.cpp
src/feed/impl/SingleFeedBase.cpp
# Web
src/web/impl/AdminVerificationStrategy.cpp
src/web/IntervalSweepHandler.cpp
src/web/Resolver.cpp
# RPC
src/rpc/Errors.cpp
src/rpc/Factories.cpp
src/rpc/AMMHelpers.cpp
src/rpc/RPCHelpers.cpp
src/rpc/Counters.cpp
src/rpc/WorkQueue.cpp
src/rpc/common/Specs.cpp
src/rpc/common/Validators.cpp
src/rpc/common/MetaProcessors.cpp
src/rpc/common/impl/APIVersionParser.cpp
src/rpc/common/impl/HandlerProvider.cpp
# RPC handlers
src/rpc/handlers/AccountChannels.cpp
src/rpc/handlers/AccountCurrencies.cpp
src/rpc/handlers/AccountInfo.cpp
src/rpc/handlers/AccountLines.cpp
src/rpc/handlers/AccountNFTs.cpp
src/rpc/handlers/AccountObjects.cpp
src/rpc/handlers/AccountOffers.cpp
src/rpc/handlers/AccountTx.cpp
src/rpc/handlers/AMMInfo.cpp
src/rpc/handlers/BookChanges.cpp
src/rpc/handlers/BookOffers.cpp
src/rpc/handlers/DepositAuthorized.cpp
src/rpc/handlers/GatewayBalances.cpp
src/rpc/handlers/Ledger.cpp
src/rpc/handlers/LedgerData.cpp
src/rpc/handlers/LedgerEntry.cpp
src/rpc/handlers/LedgerRange.cpp
src/rpc/handlers/NFTsByIssuer.cpp
src/rpc/handlers/NFTBuyOffers.cpp
src/rpc/handlers/NFTHistory.cpp
src/rpc/handlers/NFTInfo.cpp
src/rpc/handlers/NFTOffersCommon.cpp
src/rpc/handlers/NFTSellOffers.cpp
src/rpc/handlers/NoRippleCheck.cpp
src/rpc/handlers/Random.cpp
src/rpc/handlers/TransactionEntry.cpp
# Util
src/util/config/Config.cpp
src/util/log/Logger.cpp
src/util/prometheus/Http.cpp
src/util/prometheus/Label.cpp
src/util/prometheus/MetricBase.cpp
src/util/prometheus/MetricBuilder.cpp
src/util/prometheus/MetricsFamily.cpp
src/util/prometheus/OStream.cpp
src/util/prometheus/Prometheus.cpp
src/util/Random.cpp
src/util/Retry.cpp
src/util/requests/RequestBuilder.cpp
src/util/requests/Types.cpp
src/util/requests/WsConnection.cpp
src/util/requests/impl/SslContext.cpp
src/util/Taggable.cpp
src/util/TerminationHandler.cpp
src/util/TxUtils.cpp
src/util/LedgerUtils.cpp
)
# Clio server
add_executable (clio_server src/main/Main.cpp)
target_link_libraries (clio_server PRIVATE clio)
target_link_options(clio_server
PRIVATE
$<$<AND:$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${san}>>>:-static-libstdc++ -static-libgcc>
add_executable(clio_server src/main/Main.cpp)
target_link_libraries(clio_server PRIVATE clio)
target_link_options(
clio_server PRIVATE $<$<AND:$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${san}>>>:-static-libstdc++ -static-libgcc>
)
# Unittesting
if (tests)
set (TEST_TARGET clio_tests)
add_executable (${TEST_TARGET}
set(TEST_TARGET clio_tests)
add_executable(
${TEST_TARGET}
# Common
unittests/Main.cpp
unittests/Playground.cpp
unittests/LoggerTests.cpp
unittests/ConfigTests.cpp
unittests/ProfilerTests.cpp
unittests/JsonUtilTests.cpp
unittests/DOSGuardTests.cpp
unittests/SubscriptionTests.cpp
unittests/SubscriptionManagerTests.cpp
unittests/util/TestObject.cpp
unittests/util/StringUtils.cpp
unittests/util/prometheus/CounterTests.cpp
unittests/util/prometheus/GaugeTests.cpp
unittests/util/prometheus/HttpTests.cpp
unittests/util/prometheus/LabelTests.cpp
unittests/util/prometheus/MetricsTests.cpp
# ETL
unittests/etl/ExtractionDataPipeTests.cpp
unittests/etl/ExtractorTests.cpp
unittests/etl/TransformerTests.cpp
unittests/etl/CacheLoaderTests.cpp
unittests/etl/AmendmentBlockHandlerTests.cpp
unittests/etl/LedgerPublisherTests.cpp
unittests/etl/ETLStateTests.cpp
# RPC
unittests/rpc/ErrorTests.cpp
unittests/rpc/BaseTests.cpp
unittests/rpc/RPCHelpersTests.cpp
unittests/rpc/CountersTests.cpp
unittests/rpc/APIVersionTests.cpp
unittests/rpc/ForwardingProxyTests.cpp
unittests/rpc/WorkQueueTests.cpp
unittests/rpc/AmendmentsTests.cpp
unittests/rpc/JsonBoolTests.cpp
## RPC handlers
unittests/rpc/handlers/DefaultProcessorTests.cpp
unittests/rpc/handlers/TestHandlerTests.cpp
unittests/rpc/handlers/AccountCurrenciesTests.cpp
unittests/rpc/handlers/AccountLinesTests.cpp
unittests/rpc/handlers/AccountTxTests.cpp
unittests/rpc/handlers/AccountOffersTests.cpp
unittests/rpc/handlers/AccountInfoTests.cpp
unittests/rpc/handlers/AccountChannelsTests.cpp
unittests/rpc/handlers/AccountNFTsTests.cpp
unittests/rpc/handlers/BookOffersTests.cpp
unittests/rpc/handlers/DepositAuthorizedTests.cpp
unittests/rpc/handlers/GatewayBalancesTests.cpp
unittests/rpc/handlers/TxTests.cpp
unittests/rpc/handlers/TransactionEntryTests.cpp
unittests/rpc/handlers/LedgerEntryTests.cpp
unittests/rpc/handlers/LedgerRangeTests.cpp
unittests/rpc/handlers/NoRippleCheckTests.cpp
unittests/rpc/handlers/ServerInfoTests.cpp
unittests/rpc/handlers/PingTests.cpp
unittests/rpc/handlers/RandomTests.cpp
unittests/rpc/handlers/NFTInfoTests.cpp
unittests/rpc/handlers/NFTBuyOffersTests.cpp
unittests/rpc/handlers/NFTsByIssuerTest.cpp
unittests/rpc/handlers/NFTSellOffersTests.cpp
unittests/rpc/handlers/NFTHistoryTests.cpp
unittests/rpc/handlers/SubscribeTests.cpp
unittests/rpc/handlers/UnsubscribeTests.cpp
unittests/rpc/handlers/LedgerDataTests.cpp
unittests/rpc/handlers/AccountObjectsTests.cpp
unittests/rpc/handlers/BookChangesTests.cpp
unittests/rpc/handlers/LedgerTests.cpp
unittests/rpc/handlers/VersionHandlerTests.cpp
# Backend
unittests/data/BackendFactoryTests.cpp
unittests/data/BackendCountersTests.cpp
unittests/data/cassandra/BaseTests.cpp
unittests/data/cassandra/BackendTests.cpp
unittests/data/cassandra/RetryPolicyTests.cpp
unittests/data/cassandra/SettingsProviderTests.cpp
unittests/data/cassandra/ExecutionStrategyTests.cpp
unittests/data/BackendCountersTests.cpp
unittests/data/BackendFactoryTests.cpp
unittests/data/BackendFactoryTests.cpp
unittests/data/cassandra/AsyncExecutorTests.cpp
# Webserver
unittests/data/cassandra/AsyncExecutorTests.cpp
# Webserver
unittests/data/cassandra/BackendTests.cpp
unittests/data/cassandra/BackendTests.cpp
unittests/data/cassandra/BaseTests.cpp
unittests/data/cassandra/BaseTests.cpp
unittests/data/cassandra/ExecutionStrategyTests.cpp
unittests/data/cassandra/ExecutionStrategyTests.cpp
unittests/data/cassandra/RetryPolicyTests.cpp
unittests/data/cassandra/RetryPolicyTests.cpp
unittests/data/cassandra/SettingsProviderTests.cpp
unittests/data/cassandra/SettingsProviderTests.cpp
unittests/DOSGuardTests.cpp
unittests/etl/AmendmentBlockHandlerTests.cpp
unittests/etl/AmendmentBlockHandlerTests.cpp
unittests/etl/CacheLoaderSettingsTests.cpp
unittests/etl/CacheLoaderTests.cpp
unittests/etl/CacheLoaderTests.cpp
unittests/etl/CursorProviderTests.cpp
unittests/etl/ETLStateTests.cpp
unittests/etl/ETLStateTests.cpp
unittests/etl/ExtractionDataPipeTests.cpp
unittests/etl/ExtractionDataPipeTests.cpp
unittests/etl/ExtractorTests.cpp
unittests/etl/ExtractorTests.cpp
unittests/etl/ForwardingCacheTests.cpp
unittests/etl/ForwardingSourceTests.cpp
unittests/etl/ForwardingSourceTests.cpp
unittests/etl/GrpcSourceTests.cpp
unittests/etl/GrpcSourceTests.cpp
unittests/etl/LedgerPublisherTests.cpp
unittests/etl/LedgerPublisherTests.cpp
unittests/etl/SourceTests.cpp
unittests/etl/SourceTests.cpp
unittests/etl/SubscriptionSourceDependenciesTests.cpp
unittests/etl/SubscriptionSourceDependenciesTests.cpp
unittests/etl/SubscriptionSourceTests.cpp
unittests/etl/SubscriptionSourceTests.cpp
unittests/etl/TransformerTests.cpp
# RPC
unittests/etl/TransformerTests.cpp
# RPC
unittests/feed/BookChangesFeedTests.cpp
unittests/feed/ForwardFeedTests.cpp
unittests/feed/LedgerFeedTests.cpp
unittests/feed/ProposedTransactionFeedTests.cpp
unittests/feed/SingleFeedBaseTests.cpp
unittests/feed/SubscriptionManagerTests.cpp
unittests/feed/TrackableSignalTests.cpp
unittests/feed/TransactionFeedTests.cpp
unittests/JsonUtilTests.cpp
unittests/LoggerTests.cpp
unittests/Main.cpp
unittests/Playground.cpp
unittests/ProfilerTests.cpp
unittests/rpc/AmendmentsTests.cpp
unittests/rpc/AmendmentsTests.cpp
unittests/rpc/APIVersionTests.cpp
unittests/rpc/APIVersionTests.cpp
unittests/rpc/BaseTests.cpp
unittests/rpc/BaseTests.cpp
unittests/rpc/CountersTests.cpp
unittests/rpc/CountersTests.cpp
unittests/rpc/ErrorTests.cpp
unittests/rpc/ForwardingProxyTests.cpp
unittests/rpc/JsonBoolTests.cpp
unittests/rpc/RPCHelpersTests.cpp
unittests/rpc/WorkQueueTests.cpp
# RPC handlers
unittests/rpc/handlers/AccountChannelsTests.cpp
unittests/rpc/handlers/AccountCurrenciesTests.cpp
unittests/rpc/handlers/AccountInfoTests.cpp
unittests/rpc/handlers/AccountLinesTests.cpp
unittests/rpc/handlers/AccountNFTsTests.cpp
unittests/rpc/handlers/AccountObjectsTests.cpp
unittests/rpc/handlers/AccountOffersTests.cpp
unittests/rpc/handlers/AccountTxTests.cpp
unittests/rpc/handlers/AMMInfoTests.cpp
unittests/rpc/handlers/BookChangesTests.cpp
unittests/rpc/handlers/BookOffersTests.cpp
unittests/rpc/handlers/DefaultProcessorTests.cpp
unittests/rpc/handlers/DepositAuthorizedTests.cpp
unittests/rpc/handlers/GatewayBalancesTests.cpp
unittests/rpc/handlers/LedgerDataTests.cpp
unittests/rpc/handlers/LedgerEntryTests.cpp
unittests/rpc/handlers/LedgerRangeTests.cpp
unittests/rpc/handlers/LedgerTests.cpp
unittests/rpc/handlers/NFTBuyOffersTests.cpp
unittests/rpc/handlers/NFTHistoryTests.cpp
unittests/rpc/handlers/NFTInfoTests.cpp
unittests/rpc/handlers/NFTsByIssuerTest.cpp
unittests/rpc/handlers/NFTSellOffersTests.cpp
unittests/rpc/handlers/NoRippleCheckTests.cpp
unittests/rpc/handlers/PingTests.cpp
unittests/rpc/handlers/RandomTests.cpp
unittests/rpc/handlers/ServerInfoTests.cpp
unittests/rpc/handlers/SubscribeTests.cpp
unittests/rpc/handlers/TestHandlerTests.cpp
unittests/rpc/handlers/TransactionEntryTests.cpp
unittests/rpc/handlers/TxTests.cpp
unittests/rpc/handlers/UnsubscribeTests.cpp
unittests/rpc/handlers/VersionHandlerTests.cpp
unittests/util/AssertTests.cpp
# Async framework
unittests/util/async/AnyExecutionContextTests.cpp
unittests/util/async/AnyOperationTests.cpp
unittests/util/async/AnyStopTokenTests.cpp
unittests/util/async/AnyStrandTests.cpp
unittests/util/async/AsyncExecutionContextTests.cpp
unittests/util/BatchingTests.cpp
unittests/util/LedgerUtilsTests.cpp
# Prometheus support
unittests/util/prometheus/CounterTests.cpp
unittests/util/prometheus/GaugeTests.cpp
unittests/util/prometheus/HistogramTests.cpp
unittests/util/prometheus/HttpTests.cpp
unittests/util/prometheus/LabelTests.cpp
unittests/util/prometheus/MetricBuilderTests.cpp
unittests/util/prometheus/MetricsFamilyTests.cpp
unittests/util/prometheus/OStreamTests.cpp
# Requests framework
unittests/util/requests/RequestBuilderTests.cpp
unittests/util/requests/SslContextTests.cpp
unittests/util/requests/WsConnectionTests.cpp
unittests/util/RetryTests.cpp
unittests/util/StringUtils.cpp
unittests/util/TestGlobals.cpp
unittests/util/TestHttpServer.cpp
unittests/util/TestObject.cpp
unittests/util/TestWsServer.cpp
unittests/util/TxUtilTests.cpp
unittests/web/AdminVerificationTests.cpp
unittests/web/RPCServerHandlerTests.cpp
unittests/web/ServerTests.cpp
unittests/web/SweepHandlerTests.cpp
unittests/web/WhitelistHandlerTests.cpp
)
include(CMake/deps/gtest.cmake)
# See https://github.com/google/googletest/issues/3475
gtest_discover_tests(clio_tests DISCOVERY_TIMEOUT 90)
# Fix for dwarf5 bug on ci
target_compile_options(clio PUBLIC -gdwarf-4)
target_compile_definitions(${TEST_TARGET} PUBLIC UNITTEST_BUILD)
target_include_directories(${TEST_TARGET} PRIVATE unittests)
target_link_libraries(${TEST_TARGET} PUBLIC clio gtest::gtest)
# Generate `coverage_report` target if coverage is enabled
if (coverage)
target_compile_definitions(${TEST_TARGET} PRIVATE COVERAGE_ENABLED)
if (DEFINED CODE_COVERAGE_REPORT_FORMAT)
set(CODE_COVERAGE_FORMAT ${CODE_COVERAGE_REPORT_FORMAT})
else ()
set(CODE_COVERAGE_FORMAT html-details)
endif ()
if (DEFINED CODE_COVERAGE_TESTS_ARGS)
set(TESTS_ADDITIONAL_ARGS ${CODE_COVERAGE_TESTS_ARGS})
separate_arguments(TESTS_ADDITIONAL_ARGS)
else ()
set(TESTS_ADDITIONAL_ARGS "")
endif ()
set(GCOVR_ADDITIONAL_ARGS --exclude-throw-branches -s)
setup_target_for_coverage_gcovr(
NAME
coverage_report
FORMAT
${CODE_COVERAGE_FORMAT}
EXECUTABLE
clio_tests
EXECUTABLE_ARGS
--gtest_brief=1
${TESTS_ADDITIONAL_ARGS}
EXCLUDE
"unittests"
DEPENDENCIES
clio_tests
)
endif ()
# Benchmarks
if (benchmark)
set(BENCH_TARGET clio_benchmarks)
add_executable(
${BENCH_TARGET}
# Common
benchmarks/Main.cpp benchmarks/Playground.cpp
# ExecutionContext
benchmarks/util/async/ExecutionContextBenchmarks.cpp
)
include(CMake/deps/gbench.cmake)
target_include_directories(${BENCH_TARGET} PRIVATE benchmarks)
target_link_libraries(${BENCH_TARGET} PUBLIC clio benchmark::benchmark_main)
endif ()
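# Example (illustrative): with `-o benchmark=True` passed to `conan install`, the benchmarks
# could be built and run along these lines:
#   cmake --build . --parallel 8 --target clio_benchmarks
#   ./clio_benchmarks --benchmark_filter="ExecutionContext"
# (the filter name is illustrative; see benchmarks/ for the registered benchmark names)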
# Enable selected sanitizer if enabled via `san`
if (san)
target_compile_options(
clio PUBLIC # Sanitizers recommend minimum of -O1 for reasonable performance
$<$<CONFIG:Debug>:-O1> ${SAN_FLAG} -fno-omit-frame-pointer
)
target_compile_definitions(
clio PUBLIC $<$<STREQUAL:${san},address>:SANITIZER=ASAN> $<$<STREQUAL:${san},thread>:SANITIZER=TSAN>
$<$<STREQUAL:${san},memory>:SANITIZER=MSAN> $<$<STREQUAL:${san},undefined>:SANITIZER=UBSAN>
)
target_link_libraries(clio INTERFACE ${SAN_FLAG} ${SAN_LIB})
endif ()
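# Example (illustrative): since the block above is guarded by the `san` variable, a sanitizer
# build could plausibly be configured by setting it as a CMake cache variable, assuming
# SAN_FLAG and SAN_LIB are set elsewhere in the build:
#   cmake -Dsan=address -DCMAKE_BUILD_TYPE=Debug ..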
# Generate `docs` target for doxygen documentation if enabled
# Note: use `make docs` to generate the documentation
if (docs)
add_subdirectory(docs)
endif ()
include(CMake/install/install.cmake)
if (packaging)
include(CMake/packaging.cmake) # This file exists only in build runner
endif ()


@@ -20,6 +20,12 @@ Please run the following command in order to use git hooks that are helpful for
```sh
git config --local core.hooksPath .githooks
```
## Git hooks dependencies
The pre-commit hook requires `clang-format >= 17.0.0` and `cmake-format` to be installed on your machine.
`clang-format` can be installed using `brew` on macOS or via the default package manager on Linux.
`cmake-format` can be installed using `pip`.
The hook will also attempt to use `doxygen` to verify that everything public in the codebase is covered by doc comments. If `doxygen` is not installed, the hook will raise a warning suggesting that you install `doxygen` for future commits.
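For example, on macOS the dependencies could be installed like so (commands illustrative; package availability may vary by platform):
```sh
brew install clang-format doxygen  # clang-format >= 17.0.0 is required
pip3 install cmake-format
```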
## Git commands
This section offers a detailed look at the git commands you will need to get your PR submitted.
Please note that there is more than one way to do this; these commands are provided for your convenience.
@@ -62,6 +68,11 @@
```sh
git commit --amend -S
git push --force
```
## Use ccache (optional)
Clio uses `ccache` to speed up compilation. If you want to use it, please make sure it is installed on your machine.
CMake will automatically detect it and use it if it is available.
## Fixing issues found during code review
While your code is in review, it's possible that some changes will be requested by reviewer(s).
This section describes the process of adding your fixes.
@@ -91,8 +102,14 @@ The button for that is near the bottom of the PR's page on GitHub.
This is a non-exhaustive list of recommended style guidelines. These are not always strictly enforced and serve as a way to keep the codebase coherent.
## Formatting
Code must conform to `clang-format` version 17, unless the result would be unreasonably difficult to read or maintain.
In most cases the pre-commit hook will take care of formatting and will fix any issues automatically.
To manually format your code, use `clang-format -i <your changed files>` for C++ files and `cmake-format -i <your changed files>` for CMake files.
## Documentation
All public namespaces, classes and functions must be covered by doc (`doxygen`) comments. Everything that is not within a nested `impl` namespace is considered public.
> **Note:** Keep in mind that this is enforced by Clio's CI and your build will fail if newly added public code lacks documentation.
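For instance, a public function would be documented along these lines (a hypothetical example; all names are illustrative):
```cpp
/**
 * @brief Computes the total fee for the given transaction.
 *
 * @param tx The transaction to compute the fee for
 * @return The total fee in drops
 */
uint64_t
computeFee(Transaction const& tx);
```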
## Avoid
* Proliferation of nearly identical code.
@@ -126,6 +143,7 @@ Existing maintainers can resign, or be subject to a vote for removal at the behe
* [cindyyan317](https://github.com/cindyyan317) (Ripple)
* [godexsoft](https://github.com/godexsoft) (Ripple)
* [kuznetsss](https://github.com/kuznetsss) (Ripple)
* [legleux](https://github.com/legleux) (Ripple)
## Honorable ex-Maintainers


@@ -1,16 +0,0 @@
PROJECT_NAME = "Clio"
INPUT = ../src ../unittests
EXCLUDE_PATTERNS = *Test*.cpp *Test*.h
RECURSIVE = YES
HAVE_DOT = YES
QUIET = YES
WARNINGS = NO
WARN_NO_PARAMDOC = NO
WARN_IF_INCOMPLETE_DOC = NO
WARN_IF_UNDOCUMENTED = NO
GENERATE_LATEX = NO
GENERATE_HTML = YES
SORT_MEMBERS_CTORS_1ST = YES

README.md

@@ -1,342 +1,48 @@
# <img src='./docs/img/xrpl-logo.svg' width='40' valign="top" /> Clio
[![Build status](https://github.com/XRPLF/clio/actions/workflows/build.yml/badge.svg?branch=develop)](https://github.com/XRPLF/clio/actions/workflows/build.yml?query=branch%3Adevelop)
[![Nightly release status](https://github.com/XRPLF/clio/actions/workflows/nightly.yml/badge.svg?branch=develop)](https://github.com/XRPLF/clio/actions/workflows/nightly.yml?query=branch%3Adevelop)
[![Clang-tidy checks status](https://github.com/XRPLF/clio/actions/workflows/clang-tidy.yml/badge.svg?branch=develop)](https://github.com/XRPLF/clio/actions/workflows/clang-tidy.yml?query=branch%3Adevelop)
[![Code coverage develop branch](https://codecov.io/gh/XRPLF/clio/branch/develop/graph/badge.svg?)](https://app.codecov.io/gh/XRPLF/clio)
Clio is an XRP Ledger API server optimized for RPC calls over WebSocket or JSON-RPC.
It stores validated historical ledger and transaction data in a more space efficient format, and uses up to 4 times less space than [rippled](https://github.com/XRPLF/rippled).
Clio does not connect to the peer-to-peer network. Instead, Clio extracts data from a group of specified rippled nodes. Running Clio requires access to at least one rippled node
from which data can be extracted. The rippled node does not need to be running on the same machine as Clio.
Clio can be configured to store data in [Apache Cassandra](https://cassandra.apache.org/_/index.html) or [ScyllaDB](https://www.scylladb.com/), enabling scalable read throughput.
Multiple Clio nodes can share access to the same dataset, which allows for a highly available cluster of Clio nodes without the need for redundant data storage or computation.
## 📡 Clio and `rippled`
Clio offers the full `rippled` API, with the caveat that Clio by default only returns validated data. This means that `ledger_index` defaults to `validated` instead of `current` for all requests. Other non-validated data, such as information about queued transactions, is also not returned.
Clio retrieves data from a designated group of `rippled` nodes instead of connecting to the peer-to-peer network.
For requests that require access to the peer-to-peer network, such as `fee` or `submit`, Clio automatically forwards the request to a `rippled` node and propagates the response back to the client. To access non-validated data for *any* request, simply add `ledger_index: "current"` to the request, and Clio will forward the request to `rippled`.
> [!NOTE]
> Clio requires access to at least one `rippled` node, which can run on the same machine as Clio or separately.
## 📚 Learn more about Clio
Below are some useful docs to learn more about Clio.
**For Developers**:
- [How to build Clio](./docs/build-clio.md)
- [Metrics and static analysis](./docs/metrics-and-static-analysis.md)
- [Coverage report](./docs/coverage-report.md)
**For Operators**:
- [How to configure Clio and rippled](./docs/configure-clio.md)
- [How to run Clio](./docs/run-clio.md)
- [Logging](./docs/logging.md)
**General reference material:**
- [API reference](https://xrpl.org/http-websocket-apis.html)
- [Clio documentation](https://xrpl.org/the-clio-server.html#the-clio-server)
## 🆘 Help
Feel free to open an [issue](https://github.com/XRPLF/clio/issues) if you have a feature request or something doesn't work as expected.
If you have any questions about building, running, contributing, using Clio, or anything else, you can always start a new [discussion](https://github.com/XRPLF/clio/discussions).
## Requirements
1. Access to a Cassandra cluster or ScyllaDB cluster. Can be local or remote.
2. Access to one or more rippled nodes. Can be local or remote.
## Building
Clio is built with CMake and uses Conan for managing dependencies.
It is written in C++20 and therefore requires a modern compiler.
## Prerequisites
### Minimum Requirements
- [Python 3.7](https://www.python.org/downloads/)
- [Conan 1.55](https://conan.io/downloads.html)
- [CMake 3.16](https://cmake.org/download/)
- [**Optional**] [GCovr](https://gcc.gnu.org/onlinedocs/gcc/Gcov.html) (needed for code coverage generation)
| Compiler | Version |
|-------------|---------|
| GCC | 11 |
| Clang | 14 |
| Apple Clang | 14.0.3 |
### Conan configuration
Clio does not require anything but default settings in your (`~/.conan/profiles/default`) Conan profile. It's best to have no extra flags specified.
> Mac example:
```
[settings]
os=Macos
os_build=Macos
arch=armv8
arch_build=armv8
compiler=apple-clang
compiler.version=14
compiler.libcxx=libc++
build_type=Release
compiler.cppstd=20
```
> Linux example:
```
[settings]
os=Linux
os_build=Linux
arch=x86_64
arch_build=x86_64
compiler=gcc
compiler.version=11
compiler.libcxx=libstdc++11
build_type=Release
compiler.cppstd=20
```
### Artifactory
1. Make sure artifactory is set up with Conan
```sh
conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
```
Now you should be able to download the prebuilt `xrpl` package on some platforms.
2. Remove old packages you may have cached:
```sh
conan remove -f xrpl
```
## Building Clio
Navigate to Clio's root directory and run:
```sh
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
```
If all goes well, `conan install` will find the required packages and `cmake` will do the rest. You should end up with `clio_server` and `clio_tests` in the `build` directory (the current directory).
> **Tip:** You can omit the `-o tests=True` in `conan install` command above if you don't want to build `clio_tests`.
> **Tip:** To generate a Code Coverage report, include `-o coverage=True` in the `conan install` command above, along with `-o tests=True` to enable tests. After running the `cmake` commands, execute `make clio_tests-ccov`. The coverage report will be found at `clio_tests-llvm-cov/index.html`.
## Running
```sh
./clio_server config.json
```
Clio needs access to a rippled server. The config files of rippled and Clio need
to be consistent with each other.
Clio needs to know:
- the IP of rippled
- the port on which rippled is accepting unencrypted WebSocket connections
- the port on which rippled is handling gRPC requests
rippled needs to open:
- a port to accept unencrypted websocket connections
- a port to handle gRPC requests, with the IP(s) of Clio specified in the `secure_gateway` entry
The example configs of rippled and Clio are set up such that minimal changes are
required. When running locally, the only change needed is to uncomment the `port_grpc`
section of the rippled config. When running Clio and rippled on separate machines,
in addition to uncommenting the `port_grpc` section, a few other steps must be taken:
1. change the `ip` of the first entry of `etl_sources` to the IP where your rippled
server is running
2. open a public, unencrypted WebSocket port on your rippled server
3. change the IP specified in `secure_gateway` of `port_grpc` section of the rippled config
to the IP of your Clio server. This entry can take the form of a comma-separated list if
you are running multiple Clio nodes.
In addition, the parameter `start_sequence` can be included and configured within the top level of the config file. This parameter specifies the sequence of the first ledger to extract if the database is empty. Note that ETL extracts ledgers in order and that no backfilling functionality currently exists, meaning Clio will not retroactively learn ledgers older than the one you specify. Choosing to specify this or not will yield the following behavior:
- If this setting is absent and the database is empty, ETL will start with the next ledger validated by the network.
- If this setting is present and the database is not empty, an exception is thrown.
In addition, the optional parameter `finish_sequence` can be added to the json file as well, specifying the sequence of the last ledger to extract.
To add `start_sequence` and/or `finish_sequence` to the config.json file appropriately, they will be on the same top level of precedence as other parameters (such as `database`, `etl_sources`, `read_only`, etc.) and be specified with an integer. Here is an example snippet from the config file:
```json
"start_sequence": 12345,
"finish_sequence": 54321
```
The parameters `ssl_cert_file` and `ssl_key_file` can also be added to the top level of precedence of our Clio config. `ssl_cert_file` specifies the filepath for your SSL cert while `ssl_key_file` specifies the filepath for your SSL key. It is up to you how to change ownership of these folders for your designated Clio user. Your options include:
- Copying the two files as root somewhere that's accessible by the Clio user, then running `sudo chown` to your user
- Changing the permissions directly so it's readable by your Clio user
- Running Clio as root (strongly discouraged)
An example of how to specify `ssl_cert_file` and `ssl_key_file` in the config:
```json
"server": {
"ip": "0.0.0.0",
"port": 51233
},
"ssl_cert_file": "/full/path/to/cert.file",
"ssl_key_file": "/full/path/to/key.file"
```
Once your config files are ready, start rippled and Clio. It doesn't matter which you
start first, and it's fine to stop one or the other and restart at any given time.
Clio will wait for rippled to sync before extracting any ledgers. If there is already
data in Clio's database, Clio will begin extraction with the ledger whose sequence
is one greater than the greatest sequence currently in the database. Clio will wait
for this ledger to be available. Be aware that the behavior of rippled is to sync to
the most recent ledger on the network, and then backfill. If Clio is extracting ledgers
from rippled, and then rippled is stopped for a significant amount of time and then restarted, rippled
will take time to backfill to the next ledger that Clio wants. The time it takes is proportional
to the amount of time rippled was offline for. Also be aware that how far rippled backfills
is dependent on the online_delete and ledger_history config values; if these values
are small, and rippled is stopped for a significant amount of time, rippled may never backfill
to the ledger that Clio wants. To avoid this situation, it is advised to keep history
proportional to the amount of time that you expect rippled to be offline. For example, if you
expect rippled to be offline for a few days from time to time, you should keep at least
a few days of history. If you expect rippled to never be offline, then you can keep a very small
amount of history.
Clio can use multiple rippled servers as a data source. Simply add more entries to
the `etl_sources` section. Clio will load balance requests across the servers specified
in this list. As long as one rippled server is up and synced, Clio will continue
extracting ledgers.
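For example, a two-source setup might look like this in the config (a sketch; IPs and ports are illustrative, see `example-config.json` for the exact schema):
```json
"etl_sources": [
    {
        "ip": "10.0.0.1",
        "ws_port": "6006",
        "grpc_port": "50051"
    },
    {
        "ip": "10.0.0.2",
        "ws_port": "6006",
        "grpc_port": "50051"
    }
]
```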
In contrast to rippled, Clio will answer RPC requests for the data already in the
database as soon as the server starts. Clio doesn't wait to sync to the network, or
for rippled to sync.
When starting Clio with a fresh database, Clio needs to download a ledger in full.
This can take some time, and depends on database throughput. With a moderately fast
database, this should take less than 10 minutes. If you did not properly set `secure_gateway`
in the `port_grpc` section of rippled, this step will fail. Once the first ledger
is fully downloaded, Clio only needs to extract the changed data for each ledger,
so extraction is much faster and Clio can keep up with rippled in real-time. Even under
intense load, Clio should not lag behind the network, as Clio is not processing the data,
and is simply writing to a database. The throughput of Clio is dependent on the throughput
of your database, but a standard Cassandra or Scylla deployment can handle
the write load of the XRP Ledger without any trouble. Generally the performance considerations
come on the read side, and depend on the number of RPC requests your Clio nodes
are serving. Be aware that very heavy read traffic can impact write throughput. Again, this
is on the database side, so if you are seeing this, upgrade your database.
It is possible to run multiple Clio nodes that share access to the same database.
The Clio nodes don't need to know about each other. You can simply spin up more Clio
nodes pointing to the same database as you wish, and shut them down as you wish.
On startup, each Clio node queries the database for the latest ledger. If this latest
ledger does not change for some time, the Clio node begins extracting ledgers
and writing to the database. If the Clio node detects a ledger that it is trying to
write has already been written, the Clio node will backoff and stop writing. If later
the Clio node sees no ledger written for some time, it will start writing again.
This algorithm ensures that at any given time, one and only one Clio node is writing
to the database.
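The handoff logic can be pictured with the following simplified sketch (illustrative only; all names are hypothetical and this is not Clio's actual implementation):
```cpp
#include <chrono>
#include <cstdint>
#include <optional>
#include <thread>

struct Database {
    std::optional<uint32_t> latestSequence();  // latest ledger sequence in the DB, if any
    bool writeLedger(uint32_t seq);            // returns false if `seq` was already written
};

void runNode(Database& db, std::chrono::seconds stallThreshold) {
    while (true) {
        auto const before = db.latestSequence();
        std::this_thread::sleep_for(stallThreshold);

        // No other node advanced the ledger while we waited: assume the writer role.
        if (db.latestSequence() == before) {
            auto seq = before.value_or(0) + 1;

            // Keep writing until another node wins the race for a ledger, then back off.
            while (db.writeLedger(seq))
                ++seq;
        }
    }
}
```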
It is possible to force Clio to only read data, and to never become a writer.
To do this, set `read_only: true` in the config. One common setup is to have a
small number of writer nodes that are inaccessible to clients, with several
read only nodes handling client requests. The number of read only nodes can be scaled
up or down in response to request volume.
When using multiple rippled servers as data sources and multiple Clio nodes,
each Clio node should use the same set of rippled servers as sources. The order doesn't matter.
The only reason not to do this is if you are running servers in different regions, and
you want the Clio nodes to extract from servers in their region. However, if you
are doing this, be aware that database traffic will be flowing across regions,
which can cause high latencies. A possible alternative to this is to just deploy
a database in each region, and the Clio nodes in each region use their region's database.
This is effectively two systems.
Clio supports API versioning as [described here](https://xrpl.org/request-formatting.html#api-versioning).
It's possible to configure `minimum`, `maximum` and `default` version like so:
```json
"api_version": {
"min": 1,
"max": 2,
"default": 1
}
```
All of the above are optional.
Clio will fall back to hardcoded defaults when these values are not specified in the config file, or when the configured values are outside
of the minimum and maximum supported versions hardcoded in `src/rpc/common/APIVersion.h`.
> **Note:** See `example-config.json` for more details.
## Admin rights for requests
By default, Clio checks admin privileges using the IP address of the request (only `127.0.0.1` is considered to be an admin).
This is not very secure because the IP could be spoofed.
For better security, an `admin_password` can be provided in the `server` section of Clio's config:
```json
"server": {
"admin_password": "secret"
}
```
If the password is present in the config, Clio will check the Authorization header (if any) in each request for the password.
The Authorization header should contain the type `Password` followed by the password from the config, e.g. `Password secret`.
Only an exactly matching password grants admin rights for the request or WebSocket connection.
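For example, an admin request over HTTP could look like this (a sketch; host and port are illustrative and come from the `server` section of your config):
```sh
curl \
  -H "Authorization: Password secret" \
  -d '{"method": "server_info", "params": [{}]}' \
  http://127.0.0.1:51233/
```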
## Prometheus metrics collection
Clio natively supports Prometheus metrics collection. It accepts Prometheus requests on the port configured in the `server` section of the config.
Prometheus metrics are enabled by default. To disable them, add `"prometheus_enabled": false` to the config.
It is important to know that Clio responds to Prometheus requests only if they are admin requests, so Prometheus should be configured to send the admin password in a header.
An example docker-compose file, along with Prometheus and Grafana configs, can be found in [examples/infrastructure](examples/infrastructure).
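A minimal Prometheus scrape config could then look like this (a sketch; it assumes the admin password `secret` and the illustrative port used above):
```yaml
scrape_configs:
  - job_name: "clio"
    static_configs:
      - targets: ["127.0.0.1:51233"]
    authorization:
      type: "Password"
      credentials: "secret"
```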
## Using clang-tidy for static analysis
The minimum clang-tidy version required is 16.0.
clang-tidy can be run by CMake while building the project.
To enable this, provide the option `-o lint=True` to the `conan install` command:
```sh
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=True
```
By default, CMake will try to find clang-tidy automatically on your system.
To force CMake to use a particular binary, set the `CLIO_CLANG_TIDY_BIN` environment variable to the path of the clang-tidy binary.
E.g.:
```sh
export CLIO_CLANG_TIDY_BIN=/opt/homebrew/opt/llvm@16/bin/clang-tidy
```
## Developing against `rippled` in standalone mode
If you wish to develop against a `rippled` instance running in standalone
mode there are a few quirks of both Clio and rippled you need to keep in mind.
You must:
1. Advance the `rippled` ledger to at least ledger 256
2. Wait 10 minutes before first starting clio against this standalone node.
## Logging
Clio provides several logging options, all of which are configurable via the config file and detailed below.
`log_level`: The minimum severity of log messages that will be output by default.
Severity options are `trace`, `debug`, `info`, `warning`, `error`, `fatal`. Defaults to `info`.
`log_format`: The format of log lines produced by Clio. Defaults to `"%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%"`.
Each of the variables expands as follows:
- `TimeStamp`: The full date and time of the log entry
- `SourceLocation`: A partial path to the c++ file and the line number in said file (`source/file/path:linenumber`)
- `ThreadID`: The ID of the thread the log entry is written from
- `Channel`: The channel that this log entry was sent to
- `Severity`: The severity (aka log level) the entry was sent at
- `Message`: The actual log message
`log_channels`: An array of json objects, each overriding properties for a logging `channel`.
At the moment of writing, only `log_level` can be overridden using this mechanism.
Each object is of this format:
```json
{
"channel": "Backend",
"log_level": "fatal"
}
```
If no override is present for a given channel, that channel will log at the severity specified by the global `log_level`.
Overridable log channels: `Backend`, `WebServer`, `Subscriptions`, `RPC`, `ETL` and `Performance`.
> **Note:** See `example-config.json` for more details.
`log_to_console`: Enable/disable log output to console. Options are `true`/`false`. Defaults to true.
`log_directory`: Path to the directory where log files are stored. If such directory doesn't exist, Clio will create it. If not specified, logs are not written to a file.
`log_rotation_size`: The max size of the log file in **megabytes** before it is rotated into a new file. Defaults to 2GB.
`log_directory_max_size`: The max size of the log directory in **megabytes** before old log files will be
deleted to free up space. Defaults to 50GB.
`log_rotation_hour_interval`: The time interval in **hours** after the last log rotation to automatically
rotate the current log file. Defaults to 12 hours.
Note that time-based log rotation works in tandem with size-based log rotation: whenever a
size-based log rotation occurs, the timer for the time-based rotation resets.
`log_tag_style`: Tag implementation to use. Must be one of:
- `uint`: Lock free and threadsafe but outputs just a simple unsigned integer
- `uuid`: Threadsafe and outputs a UUID tag
- `none`: Don't use tagging at all
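Putting several of these options together, a logging configuration could look like this (values illustrative; sizes are in megabytes as described above):
```json
"log_level": "info",
"log_to_console": true,
"log_directory": "/var/log/clio",
"log_rotation_size": 2048,
"log_directory_max_size": 51200,
"log_rotation_hour_interval": 12,
"log_tag_style": "uint",
"log_channels": [
    {
        "channel": "Backend",
        "log_level": "fatal"
    }
]
```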
## Cassandra / Scylla Administration
Since Clio relies on either Cassandra or Scylla for its database backend, here are some important considerations:
- Scylla, by default, will reserve all free RAM on a machine for itself. If you are running `rippled` or other services on the same machine, restrict its memory usage using the `--memory` argument: https://docs.scylladb.com/getting-started/scylla-in-a-shared-environment/
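For example, on a shared machine Scylla's memory could be capped at startup like so (value illustrative):
```sh
scylla --memory 4G
```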


@@ -1,23 +0,0 @@
# Release Notes
This document contains the release notes for `clio_server`, an XRP Ledger API Server.
To build and run `clio_server`, follow the instructions in [README.md](https://github.com/XRPLF/clio).
If you find issues or have a new idea, please open [an issue](https://github.com/XRPLF/clio/issues).
# Releases
## 0.1.0
Clio is an XRP Ledger API server. Clio is optimized for RPC calls, over websocket or JSON-RPC. Validated historical ledger and transaction data is stored in a more space efficient format, using up to 4 times less space than rippled.
Clio uses Cassandra or ScyllaDB, allowing for scalable read throughput. Multiple clio nodes can share access to the same dataset, allowing for a highly available cluster of clio nodes, without the need for redundant data storage or computation.
**0.1.0** is the first beta of Project Clio. It contains:
- `./src/backend` is the BackendInterface. This provides an abstraction for reading and writing information to a database.
- `./src/etl` is the ReportingETL. The classes in this folder are used to extract information from the P2P network and write it to a database, either locally or over the network.
- `./src/rpc` contains RPC handlers that are called by clients. These handlers should expose the same API as rippled.
- `./src/subscriptions` contains the SubscriptionManager. This manages publishing to clients subscribing to streams or accounts.
- `./src/webserver` contains a flex server that handles both http/s and ws/s traffic on a single port.
- `./unittests` simple unit tests that write to and read from a database to verify that the ETL works.

benchmarks/Main.cpp

@@ -0,0 +1,22 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2023, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#include <benchmark/benchmark.h>
BENCHMARK_MAIN();

benchmarks/Playground.cpp

@@ -0,0 +1,45 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2023, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
/*
* Use this file for temporary benchmarks and implementations.
* Usage example:
* ```
* ./clio_benchmarks
* --benchmark_time_unit=ms
* --benchmark_repetitions=10
* --benchmark_display_aggregates_only=true
* --benchmark_min_time=1x
* --benchmark_filter="Playground"
* ```
*
* Note: Please don't push your temporary work to the repo.
*/
// #include <benchmark/benchmark.h>
// static void
// benchmarkPlaygroundTest1(benchmark::State& state)
// {
// for (auto _ : state) {
// // ...
// }
// }
// BENCHMARK(benchmarkPlaygroundTest1);


@@ -0,0 +1,268 @@
//------------------------------------------------------------------------------
/*
This file is part of clio: https://github.com/XRPLF/clio
Copyright (c) 2024, the clio developers.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#include "etl/ETLHelpers.hpp"
#include "util/Random.hpp"
#include "util/async/AnyExecutionContext.hpp"
#include "util/async/AnyOperation.hpp"
#include "util/async/context/BasicExecutionContext.hpp"
#include "util/async/context/SyncExecutionContext.hpp"
#include <benchmark/benchmark.h>
#include <chrono>
#include <cstddef>
#include <cstdint>
#include <latch>
#include <optional>
#include <stdexcept>
#include <thread>
#include <vector>
using namespace util;
using namespace util::async;
class TestThread {
std::vector<std::thread> threads_;
etl::ThreadSafeQueue<std::optional<uint64_t>> q_;
etl::ThreadSafeQueue<uint64_t> res_;
public:
TestThread(std::vector<uint64_t> const& data) : q_(data.size()), res_(data.size())
{
for (auto el : data)
q_.push(el);
}
~TestThread()
{
for (auto& t : threads_) {
if (t.joinable())
t.join();
}
}
void
run(std::size_t numThreads)
{
std::latch completion{numThreads};
for (std::size_t i = 0; i < numThreads; ++i) {
q_.push(std::nullopt);
threads_.emplace_back([this, &completion]() { process(completion); });
}
completion.wait();
}
private:
void
process(std::latch& completion)
{
while (auto v = q_.pop()) {
if (not v.has_value())
break;
res_.push(v.value() * v.value());
}
completion.count_down(1);
}
};
template <typename CtxType>
class TestExecutionContextBatched {
etl::ThreadSafeQueue<std::optional<uint64_t>> q_;
etl::ThreadSafeQueue<uint64_t> res_;
std::size_t batchSize_;
public:
TestExecutionContextBatched(std::vector<uint64_t> const& data, std::size_t batchSize = 5000u)
: q_(data.size()), res_(data.size()), batchSize_(batchSize)
{
for (auto el : data)
q_.push(el);
}
void
run(std::size_t numThreads)
{
using OpType = typename CtxType::template StoppableOperation<void>;
CtxType ctx{numThreads};
std::vector<OpType> operations;
for (std::size_t i = 0; i < numThreads; ++i) {
q_.push(std::nullopt);
operations.push_back(ctx.execute(
[this](auto stopRequested) {
bool hasMore = true;
auto doOne = [this] {
auto v = q_.pop();
if (not v.has_value())
return false;
res_.push(v.value() * v.value());
return true;
};
while (not stopRequested and hasMore) {
for (std::size_t i = 0; i < batchSize_ and hasMore; ++i)
hasMore = doOne();
}
},
std::chrono::seconds{5}
));
}
for (auto& op : operations)
op.wait();
}
};
template <typename CtxType>
class TestAnyExecutionContextBatched {
etl::ThreadSafeQueue<std::optional<uint64_t>> q_;
etl::ThreadSafeQueue<uint64_t> res_;
std::size_t batchSize_;
public:
TestAnyExecutionContextBatched(std::vector<uint64_t> const& data, std::size_t batchSize = 5000u)
: q_(data.size()), res_(data.size()), batchSize_(batchSize)
{
for (auto el : data)
q_.push(el);
}
void
run(std::size_t numThreads)
{
CtxType ctx{numThreads};
AnyExecutionContext anyCtx{ctx};
std::vector<AnyOperation<void>> operations;
for (std::size_t i = 0; i < numThreads; ++i) {
q_.push(std::nullopt);
operations.push_back(anyCtx.execute(
[this](auto stopRequested) {
bool hasMore = true;
auto doOne = [this] {
auto v = q_.pop();
if (not v.has_value())
return false;
res_.push(v.value() * v.value());
return true;
};
while (not stopRequested and hasMore) {
for (std::size_t i = 0; i < batchSize_ and hasMore; ++i)
hasMore = doOne();
}
},
std::chrono::seconds{5}
));
}
for (auto& op : operations)
op.wait();
}
};
static auto
generateData()
{
constexpr auto TOTAL = 10'000;
std::vector<uint64_t> data;
data.reserve(TOTAL);
for (auto i = 0; i < TOTAL; ++i)
data.push_back(util::Random::uniform(1, 100'000'000));
return data;
}
static void
benchmarkThreads(benchmark::State& state)
{
auto data = generateData();
for (auto _ : state) {
TestThread t{data};
t.run(state.range(0));
}
}
template <typename CtxType>
void
benchmarkExecutionContextBatched(benchmark::State& state)
{
auto data = generateData();
for (auto _ : state) {
TestExecutionContextBatched<CtxType> t{data, state.range(1)};
t.run(state.range(0));
}
}
template <typename CtxType>
void
benchmarkAnyExecutionContextBatched(benchmark::State& state)
{
auto data = generateData();
for (auto _ : state) {
TestAnyExecutionContextBatched<CtxType> t{data, state.range(1)};
t.run(state.range(0));
}
}
// Simplest implementation using async queues and std::thread
BENCHMARK(benchmarkThreads)->Arg(1)->Arg(2)->Arg(4)->Arg(8);
// Same implementation using each of the available execution contexts
BENCHMARK(benchmarkExecutionContextBatched<PoolExecutionContext>)
->ArgsProduct({
{1, 2, 4, 8}, // threads
{500, 1000, 5000, 10000} // batch size
});
BENCHMARK(benchmarkExecutionContextBatched<CoroExecutionContext>)
->ArgsProduct({
{1, 2, 4, 8}, // threads
{500, 1000, 5000, 10000} // batch size
});
BENCHMARK(benchmarkExecutionContextBatched<SyncExecutionContext>)
->ArgsProduct({
{1, 2, 4, 8}, // threads
{500, 1000, 5000, 10000} // batch size
});
// Same implementations going thru AnyExecutionContext
BENCHMARK(benchmarkAnyExecutionContextBatched<PoolExecutionContext>)
->ArgsProduct({
{1, 2, 4, 8}, // threads
{500, 1000, 5000, 10000} // batch size
});
BENCHMARK(benchmarkAnyExecutionContextBatched<CoroExecutionContext>)
->ArgsProduct({
{1, 2, 4, 8}, // threads
{500, 1000, 5000, 10000} // batch size
});
BENCHMARK(benchmarkAnyExecutionContextBatched<SyncExecutionContext>)
->ArgsProduct({
{1, 2, 4, 8}, // threads
{500, 1000, 5000, 10000} // batch size
});


@@ -1,6 +1,6 @@
from conan import ConanFile
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
import re
class Clio(ConanFile):
name = 'clio'
@@ -11,8 +11,9 @@ class Clio(ConanFile):
settings = 'os', 'compiler', 'build_type', 'arch'
options = {
'fPIC': [True, False],
'verbose': [True, False],
'tests': [True, False], # build unit tests; create `clio_tests` binary
'benchmark': [True, False], # build benchmarks; create `clio_benchmarks` binary
'docs': [True, False], # doxygen API docs; create custom target 'docs'
'packaging': [True, False], # create distribution packages
'coverage': [True, False], # build for test coverage report; create custom target `clio_tests-ccov`
@@ -26,13 +27,15 @@ class Clio(ConanFile):
'protobuf/3.21.12',
'grpc/1.50.1',
'openssl/1.1.1u',
'xrpl/2.2.0-b1',
'libbacktrace/cci.20210118'
]
default_options = {
'fPIC': True,
'verbose': False,
'tests': False,
'benchmark': False,
'packaging': False,
'coverage': False,
'lint': False,
@@ -59,6 +62,8 @@ class Clio(ConanFile):
def requirements(self):
if self.options.tests:
self.requires('gtest/1.14.0')
if self.options.benchmark:
self.requires('benchmark/1.8.3')
def configure(self):
if self.settings.compiler == 'apple-clang':


@@ -40,10 +40,10 @@ RUN source /opt/rh/devtoolset-11/enable && cd /tmp/clio && \
RUN mkdir output
RUN strip clio/build/clio_server && strip clio/build/clio_tests
RUN cp clio/build/clio_tests output/ && cp clio/build/clio_server output/
RUN cp clio/docs/examples/config/example-config.json output/example-config.json
FROM centos:7
COPY --from=build /tmp/output /clio
RUN mkdir -p /opt/clio/etc && mv /clio/example-config.json /opt/clio/etc/config.json
CMD ["/clio/clio_server", "/opt/clio/etc/config.json"]

79
docker/ci/dockerfile Normal file
View File

@@ -0,0 +1,79 @@
FROM ubuntu:focal
ARG DEBIAN_FRONTEND=noninteractive
ARG TARGETARCH
SHELL ["/bin/bash", "-c"]
USER root
WORKDIR /root/
ENV GCC_VERSION=11 \
CCACHE_VERSION=4.8.3 \
LLVM_TOOLS_VERSION=17 \
GH_VERSION=2.40.0 \
DOXYGEN_VERSION=1.10.0
# Add repositories
RUN apt-get -qq update \
&& apt-get -qq install -y --no-install-recommends --no-install-suggests gnupg wget curl software-properties-common \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | apt-key add - \
&& apt-add-repository 'deb https://apt.kitware.com/ubuntu/ focal main' \
&& echo "deb http://apt.llvm.org/focal/ llvm-toolchain-focal-${LLVM_TOOLS_VERSION} main" >> /etc/apt/sources.list \
&& wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
# Install packages
RUN apt update -qq \
&& apt install -y --no-install-recommends --no-install-suggests cmake python3 python3-pip sudo git \
ninja-build make pkg-config libzstd-dev libzstd1 g++-${GCC_VERSION} flex bison jq graphviz \
clang-format-${LLVM_TOOLS_VERSION} clang-tidy-${LLVM_TOOLS_VERSION} clang-tools-${LLVM_TOOLS_VERSION} \
&& update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-${GCC_VERSION} 100 \
&& update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++-${GCC_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-${GCC_VERSION} 100 \
&& update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-${GCC_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcov gcov /usr/bin/gcov-${GCC_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-${GCC_VERSION} 100 \
&& update-alternatives --install /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-${GCC_VERSION} 100 \
&& update-alternatives --install /usr/bin/clang-format clang-format /usr/bin/clang-format-${LLVM_TOOLS_VERSION} 100 \
&& apt-get clean && apt remove -y software-properties-common \
&& pip3 install -q --upgrade --no-cache-dir pip \
&& pip3 install -q --no-cache-dir conan==1.62 gcovr cmake-format
WORKDIR /tmp
# Install ccache from source
RUN wget "https://github.com/ccache/ccache/releases/download/v${CCACHE_VERSION}/ccache-${CCACHE_VERSION}.tar.gz" \
&& tar xf "ccache-${CCACHE_VERSION}.tar.gz" \
&& cd "ccache-${CCACHE_VERSION}" \
&& mkdir build && cd build \
&& cmake -GNinja -DCMAKE_BUILD_TYPE=Release .. \
&& cmake --build . --target install
# Install doxygen from source
RUN wget "https://github.com/doxygen/doxygen/releases/download/Release_${DOXYGEN_VERSION//./_}/doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& tar xf "doxygen-${DOXYGEN_VERSION}.src.tar.gz" \
&& cd "doxygen-${DOXYGEN_VERSION}" \
&& mkdir build && cd build \
&& cmake -GNinja -DCMAKE_BUILD_TYPE=Release .. \
&& cmake --build . --target install
# Install gh
RUN wget https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
&& tar xf gh_${GH_VERSION}_linux_${TARGETARCH}.tar.gz \
&& mv gh_${GH_VERSION}_linux_${TARGETARCH}/bin/gh /usr/bin/gh
# Clean up
RUN rm -rf /tmp/* /var/tmp/*
WORKDIR /root/
# Using root by default is not very secure but github checkout action doesn't work with any other user
# https://github.com/actions/checkout/issues/956
# And Github Actions doc recommends using root
# https://docs.github.com/en/actions/creating-actions/dockerfile-support-for-github-actions#user
# Setup conan
RUN conan profile new default --detect \
&& conan profile update settings.compiler.cppstd=20 default \
&& conan profile update settings.compiler.libcxx=libstdc++11 default \
&& conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod

docs/CMakeLists.txt

@@ -0,0 +1,8 @@
cmake_minimum_required(VERSION 3.16.3)
project(docs)
include(${CMAKE_CURRENT_SOURCE_DIR}/../CMake/ClioVersion.cmake)
# Generate `docs` target for doxygen documentation
# Note: use `cmake --build . --target docs` from your `build` directory to generate the documentation
include(${CMAKE_CURRENT_SOURCE_DIR}/../CMake/Docs.cmake)

docs/Doxyfile

@@ -0,0 +1,44 @@
PROJECT_NAME = "Clio"
PROJECT_LOGO = ${SOURCE}/docs/img/xrpl-logo.svg
PROJECT_NUMBER = ${DOC_CLIO_VERSION}
PROJECT_BRIEF = The XRP Ledger API server.
EXTRACT_ALL = NO
EXTRACT_PRIVATE = NO
EXTRACT_PACKAGE = YES
EXTRACT_STATIC = YES
EXTRACT_LOCAL_CLASSES = NO
EXTRACT_ANON_NSPACES = NO
SORT_MEMBERS_CTORS_1ST = YES
INPUT = ${SOURCE}/src
EXCLUDE_SYMBOLS = ${EXCLUDES}
RECURSIVE = YES
HAVE_DOT = ${USE_DOT}
QUIET = YES
WARNINGS = ${LINT}
WARN_NO_PARAMDOC = ${LINT}
WARN_IF_INCOMPLETE_DOC = ${LINT}
WARN_IF_UNDOCUMENTED = ${LINT}
GENERATE_LATEX = NO
GENERATE_HTML = YES
SORT_MEMBERS_CTORS_1ST = YES
GENERATE_TREEVIEW = YES
DISABLE_INDEX = NO
FULL_SIDEBAR = NO
HTML_HEADER = ${SOURCE}/docs/doxygen-awesome-theme/header.html
HTML_EXTRA_STYLESHEET = ${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome.css \
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-sidebar-only.css \
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-sidebar-only-darkmode-toggle.css
HTML_EXTRA_FILES = ${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-darkmode-toggle.js \
${SOURCE}/docs/doxygen-awesome-theme/doxygen-awesome-interactive-toc.js
HTML_COLORSTYLE = LIGHT
HTML_COLORSTYLE_HUE = 209
HTML_COLORSTYLE_SAT = 255
HTML_COLORSTYLE_GAMMA = 113

docs/build-clio.md

@@ -0,0 +1,109 @@
# How to build Clio
Clio is built with [CMake](https://cmake.org/) and uses [Conan](https://conan.io/) for managing dependencies. It is written in C++20 and therefore requires a modern compiler.
## Minimum Requirements
- [Python 3.7](https://www.python.org/downloads/)
- [Conan 1.55](https://conan.io/downloads.html)
- [CMake 3.16](https://cmake.org/download/)
- [**Optional**] [GCovr](https://gcc.gnu.org/onlinedocs/gcc/Gcov.html): needed for code coverage generation
- [**Optional**] [CCache](https://ccache.dev/): speeds up compilation if you are going to compile Clio often
| Compiler | Version |
|-------------|---------|
| GCC | 11 |
| Clang | 14 |
| Apple Clang | 14.0.3 |
### Conan Configuration
Clio does not require anything but the default settings in your Conan profile (`~/.conan/profiles/default`). It's best to have no extra flags specified.
> Mac example:
```
[settings]
os=Macos
os_build=Macos
arch=armv8
arch_build=armv8
compiler=apple-clang
compiler.version=14
compiler.libcxx=libc++
build_type=Release
compiler.cppstd=20
```
> Linux example:
```
[settings]
os=Linux
os_build=Linux
arch=x86_64
arch_build=x86_64
compiler=gcc
compiler.version=11
compiler.libcxx=libstdc++11
build_type=Release
compiler.cppstd=20
```
#### Artifactory
Make sure artifactory is set up with Conan.
```sh
conan remote add --insert 0 conan-non-prod http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
```
Now you should be able to download the prebuilt `xrpl` package on some platforms.
> [!NOTE]
> You may need to edit the `~/.conan/remotes.json` file to ensure that this newly added artifactory is listed last. Otherwise, you could see compilation errors when building the project with gcc version 13 (or newer).
Remove old packages you may have cached.
```sh
conan remove -f xrpl
```
## Building Clio
Navigate to Clio's root directory and run:
```sh
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
```
> [!TIP]
> You can omit the `-o tests=True` if you don't want to build `clio_tests`.
If successful, `conan install` will find the required packages and `cmake` will do the rest. You should see `clio_server` and `clio_tests` in the `build` directory (the current directory).
> [!TIP]
> To generate a Code Coverage report, include `-o coverage=True` in the `conan install` command above, along with `-o tests=True` to enable tests. After running the `cmake` commands, execute `make clio_tests-ccov`. The coverage report will be found at `clio_tests-llvm-cov/index.html`.
## Building Clio with Docker
It is also possible to build Clio using [Docker](https://www.docker.com/) if you don't want to install all the dependencies on your machine.
```sh
docker run -it rippleci/clio_ci:latest
git clone https://github.com/XRPLF/clio && cd clio
mkdir build && cd build
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=False
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
cmake --build . --parallel 8 # or without the number if you feel extra adventurous
```
## Developing against `rippled` in standalone mode
If you wish to develop against a `rippled` instance running in standalone mode there are a few quirks of both Clio and `rippled` that you need to keep in mind. You must:
1. Advance the `rippled` ledger to at least ledger 256.
2. Wait 10 minutes before first starting Clio against this standalone node.
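For example, with a local standalone `rippled` the ledger could be advanced manually (a sketch; the config path is illustrative):
```sh
rippled -a --conf /path/to/rippled.cfg &   # start rippled in standalone mode
for i in $(seq 1 256); do
    rippled --conf /path/to/rippled.cfg ledger_accept   # advance the ledger by one
done
```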

docs/configure-clio.md

@@ -0,0 +1,99 @@
# How to configure Clio and `rippled`
## Ports
Clio needs access to a `rippled` server in order to work. The following configurations are required for Clio and `rippled` to communicate:
1. In the Clio config file, provide the following:
- The IP of the `rippled` server
- The port on which `rippled` is accepting unencrypted WebSocket connections
- The port on which `rippled` is handling gRPC requests
2. In the `rippled` config file, you need to open:
- A port to accept unencrypted WebSocket connections
- A port to handle gRPC requests, with the IP(s) of Clio specified in the `secure_gateway` entry
The example configs of [rippled](https://github.com/XRPLF/rippled/blob/develop/cfg/rippled-example.cfg) and [Clio](../docs/examples/config/example-config.json) are set up in a way that minimal changes are required.
When running locally, the only change needed is to uncomment the `port_grpc` section of the `rippled` config.
If you're running Clio and `rippled` on separate machines, in addition to uncommenting the `port_grpc` section, a few other steps must be taken:
1. Change the `ip` in `etl_sources` to the IP where your `rippled` server is running.
2. Open a public, unencrypted WebSocket port on your `rippled` server.
3. In the `rippled` config, change the IP specified for `secure_gateway`, under the `port_grpc` section, to the IP of your Clio server. This entry can take the form of a comma-separated list if you are running multiple Clio nodes.
## Ledger sequence
The parameter `start_sequence` can be included and configured within the top level of the config file. This parameter specifies the sequence of the first ledger to extract if the database is empty.
Note that ETL extracts ledgers in order, and backfilling functionality currently doesn't exist. This means Clio does not retroactively learn ledgers older than the one you specify. Choosing to specify this or not will yield the following behavior:
- If this setting is absent and the database is empty, ETL will start with the next ledger validated by the network.
- If this setting is present and the database is not empty, an exception is thrown.
In addition, the optional parameter `finish_sequence` can be added to the json file as well, specifying the sequence of the last ledger to extract.
To add `start_sequence` and/or `finish_sequence` to the `config.json` file appropriately, they must be on the same top level of precedence as other parameters (i.e., `database`, `etl_sources`, `read_only`) and be specified with an integer.
Here is an example snippet from the config file:
```json
"start_sequence": 12345,
"finish_sequence": 54321
```
## SSL
The parameters `ssl_cert_file` and `ssl_key_file` can also be added to the top level of precedence of our Clio config. The `ssl_cert_file` field specifies the filepath for your SSL cert, while `ssl_key_file` specifies the filepath for your SSL key. It is up to you how to change ownership of these folders for your designated Clio user.
Your options include:
- Copying the two files as root somewhere that's accessible by the Clio user, then running `sudo chown` to your user
- Changing the permissions directly so it's readable by your Clio user
- Running Clio as root (strongly discouraged)
Here is an example of how to specify `ssl_cert_file` and `ssl_key_file` in the config:
```json
"server": {
"ip": "0.0.0.0",
"port": 51233
},
"ssl_cert_file": "/full/path/to/cert.file",
"ssl_key_file": "/full/path/to/key.file"
```
## Admin rights for requests
By default Clio checks admin privileges by IP address from requests (only `127.0.0.1` is considered to be an admin). This is not very secure because the IP could be spoofed. For better security, an `admin_password` can be provided in the `server` section of Clio's config:
```json
"server": {
"admin_password": "secret"
}
```
If the password is present in the config, Clio will check the Authorization header (if any) in each request for the password. The Authorization header should contain the type `Password` followed by the password from the config (e.g. `Password secret`).
Only an exactly matching password grants admin rights for the request or a WebSocket connection.
## ETL sources forwarding cache
Clio can cache requests to ETL sources to reduce the load on them.
Only the following commands are cached: `server_info`, `server_state`, `server_definitions`, `fee`, `ledger_closed`.
By default the forwarding cache is off.
To enable caching for a source, add a `forwarding_cache_timeout` value to the configuration file, e.g.:
```json
"forwarding_cache_timeout": 0.250,
```
`forwarding_cache_timeout` defines how long (in seconds) a cache entry remains valid after being placed into the cache.
A value of zero turns the cache off.

docs/coverage-report.md

@@ -0,0 +1,42 @@
# Coverage report
The coverage report is intended for developers using GCC or Clang (including Apple Clang) as their compiler. It is generated by the build target `coverage_report`, which is only enabled when both the `tests` and `coverage` options are set (e.g., with `-o coverage=True -o tests=True` in `conan`).
## Prerequisites
To generate the coverage report you need:
- [gcovr tool](https://gcovr.com/en/stable/getting-started.html) (can be installed e.g. with `pip install gcovr`)
- `gcov` for GCC (installed with the compiler by default)
- `llvm-cov` for Clang (installed with the compiler by default, also on Apple)
- `Debug` build type
## Creating the coverage report
The coverage report is created when the following steps are completed, in order:
1. `clio_tests` binary built with the instrumentation data, enabled by the `coverage`
option mentioned above.
2. Completed run of unit tests, which populates coverage capture data.
3. Completed run of `gcovr` tool, which internally invokes either `gcov` or `llvm-cov`
to assemble both instrumentation data and coverage capture data into a coverage report.
The above steps are automated into a single target `coverage_report`. The instrumented `clio_tests` binary can also be used for running regular unit tests.
In case of a spurious failure of unit tests, it is possible to re-run the `coverage_report` target without rebuilding the `clio_tests` binary (since it is simply a dependency of the coverage report target).
The default coverage report format is `html-details`, but developers can override it to any of the formats listed in `CMake/CodeCoverage.cmake` by setting the `CODE_COVERAGE_REPORT_FORMAT` variable in `cmake`. For example, CI sets this parameter to `xml` for the [codecov](https://codecov.io) integration.
If some unit tests predictably fail (e.g., due to the absence of a Cassandra database), it is possible to set unit-test options in the `CODE_COVERAGE_TESTS_ARGS` cmake variable, as demonstrated below:
```sh
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug -o tests=True -o coverage=True
cmake -DCODE_COVERAGE_REPORT_FORMAT=json-details -DCMAKE_BUILD_TYPE=Debug -DCODE_COVERAGE_TESTS_ARGS="--gtest_filter=-BackendCassandra*" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage_report
```
After the `coverage_report` target completes, the generated coverage report is stored inside the build directory as either:
- a file named `coverage_report.*`, with an extension suitable for the report format, or
- a directory named `coverage_report`, containing `index.html` and other files, for the `html-details` or `html-nested` report formats.
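For the default `html-details` format, the report can then be opened in a browser, e.g. (assuming the `.build` directory from the example above; `xdg-open` is Linux-specific, use `open` on macOS):
```sh
xdg-open .build/coverage_report/index.html
```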


@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 - 2023 jothepro
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -0,0 +1,157 @@
/**
Doxygen Awesome
https://github.com/jothepro/doxygen-awesome-css
MIT License
Copyright (c) 2021 - 2023 jothepro
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
class DoxygenAwesomeDarkModeToggle extends HTMLElement {
// SVG icons from https://fonts.google.com/icons
// Licensed under the Apache 2.0 license:
// https://www.apache.org/licenses/LICENSE-2.0.html
static lightModeIcon = `<svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24px" viewBox="0 0 24 24" width="24px" fill="#FCBF00"><rect fill="none" height="24" width="24"/><circle cx="12" cy="12" opacity=".3" r="3"/><path d="M12,9c1.65,0,3,1.35,3,3s-1.35,3-3,3s-3-1.35-3-3S10.35,9,12,9 M12,7c-2.76,0-5,2.24-5,5s2.24,5,5,5s5-2.24,5-5 S14.76,7,12,7L12,7z M2,13l2,0c0.55,0,1-0.45,1-1s-0.45-1-1-1l-2,0c-0.55,0-1,0.45-1,1S1.45,13,2,13z M20,13l2,0c0.55,0,1-0.45,1-1 s-0.45-1-1-1l-2,0c-0.55,0-1,0.45-1,1S19.45,13,20,13z M11,2v2c0,0.55,0.45,1,1,1s1-0.45,1-1V2c0-0.55-0.45-1-1-1S11,1.45,11,2z M11,20v2c0,0.55,0.45,1,1,1s1-0.45,1-1v-2c0-0.55-0.45-1-1-1C11.45,19,11,19.45,11,20z M5.99,4.58c-0.39-0.39-1.03-0.39-1.41,0 c-0.39,0.39-0.39,1.03,0,1.41l1.06,1.06c0.39,0.39,1.03,0.39,1.41,0s0.39-1.03,0-1.41L5.99,4.58z M18.36,16.95 c-0.39-0.39-1.03-0.39-1.41,0c-0.39,0.39-0.39,1.03,0,1.41l1.06,1.06c0.39,0.39,1.03,0.39,1.41,0c0.39-0.39,0.39-1.03,0-1.41 L18.36,16.95z M19.42,5.99c0.39-0.39,0.39-1.03,0-1.41c-0.39-0.39-1.03-0.39-1.41,0l-1.06,1.06c-0.39,0.39-0.39,1.03,0,1.41 s1.03,0.39,1.41,0L19.42,5.99z M7.05,18.36c0.39-0.39,0.39-1.03,0-1.41c-0.39-0.39-1.03-0.39-1.41,0l-1.06,1.06 c-0.39,0.39-0.39,1.03,0,1.41s1.03,0.39,1.41,0L7.05,18.36z"/></svg>`
static darkModeIcon = `<svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24px" viewBox="0 0 24 24" width="24px" fill="#FE9700"><rect fill="none" height="24" width="24"/><path d="M9.37,5.51C9.19,6.15,9.1,6.82,9.1,7.5c0,4.08,3.32,7.4,7.4,7.4c0.68,0,1.35-0.09,1.99-0.27 C17.45,17.19,14.93,19,12,19c-3.86,0-7-3.14-7-7C5,9.07,6.81,6.55,9.37,5.51z" opacity=".3"/><path d="M9.37,5.51C9.19,6.15,9.1,6.82,9.1,7.5c0,4.08,3.32,7.4,7.4,7.4c0.68,0,1.35-0.09,1.99-0.27C17.45,17.19,14.93,19,12,19 c-3.86,0-7-3.14-7-7C5,9.07,6.81,6.55,9.37,5.51z M12,3c-4.97,0-9,4.03-9,9s4.03,9,9,9s9-4.03,9-9c0-0.46-0.04-0.92-0.1-1.36 c-0.98,1.37-2.58,2.26-4.4,2.26c-2.98,0-5.4-2.42-5.4-5.4c0-1.81,0.89-3.42,2.26-4.4C12.92,3.04,12.46,3,12,3L12,3z"/></svg>`
static title = "Toggle Light/Dark Mode"
static prefersLightModeInDarkModeKey = "prefers-light-mode-in-dark-mode"
static prefersDarkModeInLightModeKey = "prefers-dark-mode-in-light-mode"
static _staticConstructor = function() {
DoxygenAwesomeDarkModeToggle.enableDarkMode(DoxygenAwesomeDarkModeToggle.userPreference)
// Update the color scheme when the browser's preference changes
// without user interaction on the website.
window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', event => {
DoxygenAwesomeDarkModeToggle.onSystemPreferenceChanged()
})
// Update the color scheme when the tab is made visible again.
// It is possible that the appearance was changed in another tab
// while this tab was in the background.
document.addEventListener("visibilitychange", visibilityState => {
if (document.visibilityState === 'visible') {
DoxygenAwesomeDarkModeToggle.onSystemPreferenceChanged()
}
});
}()
static init() {
$(function() {
$(document).ready(function() {
const toggleButton = document.createElement('doxygen-awesome-dark-mode-toggle')
toggleButton.title = DoxygenAwesomeDarkModeToggle.title
toggleButton.updateIcon()
window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', event => {
toggleButton.updateIcon()
})
document.addEventListener("visibilitychange", visibilityState => {
if (document.visibilityState === 'visible') {
toggleButton.updateIcon()
}
});
$(document).ready(function(){
document.getElementById("MSearchBox").parentNode.appendChild(toggleButton)
})
$(window).resize(function(){
document.getElementById("MSearchBox").parentNode.appendChild(toggleButton)
})
})
})
}
constructor() {
super();
this.onclick=this.toggleDarkMode
}
/**
* @returns `true` for dark-mode, `false` for light-mode system preference
*/
static get systemPreference() {
return window.matchMedia('(prefers-color-scheme: dark)').matches
}
/**
* @returns `true` for dark-mode, `false` for light-mode user preference
*/
static get userPreference() {
return (!DoxygenAwesomeDarkModeToggle.systemPreference && localStorage.getItem(DoxygenAwesomeDarkModeToggle.prefersDarkModeInLightModeKey)) ||
(DoxygenAwesomeDarkModeToggle.systemPreference && !localStorage.getItem(DoxygenAwesomeDarkModeToggle.prefersLightModeInDarkModeKey))
}
static set userPreference(userPreference) {
DoxygenAwesomeDarkModeToggle.darkModeEnabled = userPreference
if(!userPreference) {
if(DoxygenAwesomeDarkModeToggle.systemPreference) {
localStorage.setItem(DoxygenAwesomeDarkModeToggle.prefersLightModeInDarkModeKey, true)
} else {
localStorage.removeItem(DoxygenAwesomeDarkModeToggle.prefersDarkModeInLightModeKey)
}
} else {
if(!DoxygenAwesomeDarkModeToggle.systemPreference) {
localStorage.setItem(DoxygenAwesomeDarkModeToggle.prefersDarkModeInLightModeKey, true)
} else {
localStorage.removeItem(DoxygenAwesomeDarkModeToggle.prefersLightModeInDarkModeKey)
}
}
DoxygenAwesomeDarkModeToggle.onUserPreferenceChanged()
}
static enableDarkMode(enable) {
if(enable) {
DoxygenAwesomeDarkModeToggle.darkModeEnabled = true
document.documentElement.classList.add("dark-mode")
document.documentElement.classList.remove("light-mode")
} else {
DoxygenAwesomeDarkModeToggle.darkModeEnabled = false
document.documentElement.classList.remove("dark-mode")
document.documentElement.classList.add("light-mode")
}
}
static onSystemPreferenceChanged() {
DoxygenAwesomeDarkModeToggle.darkModeEnabled = DoxygenAwesomeDarkModeToggle.userPreference
DoxygenAwesomeDarkModeToggle.enableDarkMode(DoxygenAwesomeDarkModeToggle.darkModeEnabled)
}
static onUserPreferenceChanged() {
DoxygenAwesomeDarkModeToggle.enableDarkMode(DoxygenAwesomeDarkModeToggle.darkModeEnabled)
}
toggleDarkMode() {
DoxygenAwesomeDarkModeToggle.userPreference = !DoxygenAwesomeDarkModeToggle.userPreference
this.updateIcon()
}
updateIcon() {
if(DoxygenAwesomeDarkModeToggle.darkModeEnabled) {
this.innerHTML = DoxygenAwesomeDarkModeToggle.darkModeIcon
} else {
this.innerHTML = DoxygenAwesomeDarkModeToggle.lightModeIcon
}
}
}
customElements.define("doxygen-awesome-dark-mode-toggle", DoxygenAwesomeDarkModeToggle);


@@ -0,0 +1,81 @@
/**
Doxygen Awesome
https://github.com/jothepro/doxygen-awesome-css
MIT License
Copyright (c) 2022 - 2023 jothepro
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
class DoxygenAwesomeInteractiveToc {
static topOffset = 38
static hideMobileMenu = true
static headers = []
static init() {
window.addEventListener("load", () => {
let toc = document.querySelector(".contents > .toc")
if(toc) {
toc.classList.add("interactive")
if(!DoxygenAwesomeInteractiveToc.hideMobileMenu) {
toc.classList.add("open")
}
document.querySelector(".contents > .toc > h3")?.addEventListener("click", () => {
if(toc.classList.contains("open")) {
toc.classList.remove("open")
} else {
toc.classList.add("open")
}
})
document.querySelectorAll(".contents > .toc > ul a").forEach((node) => {
let id = node.getAttribute("href").substring(1)
DoxygenAwesomeInteractiveToc.headers.push({
node: node,
headerNode: document.getElementById(id)
})
document.getElementById("doc-content")?.addEventListener("scroll", () => {
DoxygenAwesomeInteractiveToc.update()
})
})
DoxygenAwesomeInteractiveToc.update()
}
})
}
static update() {
let active = DoxygenAwesomeInteractiveToc.headers[0]?.node
DoxygenAwesomeInteractiveToc.headers.forEach((header) => {
let position = header.headerNode.getBoundingClientRect().top
header.node.classList.remove("active")
header.node.classList.remove("aboveActive")
if(position < DoxygenAwesomeInteractiveToc.topOffset) {
active = header.node
active?.classList.add("aboveActive")
}
})
active?.classList.add("active")
active?.classList.remove("aboveActive")
}
}


@@ -0,0 +1,40 @@
/**
Doxygen Awesome
https://github.com/jothepro/doxygen-awesome-css
MIT License
Copyright (c) 2021 - 2023 jothepro
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
@media screen and (min-width: 768px) {
#MSearchBox {
width: calc(var(--side-nav-fixed-width) - calc(2 * var(--spacing-medium)) - var(--searchbar-height) - 1px);
}
#MSearchField {
width: calc(var(--side-nav-fixed-width) - calc(2 * var(--spacing-medium)) - 66px - var(--searchbar-height));
}
}


@@ -0,0 +1,116 @@
/**
Doxygen Awesome
https://github.com/jothepro/doxygen-awesome-css
MIT License
Copyright (c) 2021 - 2023 jothepro
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
html {
/* side nav width. MUST be = `TREEVIEW_WIDTH`.
* Make sure it is wide enough to contain the page title (logo + title + version)
*/
--side-nav-fixed-width: 335px;
--menu-display: none;
--top-height: 120px;
--toc-sticky-top: -25px;
--toc-max-height: calc(100vh - 2 * var(--spacing-medium) - 25px);
}
#projectname {
white-space: nowrap;
}
@media screen and (min-width: 768px) {
html {
--searchbar-background: var(--page-background-color);
}
#side-nav {
min-width: var(--side-nav-fixed-width);
max-width: var(--side-nav-fixed-width);
top: var(--top-height);
overflow: visible;
}
#nav-tree, #side-nav {
height: calc(100vh - var(--top-height)) !important;
}
#nav-tree {
padding: 0;
}
#top {
display: block;
border-bottom: none;
height: var(--top-height);
margin-bottom: calc(0px - var(--top-height));
max-width: var(--side-nav-fixed-width);
overflow: hidden;
background: var(--side-nav-background);
}
#main-nav {
float: left;
padding-right: 0;
}
.ui-resizable-handle {
cursor: default;
width: 1px !important;
background: var(--separator-color);
box-shadow: 0 calc(-2 * var(--top-height)) 0 0 var(--separator-color);
}
#nav-path {
position: fixed;
right: 0;
left: var(--side-nav-fixed-width);
bottom: 0;
width: auto;
}
#doc-content {
height: calc(100vh - 31px) !important;
padding-bottom: calc(3 * var(--spacing-large));
padding-top: calc(var(--top-height) - 80px);
box-sizing: border-box;
margin-left: var(--side-nav-fixed-width) !important;
}
#MSearchBox {
width: calc(var(--side-nav-fixed-width) - calc(2 * var(--spacing-medium)));
}
#MSearchField {
width: calc(var(--side-nav-fixed-width) - calc(2 * var(--spacing-medium)) - 65px);
}
#MSearchResultsWindow {
left: var(--spacing-medium) !important;
right: auto;
}
}

File diff suppressed because it is too large


@@ -0,0 +1,82 @@
<!-- HTML header for doxygen 1.9.7-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="$langISO">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=11"/>
<meta name="generator" content="Doxygen $doxygenversion"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<!--BEGIN PROJECT_NAME--><title>$projectname: $title</title><!--END PROJECT_NAME-->
<!--BEGIN !PROJECT_NAME--><title>$title</title><!--END !PROJECT_NAME-->
<link href="$relpath^tabs.css" rel="stylesheet" type="text/css"/>
<!--BEGIN DISABLE_INDEX-->
<!--BEGIN FULL_SIDEBAR-->
<script type="text/javascript">var page_layout=1;</script>
<!--END FULL_SIDEBAR-->
<!--END DISABLE_INDEX-->
<script type="text/javascript" src="$relpath^jquery.js"></script>
<script type="text/javascript" src="$relpath^dynsections.js"></script>
$treeview
$search
$mathjax
$darkmode
<link href="$relpath^$stylesheet" rel="stylesheet" type="text/css" />
$extrastylesheet
<script type="text/javascript" src="$relpath^doxygen-awesome-darkmode-toggle.js"></script>
<script type="text/javascript">
DoxygenAwesomeDarkModeToggle.init()
</script>
<script type="text/javascript" src="$relpath^doxygen-awesome-interactive-toc.js"></script>
<script type="text/javascript">
DoxygenAwesomeInteractiveToc.init()
</script>
</head>
<body>
<!--BEGIN DISABLE_INDEX-->
<!--BEGIN FULL_SIDEBAR-->
<div id="side-nav" class="ui-resizable side-nav-resizable"><!-- do not remove this div, it is closed by doxygen! -->
<!--END FULL_SIDEBAR-->
<!--END DISABLE_INDEX-->
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<!--BEGIN TITLEAREA-->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr id="projectrow">
<!--BEGIN PROJECT_LOGO-->
<td id="projectlogo"><img alt="Logo" src="$relpath^$projectlogo"/></td>
<!--END PROJECT_LOGO-->
<!--BEGIN PROJECT_NAME-->
<td id="projectalign">
<div id="projectname">$projectname<!--BEGIN PROJECT_NUMBER--><span id="projectnumber">&#160;$projectnumber</span><!--END PROJECT_NUMBER-->
</div>
<!--BEGIN PROJECT_BRIEF--><div id="projectbrief">$projectbrief</div><!--END PROJECT_BRIEF-->
</td>
<!--END PROJECT_NAME-->
<!--BEGIN !PROJECT_NAME-->
<!--BEGIN PROJECT_BRIEF-->
<td>
<div id="projectbrief">$projectbrief</div>
</td>
<!--END PROJECT_BRIEF-->
<!--END !PROJECT_NAME-->
<!--BEGIN DISABLE_INDEX-->
<!--BEGIN SEARCHENGINE-->
<!--BEGIN !FULL_SIDEBAR-->
<td>$searchbox</td>
<!--END !FULL_SIDEBAR-->
<!--END SEARCHENGINE-->
<!--END DISABLE_INDEX-->
</tr>
<!--BEGIN SEARCHENGINE-->
<!--BEGIN FULL_SIDEBAR-->
<tr><td colspan="2">$searchbox</td></tr>
<!--END FULL_SIDEBAR-->
<!--END SEARCHENGINE-->
</tbody>
</table>
</div>
<!--END TITLEAREA-->
<!-- end header part -->


@@ -16,7 +16,8 @@
//
// Advanced options. USE AT OWN RISK:
// ---
"core_connections_per_host": 1 // Defaults to 1
"core_connections_per_host": 1, // Defaults to 1
"write_batch_size": 20 // Defaults to 20
//
// Below options will use defaults from cassandra driver if left unspecified.
// See https://docs.datastax.com/en/developer/cpp-driver/2.17/api/struct.CassCluster/ for details.
@@ -34,6 +35,7 @@
"grpc_port": "50051"
}
],
"forwarding_cache_timeout": 0.250, // in seconds, could be 0, which means no cache
"dos_guard": {
// Comma-separated list of IPs to exclude from rate limiting
"whitelist": [
@@ -51,22 +53,13 @@
"max_requests": 20, // Max connections per IP per sweep interval
"sweep_interval": 1 // Time in seconds before resetting max_fetches and max_requests
},
"cache": {
// Comma-separated list of peer nodes that Clio can use to download cache from at startup
"peers": [
{
"ip": "127.0.0.1",
"port": 51234
}
]
},
"server": {
"ip": "0.0.0.0",
"port": 51233,
// Max number of requests to queue up before rejecting further requests.
// Defaults to 0, which disables the limit.
"max_queue_size": 500,
// If request contains header with authorization, Clio will check if it matches this value's hash
// If request contains header with authorization, Clio will check if it matches the prefix 'Password ' + this value's sha256 hash
// If matches, the request will be considered as admin request
"admin_password": "xrp",
// If local_admin is true, Clio will consider requests come from 127.0.0.1 as admin requests
@@ -101,7 +94,10 @@
"log_level": "trace"
}
],
"prometheus_enabled": true,
"prometheus": {
"enabled": true,
"compress_reply": true
},
"log_level": "info",
// Log format (this is the default format)
"log_format": "%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%",


@@ -68,7 +68,7 @@
},
"gridPos": {
"h": 8,
"w": 5,
"w": 3,
"x": 0,
"y": 0
},
@@ -105,102 +105,6 @@
"title": "Service state",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "s"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 7,
"x": 5,
"y": 0
},
"id": 7,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": false
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"editorMode": "code",
"expr": "scrape_duration_seconds{job=\"clio\"}",
"instant": false,
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "Prometheus Request Processing Time",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
@@ -262,8 +166,8 @@
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"w": 9,
"x": 3,
"y": 0
},
"id": 2,
@@ -296,102 +200,6 @@
"title": "Work Queue Size",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "µs"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 8
},
"id": 10,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"editorMode": "code",
"expr": "rpc_method_duration_us{job=\"clio\"}",
"instant": false,
"legendFormat": "{{method}}",
"range": true,
"refId": "A"
}
],
"title": "RPC Method Call Duration",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
@@ -455,7 +263,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 8
"y": 0
},
"id": 9,
"options": {
@@ -550,9 +358,9 @@
"h": 8,
"w": 12,
"x": 0,
"y": 16
"y": 8
},
"id": 8,
"id": 11,
"options": {
"legend": {
"calcs": [],
@@ -572,14 +380,206 @@
"uid": "PBFA97CFB590B2093"
},
"editorMode": "code",
"expr": "rate(rpc_error_total_number{job=\"clio\"}[$__rate_interval])",
"expr": "subscriptions_current_number{job=\"clio\"}",
"instant": false,
"legendFormat": "{{error_type}}",
"legendFormat": "{{collection}}{{stream}}",
"range": true,
"refId": "A"
}
],
"title": "RPC Error Rate",
"title": "Subscriptions Number",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 8
},
"id": 6,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": false
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"editorMode": "code",
"expr": "sum(increase(ledger_cache_counter_total_number{job=\"clio\",type=\"cache_hit\"}[1m])) / sum(increase(ledger_cache_counter_total_number{job=\"clio\",type=\"request\"}[1m]))",
"hide": false,
"instant": false,
"legendFormat": "ledger cache hit rate",
"range": true,
"refId": "A"
}
],
"title": "Ledger Cache Hit Rate",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "µs"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 16
},
"id": 10,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"editorMode": "code",
"expr": "rpc_method_duration_us{job=\"clio\"}",
"instant": false,
"legendFormat": "{{method}}",
"range": true,
"refId": "A"
}
],
"title": "RPC Method Call Duration",
"type": "timeseries"
},
{
@@ -647,13 +647,13 @@
"x": 12,
"y": 16
},
"id": 6,
"id": 8,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": false
"showLegend": true
},
"tooltip": {
"mode": "single",
@@ -667,15 +667,14 @@
"uid": "PBFA97CFB590B2093"
},
"editorMode": "code",
"expr": "sum(increase(ledger_cache_counter_total_number{job=\"clio\",type=\"cache_hit\"}[1m])) / sum(increase(ledger_cache_counter_total_number{job=\"clio\",type=\"request\"}[1m]))",
"hide": false,
"expr": "rate(rpc_error_total_number{job=\"clio\"}[$__rate_interval])",
"instant": false,
"legendFormat": "ledger cache hit rate",
"legendFormat": "{{error_type}}",
"range": true,
"refId": "A"
}
],
"title": "Ledger Cache Hit Rate",
"title": "RPC Error Rate",
"type": "timeseries"
},
{
@@ -791,7 +790,7 @@
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
@@ -800,6 +799,9 @@
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineStyle": {
"fill": "solid"
},
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
@@ -828,7 +830,8 @@
"value": 80
}
]
}
},
"unit": "ms"
},
"overrides": []
},
@@ -838,7 +841,7 @@
"x": 12,
"y": 24
},
"id": 11,
"id": 12,
"options": {
"legend": {
"calcs": [],
@@ -851,6 +854,7 @@
"sort": "none"
}
},
"pluginVersion": "10.2.0",
"targets": [
{
"datasource": {
@@ -858,14 +862,52 @@
"uid": "PBFA97CFB590B2093"
},
"editorMode": "code",
"expr": "subscriptions_current_number{job=\"clio\"}",
"expr": "histogram_quantile(0.50, sum(rate(backend_duration_milliseconds_histogram_bucket{job=\"clio\"}[$__interval])) by (le, operation))",
"hide": false,
"instant": false,
"legendFormat": "{{collection}}{{stream}}",
"legendFormat": "{{operation}} 0.5 percentile",
"range": true,
"refId": "A"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"editorMode": "code",
"expr": "histogram_quantile(0.75, sum(rate(backend_duration_milliseconds_histogram_bucket{job=\"clio\"}[$__interval])) by (le, operation))",
"hide": false,
"instant": false,
"legendFormat": "{{operation}} 0.75 percentile",
"range": true,
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"editorMode": "code",
"expr": "histogram_quantile(0.95, sum(rate(backend_duration_milliseconds_histogram_bucket{job=\"clio\"}[$__interval])) by (le, operation))",
"hide": false,
"instant": false,
"legendFormat": "{{operation}} 0.95 percentile",
"range": true,
"refId": "C"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"expr": "",
"hide": false,
"instant": false,
"range": true,
"refId": "D"
}
],
"title": "Subscriptions Number",
"title": "DB operation duration",
"type": "timeseries"
},
{
@@ -1081,6 +1123,102 @@
],
"title": "DB Pending operations",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "s"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 10,
"x": 0,
"y": 40
},
"id": 7,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": false
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"editorMode": "code",
"expr": "scrape_duration_seconds{job=\"clio\"}",
"instant": false,
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "Prometheus Request Processing Time",
"type": "timeseries"
}
],
"refresh": "5s",
@@ -1097,6 +1235,6 @@
"timezone": "",
"title": "Clio",
"uid": "aeaae84e-c194-47b2-ad65-86e45eebb815",
"version": 1,
"version": 3,
"weekStart": ""
}

docs/img/xrpl-logo.svg Normal file

File diff suppressed because one or more lines are too long

docs/logging.md Normal file

@@ -0,0 +1,76 @@
# Logging
Clio provides several logging options, all of which are configurable via the config file. These are detailed in the following sections.
## `log_level`
The minimum severity at which a log message is output by default. Severity options are `trace`, `debug`, `info`, `warning`, `error`, `fatal`. Defaults to `info`.
## `log_format`
The format of log lines produced by Clio. Defaults to `"%TimeStamp% (%SourceLocation%) [%ThreadID%] %Channel%:%Severity% %Message%"`.
Each of the variables expands like so:
- `TimeStamp`: The full date and time of the log entry
- `SourceLocation`: A partial path to the C++ source file and the line number in that file (`source/file/path:linenumber`)
- `ThreadID`: The ID of the thread the log entry is written from
- `Channel`: The channel that this log entry was sent to
- `Severity`: The severity (aka log level) the entry was sent at
- `Message`: The actual log message
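As a purely illustrative sketch (not captured from a real run), a line in the default format might look like:
```
2024-03-05 12:00:00.123456 (rpc/Handler.cpp:42) [0x7f1c] RPC:info Request received
```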
## `log_channels`
An array of JSON objects, each overriding properties for a logging `channel`.
> [!IMPORTANT]
> At the time of writing, only `log_level` can be overridden using this mechanism.
Each object is of this format:
```json
{
"channel": "Backend",
"log_level": "fatal"
}
```
If no override is present for a given channel, that channel will log at the severity specified by the global `log_level`.
The log channels that can be overridden are: `Backend`, `WebServer`, `Subscriptions`, `RPC`, `ETL` and `Performance`.
> [!NOTE]
> See [example-config.json](../docs/examples/config/example-config.json) for more details.
## `log_to_console`
Enable or disable log output to console. Options are `true`/`false`. This option defaults to `true`.
## `log_directory`
Path to the directory where log files are stored. If the directory doesn't exist, Clio will create it.
If this option is not specified, logs are not written to a file.
## `log_rotation_size`
The maximum size of the log file in **megabytes** before it rotates into a smaller file. Defaults to 2GB.
## `log_directory_max_size`
The maximum size of the log directory in **megabytes** before old log files are deleted to free up space. Defaults to 50GB.
## `log_rotation_hour_interval`
The time interval in **hours** after the last log rotation at which the current log file is automatically rotated. Defaults to 12 hours.
> [!NOTE]
> Log rotation based on time occurs in conjunction with size-based log rotation. For example, if a size-based log rotation occurs, the timer for the time-based rotation will reset.
## `log_tag_style`
Tag implementation to use. Must be one of:
- `uint`: Lock-free and thread-safe, but outputs just a simple unsigned integer
- `uuid`: Thread-safe; outputs a UUID tag
- `none`: Doesn't use tagging at all


@@ -0,0 +1,30 @@
# Metrics and static analysis
## Prometheus metrics collection
Clio natively supports [Prometheus](https://prometheus.io/) metrics collection. It accepts Prometheus requests on the port configured in the `server` section of the config.
Prometheus metrics are enabled by default, and replies to `/metrics` are compressed. To disable compression and get human-readable metrics, add `"prometheus": { "enabled": true, "compress_reply": false }` to Clio's config.
To completely disable Prometheus metrics, add `"prometheus": { "enabled": false }` to Clio's config.
It is important to know that Clio responds to Prometheus requests only if they are admin requests. If you are using the admin password feature, the same password should be provided in the Authorization header of Prometheus requests.
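A quick manual check is to request the metrics endpoint directly; this sketch assumes the default port `51233` and an admin password of `secret`:
```sh
curl -H "Authorization: Password secret" http://localhost:51233/metrics
```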
You can find an example docker-compose file, with Prometheus and Grafana configs, in [examples/infrastructure](../docs/examples/infrastructure/).
## Using `clang-tidy` for static analysis
The minimum [clang-tidy](https://clang.llvm.org/extra/clang-tidy/) version required is 17.0.
`clang-tidy` can be run by CMake when building the project. To achieve this, you just need to provide the option `-o lint=True` to the `conan install` command:
```sh
conan install .. --output-folder . --build missing --settings build_type=Release -o tests=True -o lint=True
```
By default, CMake will try to find `clang-tidy` automatically on your system.
To force CMake to use your desired binary, set the `CLIO_CLANG_TIDY_BIN` environment variable to the path of the `clang-tidy` binary. For example:
```sh
export CLIO_CLANG_TIDY_BIN=/opt/homebrew/opt/llvm@17/bin/clang-tidy
```

docs/run-clio.md Normal file

@@ -0,0 +1,82 @@
# How to run Clio
## Prerequisites
- Access to a Cassandra cluster or ScyllaDB cluster. Can be local or remote.
> [!IMPORTANT]
> There are some key considerations when using **ScyllaDB**. By default, Scylla reserves all free RAM on a machine for itself. If you are running `rippled` or other services on the same machine, restrict its memory usage using the `--memory` argument.
>
> See [ScyllaDB in a Shared Environment](https://docs.scylladb.com/getting-started/scylla-in-a-shared-environment/) to learn more.
- Access to one or more `rippled` nodes. Can be local or remote.
## Starting `rippled` and Clio
To run Clio you must first make the necessary changes to your configuration file, `config.json`. See [How to configure Clio and rippled](./configure-clio.md) to learn more.
Once your config files are ready, start `rippled` and Clio.
> [!TIP]
> It doesn't matter which you start first, and it's fine to stop one or the other and restart at any given time.
To start Clio, simply run:
```sh
./clio_server config.json
```
Clio will wait for `rippled` to sync before extracting any ledgers. If there is already data in Clio's database, Clio will begin extraction with the ledger whose sequence is one greater than the greatest sequence currently in the database. Clio will wait for this ledger to be available.
## Extracting ledgers from `rippled`
Be aware that the behavior of `rippled` is to sync to the most recent ledger on the network, and then backfill. If Clio is extracting ledgers from `rippled`, and then `rippled` is stopped for a significant amount of time and then restarted, `rippled` will take time to backfill to the next ledger that Clio wants.
The time this takes is proportional to how long `rippled` was offline. Additionally, how far `rippled` backfills depends on the `online_delete` and `ledger_history` config values. If these values are small and `rippled` is stopped for a significant amount of time, `rippled` may never backfill to the ledger that Clio wants.
To avoid this situation, it is advised to keep history proportional to the amount of time you expect `rippled` to be offline. For example, if you expect `rippled` to be offline for a few days from time to time, you should keep at least a few days of history. If you expect `rippled` to never be offline, then you can keep a very small amount of history.
Clio can use multiple `rippled` servers as a data source. Simply add more entries to the `etl_sources` section, and Clio will load balance requests across the servers specified in this list. As long as one `rippled` server is up and synced, Clio will continue extracting ledgers.
In contrast to `rippled`, Clio answers RPC requests for the data already in the database as soon as the server starts. Clio does not wait to sync to the network, or for `rippled` to sync.
## Starting Clio with a fresh database
When starting Clio with a fresh database, Clio needs to download a ledger in full.
This can take some time, and depends on database throughput. With a moderately fast database, this should take less than 10 minutes. If you did not properly set `secure_gateway` in the `port_grpc` section of `rippled`, this step will fail.
Once the first ledger is fully downloaded, Clio only needs to extract the changed data for each ledger, so extraction is much faster and Clio can keep up with `rippled` in real time. Even under intense load, Clio should not lag behind the network, as Clio is not processing the data and is simply writing to a database. The throughput of Clio depends on the throughput of your database, but a standard Cassandra or Scylla deployment can handle the write load of the XRP Ledger without any trouble.
> [!IMPORTANT]
> Generally the performance considerations come on the read side, and depend on the number of RPC requests your Clio nodes are serving. Be aware that very heavy read traffic can impact write throughput. Again, this is on the database side, so if you are seeing this, upgrade your database.
## Running multiple Clio nodes
It is possible to run multiple Clio nodes that share access to the same database. The Clio nodes don't need to know about each other. You can simply spin up more Clio nodes pointing to the same database, and shut them down as you wish.
On startup, each Clio node queries the database for the latest ledger. If this latest ledger does not change for some time, the Clio node begins extracting ledgers and writing to the database. If a Clio node detects that a ledger it is trying to write has already been written, it backs off and stops writing. If the node then does not see a ledger written for some time, it starts writing again. This algorithm ensures that at any given time, exactly one Clio node is writing to the database.
### Configuring read-only Clio nodes
It is possible to force Clio to only read data and never become a writer. To do this, set `read_only: true` in the config, as shown in the sketch below. One common setup is to have a small number of writer nodes that are inaccessible to clients, with several read-only nodes handling client requests. The number of read-only nodes can be scaled up or down in response to request volume.
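A minimal sketch of the corresponding config fragment:
```json
"read_only": true
```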
### Running multiple `rippled` servers
When using multiple `rippled` servers as data sources and multiple Clio nodes, each Clio node should use the same set of `rippled` servers as sources. The order doesn't matter. The only reason not to do this is if you are running servers in different regions, and you want the Clio nodes to extract from servers in their region. However, if you are doing this, be aware that database traffic will be flowing across regions, which can cause high latencies. A possible alternative to this is to just deploy a database in each region, and the Clio nodes in each region use their region's database. This is effectively two systems.
Clio supports API versioning as [described here](https://xrpl.org/request-formatting.html#api-versioning).
It's possible to configure the `minimum`, `maximum` and `default` versions like so:
```json
"api_version": {
"min": 1,
"max": 2,
"default": 1
}
```
All of the above are optional.
Clio will fall back to hardcoded defaults when these values are not specified in the config file, or if the configured values are outside of the minimum and maximum supported versions hardcoded in [src/rpc/common/APIVersion.hpp](../src/rpc/common/APIVersion.hpp).
> [!TIP]
> See the [example-config.json](../docs/examples/config/example-config.json) for more details.


@@ -1,183 +0,0 @@
#!/usr/bin/python3
import argparse
from datetime import datetime
def getTime(line):
bracketOpen = line.find("[")
bracketClose = line.find("]")
timestampSub = line[bracketOpen+1:bracketClose]
timestamp = datetime.strptime(timestampSub, '%Y-%m-%d %H:%M:%S.%f')
return timestamp.timestamp()
def parseAccountTx(filename):
with open(filename) as f:
totalProcTime = 0.0
totalTxnTime = 0.0
numCalls = 0
for line in f:
if "executed stored_procedure" in line:
idx = line.find("in ")
idx = idx + 3
idx2 = line.find("num")
procTime = float(line[idx:idx2])
totalProcTime += procTime
if "fetchTransactions fetched" in line:
idx = line.find("took ")
idx = idx + 5
txnTime = float(line[idx:])
totalTxnTime += txnTime
numCalls = numCalls + 1
print(totalProcTime)
print(totalProcTime/numCalls)
print(totalTxnTime)
print(totalTxnTime/numCalls)
def parseLogs(filename, interval, minTxnCount = 0):
with open(filename) as f:
totalTime = 0
totalTxns = 0
totalObjs = 0
totalLoadTime = 0
start = 0
end = 0
totalLedgers = 0
intervalTime = 0
intervalTxns = 0
intervalObjs = 0
intervalLoadTime = 0
intervalStart = 0
intervalEnd = 0
intervalLedgers = 0
ledgersPerSecond = 0
print("ledgers, transactions, objects, loadTime, loadTime/ledger, ledgers/sec, txns/sec, objs/sec")
for line in f:
if "Load phase" in line:
sequenceIdx = line.find("Sequence : ")
hashIdx = line.find(" Hash :")
sequence = line[sequenceIdx + len("Sequence : "):hashIdx]
txnCountSubstr = "txn count = "
objCountSubstr = ". object count = "
loadTimeSubstr = ". load time = "
txnsSubstr = ". load txns per second = "
objsSubstr = ". load objs per second = "
txnCountIdx = line.find(txnCountSubstr)
objCountIdx = line.find(objCountSubstr)
loadTimeIdx = line.find(loadTimeSubstr)
txnsIdx = line.find(txnsSubstr)
objsIdx = line.find(objsSubstr)
txnCount = line[txnCountIdx + len(txnCountSubstr):objCountIdx]
objCount = line[objCountIdx + len(objCountSubstr):loadTimeIdx]
loadTime = line[loadTimeIdx + len(loadTimeSubstr):txnsIdx]
txnsPerSecond = line[txnsIdx + len(txnsSubstr):objsIdx]
objsPerSecond = line[objsIdx + len(objsSubstr):-1]
if int(txnCount) >= minTxnCount:
totalTime += float(loadTime);
totalTxns += float(txnCount)
totalObjs += float(objCount)
intervalTime += float(loadTime)
intervalTxns += float(txnCount)
intervalObjs += float(objCount)
totalLoadTime += float(loadTime)
intervalLoadTime += float(loadTime)
if start == 0:
start = getTime(line)
prevEnd = end
end = getTime(line)
if intervalStart == 0:
intervalStart = getTime(line)
intervalEnd = getTime(line)
totalLedgers+=1
intervalLedgers+=1
ledgersPerSecond = 0
if end != start:
ledgersPerSecond = float(totalLedgers) / float((end - start))
intervalLedgersPerSecond = 0
if intervalEnd != intervalStart:
intervalLedgersPerSecond = float(intervalLedgers) / float((intervalEnd - intervalStart))
if int(sequence) % interval == 0:
# print("Sequence = " + sequence + " : [time, txCount, objCount, txPerSec, objsPerSec]")
# print(loadTime + " , "
# + txnCount + " , "
# + objCount + " , "
# + txnsPerSecond + " , "
# + objsPerSecond)
# print("Interval Aggregate ( " + str(interval) + " ) [ledgers, txns, objects, elapsedTime, ledgersPerSec, avgLoadTime, txPerSec, objsPerSec]: ")
print(str(intervalLedgers) + " , "
+ str(intervalTxns) + " , "
+ str(intervalObjs) + " , "
+ str(intervalLoadTime) + " , "
+ str(intervalLoadTime/intervalLedgers) + " , "
+ str(intervalLedgers/intervalLoadTime) + " , "
+ str(intervalTxns/intervalLoadTime) + " , "
+ str(intervalObjs/intervalLoadTime))
# print("Total Aggregate: [ledgers, txns, objects, elapsedTime, ledgersPerSec, avgLoadTime, txPerSec, objsPerSec]")
# print(str(totalLedgers) + " , "
# + str(totalTxns) + " , "
# + str(totalObjs) + " , "
# + str(end-start) + " , "
# + str(ledgersPerSecond) + " , "
# + str(totalLoadTime/totalLedgers) + " , "
# + str(totalTxns/totalTime) + " , "
# + str(totalObjs/totalTime))
if int(sequence) % interval == 0:
intervalTime = 0
intervalTxns = 0
intervalObjs = 0
intervalStart = 0
intervalEnd = 0
intervalLedgers = 0
intervalLoadTime = 0
print("Total Aggregate: [ledgers, elapsedTime, ledgersPerSec, avgLoadTime, txPerSec, objsPerSec]")
print(totalLedgers)
print(totalLoadTime)
print(str(totalLedgers) + " : "
+ str(end-start) + " : "
+ str(ledgersPerSecond) + " : "
+ str(totalLoadTime/totalLedgers) + " : "
+ str(totalTxns/totalTime) + " : "
+ str(totalObjs/totalTime))
parser = argparse.ArgumentParser(description='parses logs')
parser.add_argument("--filename")
parser.add_argument("--interval",default=100000)
parser.add_argument("--minTxnCount",default=0)
parser.add_argument("--account_tx",default=False)
args = parser.parse_args()
def run(args):
if args.account_tx:
parseAccountTx(args.filename)
else:
parseLogs(args.filename, int(args.interval))
run(args)


@@ -17,12 +17,35 @@
*/
//==============================================================================
#include <data/BackendCounters.h>
#include "data/BackendCounters.hpp"
#include <util/prometheus/Prometheus.h>
#include "util/Assert.hpp"
#include "util/prometheus/Label.hpp"
#include "util/prometheus/Prometheus.hpp"
#include <boost/json/object.hpp>
#include <chrono>
#include <cstdint>
#include <memory>
#include <string>
#include <utility>
#include <vector>
namespace data {
namespace {
std::vector<std::int64_t> const histogramBuckets{1, 2, 5, 10, 20, 50, 100, 200, 500, 700, 1000};
std::int64_t
durationInMillisecondsSince(std::chrono::steady_clock::time_point const startTime)
{
return std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - startTime).count();
}
} // namespace
using namespace util::prometheus;
BackendCounters::BackendCounters()
@@ -43,6 +66,18 @@ BackendCounters::BackendCounters()
))
, asyncWriteCounters_{"write_async"}
, asyncReadCounters_{"read_async"}
, readDurationHistogram_(PrometheusService::histogramInt(
"backend_duration_milliseconds_histogram",
Labels({Label{"operation", "read"}}),
histogramBuckets,
"The duration of backend read operations including retries"
))
, writeDurationHistogram_(PrometheusService::histogramInt(
"backend_duration_milliseconds_histogram",
Labels({Label{"operation", "write"}}),
histogramBuckets,
"The duration of backend write operations including retries"
))
{
}
@@ -60,9 +95,10 @@ BackendCounters::registerTooBusy()
}
void
BackendCounters::registerWriteSync()
BackendCounters::registerWriteSync(std::chrono::steady_clock::time_point const startTime)
{
++writeSyncCounter_.get();
writeDurationHistogram_.get().observe(durationInMillisecondsSince(startTime));
}
void
@@ -78,9 +114,11 @@ BackendCounters::registerWriteStarted()
}
void
BackendCounters::registerWriteFinished()
BackendCounters::registerWriteFinished(std::chrono::steady_clock::time_point const startTime)
{
asyncWriteCounters_.registerFinished(1u);
auto const duration = durationInMillisecondsSince(startTime);
writeDurationHistogram_.get().observe(duration);
}
void
@@ -96,9 +134,12 @@ BackendCounters::registerReadStarted(std::uint64_t const count)
}
void
BackendCounters::registerReadFinished(std::uint64_t const count)
BackendCounters::registerReadFinished(std::chrono::steady_clock::time_point const startTime, std::uint64_t const count)
{
asyncReadCounters_.registerFinished(count);
auto const duration = durationInMillisecondsSince(startTime);
for (std::uint64_t i = 0; i < count; ++i)
readDurationHistogram_.get().observe(duration);
}
void
@@ -161,7 +202,10 @@ BackendCounters::AsyncOperationCounters::registerStarted(std::uint64_t const cou
void
BackendCounters::AsyncOperationCounters::registerFinished(std::uint64_t const count)
{
assert(pendingCounter_.get().value() >= static_cast<std::int64_t>(count));
ASSERT(
pendingCounter_.get().value() >= static_cast<std::int64_t>(count),
"Finished operations can't be more than pending"
);
pendingCounter_.get() -= count;
completedCounter_.get() += count;
}
@@ -175,7 +219,9 @@ BackendCounters::AsyncOperationCounters::registerRetry(std::uint64_t count)
void
BackendCounters::AsyncOperationCounters::registerError(std::uint64_t count)
{
assert(pendingCounter_.get().value() >= static_cast<std::int64_t>(count));
ASSERT(
pendingCounter_.get().value() >= static_cast<std::int64_t>(count), "Error operations can't be more than pending"
);
pendingCounter_.get() -= count;
errorCounter_.get() += count;
}
@@ -187,7 +233,8 @@ BackendCounters::AsyncOperationCounters::report() const
{name_ + "_pending", pendingCounter_.get().value()},
{name_ + "_completed", completedCounter_.get().value()},
{name_ + "_retry", retryCounter_.get().value()},
{name_ + "_error", errorCounter_.get().value()}};
{name_ + "_error", errorCounter_.get().value()}
};
}
} // namespace data


@@ -19,37 +19,60 @@
#pragma once
#include <util/prometheus/Prometheus.h>
#include "util/prometheus/Counter.hpp"
#include "util/prometheus/Gauge.hpp"
#include "util/prometheus/Histogram.hpp"
#include <boost/json/object.hpp>
#include <atomic>
#include <chrono>
#include <cstdint>
#include <functional>
#include <memory>
#include <utility>
#include <string>
namespace data {
/**
* @brief A concept for a class that can be used to count backend operations.
*/
// clang-format off
template <typename T>
concept SomeBackendCounters = requires(T a) {
typename T::PtrType;
{ a.registerTooBusy() } -> std::same_as<void>;
{ a.registerWriteSync() } -> std::same_as<void>;
{ a.registerWriteSyncRetry() } -> std::same_as<void>;
{ a.registerWriteStarted() } -> std::same_as<void>;
{ a.registerWriteFinished() } -> std::same_as<void>;
{ a.registerWriteRetry() } -> std::same_as<void>;
{ a.registerReadStarted(std::uint64_t{}) } -> std::same_as<void>;
{ a.registerReadFinished(std::uint64_t{}) } -> std::same_as<void>;
{ a.registerReadRetry(std::uint64_t{}) } -> std::same_as<void>;
{ a.registerReadError(std::uint64_t{}) } -> std::same_as<void>;
{ a.report() } -> std::same_as<boost::json::object>;
{
a.registerTooBusy()
} -> std::same_as<void>;
{
a.registerWriteSync(std::chrono::steady_clock::time_point{})
} -> std::same_as<void>;
{
a.registerWriteSyncRetry()
} -> std::same_as<void>;
{
a.registerWriteStarted()
} -> std::same_as<void>;
{
a.registerWriteFinished(std::chrono::steady_clock::time_point{})
} -> std::same_as<void>;
{
a.registerWriteRetry()
} -> std::same_as<void>;
{
a.registerReadStarted(std::uint64_t{})
} -> std::same_as<void>;
{
a.registerReadFinished(std::chrono::steady_clock::time_point{}, std::uint64_t{})
} -> std::same_as<void>;
{
a.registerReadRetry(std::uint64_t{})
} -> std::same_as<void>;
{
a.registerReadError(std::uint64_t{})
} -> std::same_as<void>;
{
a.report()
} -> std::same_as<boost::json::object>;
};
// clang-format on
/**
* @brief Holds statistics about the backend.
@@ -60,39 +83,92 @@ class BackendCounters {
public:
using PtrType = std::shared_ptr<BackendCounters>;
/**
* @brief Create a new BackendCounters object
*
* @return A shared pointer to the new BackendCounters object
*/
static PtrType
make();
/**
* @brief Register that the backend was too busy to process a request
*/
void
registerTooBusy();
/**
* @brief Register that a write operation was started
*
* @param startTime The time the operation was started
*/
void
registerWriteSync();
registerWriteSync(std::chrono::steady_clock::time_point startTime);
/**
* @brief Register that a write operation was retried
*/
void
registerWriteSyncRetry();
/**
* @brief Register that a write operation was started
*/
void
registerWriteStarted();
/**
* @brief Register that a write operation was finished
*
* @param startTime The time the operation was started
*/
void
registerWriteFinished();
registerWriteFinished(std::chrono::steady_clock::time_point startTime);
/**
* @brief Register that a write operation was retried
*/
void
registerWriteRetry();
/**
* @brief Register that one or more read operations were started
*
* @param count The number of operations started
*/
void
registerReadStarted(std::uint64_t count = 1u);
/**
* @brief Register that one or more read operations were finished
*
* @param startTime The time the operations were started
* @param count The number of operations finished
*/
void
registerReadFinished(std::uint64_t count = 1u);
registerReadFinished(std::chrono::steady_clock::time_point startTime, std::uint64_t count = 1u);
/**
* @brief Register that one or more read operations were retried
*
* @param count The number of operations retried
*/
void
registerReadRetry(std::uint64_t count = 1u);
/**
* @brief Register that one or more read operations had an error
*
* @param count The number of operations with an error
*/
void
registerReadError(std::uint64_t count = 1u);
/**
* @brief Get a report of the backend counters
*
* @return The report
*/
boost::json::object
report() const;
@@ -133,6 +209,9 @@ private:
AsyncOperationCounters asyncWriteCounters_{"write_async"};
AsyncOperationCounters asyncReadCounters_{"read_async"};
std::reference_wrapper<util::prometheus::HistogramInt> readDurationHistogram_;
std::reference_wrapper<util::prometheus::HistogramInt> writeDurationHistogram_;
};
} // namespace data


@@ -19,12 +19,18 @@
#pragma once
#include <data/BackendInterface.h>
#include <data/CassandraBackend.h>
#include <util/config/Config.h>
#include <util/log/Logger.h>
#include "data/BackendInterface.hpp"
#include "data/CassandraBackend.hpp"
#include "data/cassandra/SettingsProvider.hpp"
#include "util/config/Config.hpp"
#include "util/log/Logger.hpp"
#include <boost/algorithm/string.hpp>
#include <boost/algorithm/string/predicate.hpp>
#include <memory>
#include <stdexcept>
#include <string>
namespace data {
@@ -55,10 +61,8 @@ make_Backend(util::Config const& config)
throw std::runtime_error("Invalid database type");
auto const rng = backend->hardFetchLedgerRangeNoThrow();
if (rng) {
backend->updateRange(rng->minSequence);
backend->updateRange(rng->maxSequence);
}
if (rng)
backend->setRange(rng->minSequence, rng->maxSequence);
LOG(log.info()) << "Constructed BackendInterface Successfully";
return backend;

View File

@@ -17,17 +17,43 @@
*/
//==============================================================================
#include <data/BackendInterface.h>
#include <util/log/Logger.h>
#include "data/BackendInterface.hpp"
#include "data/Types.hpp"
#include "util/Assert.hpp"
#include "util/log/Logger.hpp"
#include <boost/asio/spawn.hpp>
#include <ripple/basics/base_uint.h>
#include <ripple/basics/strHex.h>
#include <ripple/protocol/Fees.h>
#include <ripple/protocol/Indexes.h>
#include <ripple/protocol/SField.h>
#include <ripple/protocol/STLedgerEntry.h>
#include <ripple/protocol/Serializer.h>
#include <chrono>
#include <cstddef>
#include <cstdint>
#include <mutex>
#include <optional>
#include <shared_mutex>
#include <sstream>
#include <string>
#include <utility>
#include <vector>
// Loggers local to this compilation unit
namespace {
util::Logger gLog{"Backend"};
} // namespace
/**
* @brief This namespace implements the data access layer and related components.
*
* The data layer is responsible for fetching and storing data from the database.
* Cassandra and ScyllaDB are currently supported via the `CassandraBackend` implementation.
*/
namespace data {
bool
BackendInterface::finishWrites(std::uint32_t const ledgerSequence)
@@ -43,7 +69,7 @@ BackendInterface::finishWrites(std::uint32_t const ledgerSequence)
void
BackendInterface::writeLedgerObject(std::string&& key, std::uint32_t const seq, std::string&& blob)
{
assert(key.size() == sizeof(ripple::uint256));
ASSERT(key.size() == sizeof(ripple::uint256), "Key must be 256 bits");
doWriteLedgerObject(std::move(key), seq, std::move(blob));
}
@@ -109,6 +135,7 @@ BackendInterface::fetchLedgerObjects(
return results;
}
// Fetches the successor to key/index
std::optional<ripple::uint256>
BackendInterface::fetchSuccessorKey(
@@ -155,7 +182,7 @@ BackendInterface::fetchBookOffers(
// TODO try to speed this up. This can take a few seconds. The goal is
// to get it down to a few hundred milliseconds.
BookOffersPage page;
const ripple::uint256 bookEnd = ripple::getQualityNext(book);
ripple::uint256 const bookEnd = ripple::getQualityNext(book);
ripple::uint256 uTipIndex = book;
std::vector<ripple::uint256> keys;
auto getMillis = [](auto diff) { return std::chrono::duration_cast<std::chrono::milliseconds>(diff).count(); };
@@ -178,7 +205,8 @@ BackendInterface::fetchBookOffers(
while (keys.size() < limit) {
++numPages;
ripple::STLedgerEntry const sle{
ripple::SerialIter{offerDir->blob.data(), offerDir->blob.size()}, offerDir->key};
ripple::SerialIter{offerDir->blob.data(), offerDir->blob.size()}, offerDir->key
};
auto indexes = sle.getFieldV256(ripple::sfIndexes);
keys.insert(keys.end(), indexes.begin(), indexes.end());
auto next = sle.getFieldU64(ripple::sfIndexNext);
@@ -188,7 +216,7 @@ BackendInterface::fetchBookOffers(
}
auto nextKey = ripple::keylet::page(uTipIndex, next);
auto nextDir = fetchLedgerObject(nextKey.key, ledgerSequence, yield);
assert(nextDir);
ASSERT(nextDir.has_value(), "Next dir must exist");
offerDir->blob = *nextDir;
offerDir->key = nextKey.key;
}
@@ -200,7 +228,7 @@ BackendInterface::fetchBookOffers(
for (size_t i = 0; i < keys.size() && i < limit; ++i) {
LOG(gLog.trace()) << "Key = " << ripple::strHex(keys[i]) << " blob = " << ripple::strHex(objs[i])
<< " ledgerSequence = " << ledgerSequence;
assert(!objs[i].empty());
ASSERT(!objs[i].empty(), "Ledger object can't be empty");
page.offers.push_back({keys[i], objs[i]});
}
auto end = std::chrono::system_clock::now();
@@ -234,7 +262,14 @@ void
BackendInterface::updateRange(uint32_t newMax)
{
std::scoped_lock const lck(rngMtx_);
assert(!range || newMax >= range->maxSequence);
ASSERT(
!range || newMax >= range->maxSequence,
"Range shouldn't exist yet or newMax should be greater. newMax = {}, range->maxSequence = {}",
newMax,
range->maxSequence
);
if (!range) {
range = {newMax, newMax};
} else {
@@ -242,6 +277,19 @@ BackendInterface::updateRange(uint32_t newMax)
}
}
void
BackendInterface::setRange(uint32_t min, uint32_t max, bool force)
{
std::scoped_lock const lck(rngMtx_);
if (!force) {
ASSERT(min <= max, "Range min must be less than or equal to max");
ASSERT(not range.has_value(), "Range was already set");
}
range = {min, max};
}
LedgerPage
BackendInterface::fetchLedgerPage(
std::optional<ripple::uint256> const& cursor,

View File

@@ -19,19 +19,32 @@
#pragma once
#include <data/DBHelpers.h>
#include <data/LedgerCache.h>
#include <data/Types.h>
#include <util/config/Config.h>
#include <util/log/Logger.h>
#include "data/DBHelpers.hpp"
#include "data/LedgerCache.hpp"
#include "data/Types.hpp"
#include "util/log/Logger.hpp"
#include <ripple/protocol/Fees.h>
#include <ripple/protocol/LedgerHeader.h>
#include <boost/asio/executor_work_guard.hpp>
#include <boost/asio/io_context.hpp>
#include <boost/asio/spawn.hpp>
#include <boost/json.hpp>
#include <boost/json/object.hpp>
#include <boost/utility/result_of.hpp>
#include <ripple/basics/base_uint.h>
#include <ripple/protocol/AccountID.h>
#include <ripple/protocol/Fees.h>
#include <ripple/protocol/LedgerHeader.h>
#include <chrono>
#include <cstddef>
#include <cstdint>
#include <exception>
#include <optional>
#include <shared_mutex>
#include <string>
#include <thread>
#include <type_traits>
#include <vector>
namespace data {
@@ -40,6 +53,9 @@ namespace data {
*/
class DatabaseTimeout : public std::exception {
public:
/**
* @return The error message as a C string
*/
char const*
what() const throw() override
{
@@ -54,9 +70,9 @@ static constexpr std::size_t DEFAULT_WAIT_BETWEEN_RETRY = 500;
* @tparam FnType The type of function object to execute
* @param func The function object to execute
* @param waitMs Delay between retry attempts
* @return auto The same as the return type of func
* @return The same as the return type of func
*/
template <class FnType>
template <typename FnType>
auto
retryOnTimeout(FnType func, size_t waitMs = DEFAULT_WAIT_BETWEEN_RETRY)
{
@@ -77,16 +93,16 @@ retryOnTimeout(FnType func, size_t waitMs = DEFAULT_WAIT_BETWEEN_RETRY)
*
* @tparam FnType The type of function object to execute
* @param func The function object to execute
* @return auto The same as the return type of func
* @return The same as the return type of func
*/
template <class FnType>
template <typename FnType>
auto
synchronous(FnType&& func)
{
boost::asio::io_context ctx;
using R = typename boost::result_of<FnType(boost::asio::yield_context)>::type;
if constexpr (!std::is_same<R, void>::value) {
if constexpr (!std::is_same_v<R, void>) {
R res;
boost::asio::spawn(ctx, [_ = boost::asio::make_work_guard(ctx), &func, &res](auto yield) {
res = func(yield);
@@ -105,9 +121,9 @@ synchronous(FnType&& func)
*
* @tparam FnType The type of function object to execute
* @param func The function object to execute
* @return auto The same as the return type of func
* @return The same as the return type of func
*/
template <class FnType>
template <typename FnType>
auto
synchronousAndRetryOnTimeout(FnType&& func)
{
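Taken together, `retryOnTimeout`, `synchronous`, and `synchronousAndRetryOnTimeout` let non-coroutine code drive the coroutine-based backend API. A hedged call-site sketch using the `fetchFees` accessor declared further down this file (`backend` and `seq` are assumed to be in scope):

```
// Blocks the calling thread; a DatabaseTimeout from the backend triggers a retry.
auto const fees = data::synchronousAndRetryOnTimeout([&](boost::asio::yield_context yield) {
    return backend->fetchFees(seq, yield);
});
```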
@@ -191,12 +207,22 @@ public:
void
updateRange(uint32_t newMax);
/**
* @brief Sets the range of sequences that are stored in the DB.
*
* @param min The new minimum sequence available
* @param max The new maximum sequence available
* @param force If set to true, the range will be set even if it's already set
*/
void
setRange(uint32_t min, uint32_t max, bool force = false);
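A hedged illustration of the contract documented above, with `newMin`/`newMax` as hypothetical values: the plain call is a one-time seed (the assertions reject a second call), while `force = true` deliberately overwrites an existing range:

```
backend->setRange(rng->minSequence, rng->maxSequence);  // one-time seed, as in make_Backend
backend->setRange(newMin, newMax, /* force = */ true);  // explicit reset; skips the assertions
```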
/**
* @brief Fetch the fees from a specific ledger sequence.
*
* @param seq The sequence to fetch for
* @param yield The coroutine context
* @return ripple::Fees if fees are found; nullopt otherwise
* @return Fees if fees are found; nullopt otherwise
*/
std::optional<ripple::Fees>
fetchFees(std::uint32_t seq, boost::asio::yield_context yield) const;
@@ -295,12 +321,11 @@ public:
*
* @param issuer AccountID of issuer you wish you query.
* @param taxon Optional taxon of NFTs by which you wish to filter.
* @param ledgerSequence The ledger sequence to fetch for
* @param limit Paging limit.
* @param cursorIn Optional cursor to allow us to pick up from where we
* last left off.
* @param cursorIn Optional cursor to allow us to pick up from where we last left off.
* @param yield Currently executing coroutine.
* @return std::vector<NFT> of NFTs issued by this account, or
* this issuer/taxon combination if taxon is passed and an optional marker
* @return NFTs issued by this account, or this issuer/taxon combination if taxon is passed and an optional marker
*/
virtual NFTsAndCursor
fetchNFTsByIssuer(
@@ -465,6 +490,7 @@ public:
/**
* @brief Fetches the ledger range from DB.
*
* @param yield The coroutine context
* @return The ledger range if available; nullopt otherwise
*/
virtual std::optional<LedgerRange>
@@ -521,7 +547,7 @@ public:
* @param data A vector of NFTsData objects representing the NFTs
*/
virtual void
writeNFTs(std::vector<NFTsData>&& data) = 0;
writeNFTs(std::vector<NFTsData> const& data) = 0;
/**
* @brief Write a new set of account transactions.
@@ -529,7 +555,7 @@ public:
* @param data A vector of AccountTransactionsData objects representing the account transactions
*/
virtual void
writeAccountTransactions(std::vector<AccountTransactionsData>&& data) = 0;
writeAccountTransactions(std::vector<AccountTransactionsData> data) = 0;
/**
* @brief Write NFTs transactions.
@@ -537,7 +563,7 @@ public:
* @param data A vector of NFTTransactionsData objects
*/
virtual void
writeNFTTransactions(std::vector<NFTTransactionsData>&& data) = 0;
writeNFTTransactions(std::vector<NFTTransactionsData> const& data) = 0;
/**
* @brief Write a new successor.
@@ -575,15 +601,27 @@ public:
isTooBusy() const = 0;
/**
* @return json object containing backend usage statistics
* @return A JSON object containing backend usage statistics
*/
virtual boost::json::object
stats() const = 0;
private:
/**
* @brief Writes a ledger object to the database
*
* @param key The key of the object
* @param seq The sequence of the ledger
* @param blob The data
*/
virtual void
doWriteLedgerObject(std::string&& key, std::uint32_t seq, std::string&& blob) = 0;
/**
* @brief The implementation should wait for all pending writes to finish
*
* @return true on success; false otherwise
*/
virtual bool
doFinishWrites() = 0;
};

View File

@@ -19,19 +19,42 @@
#pragma once
#include <data/BackendInterface.h>
#include <data/cassandra/Concepts.h>
#include <data/cassandra/Handle.h>
#include <data/cassandra/Schema.h>
#include <data/cassandra/SettingsProvider.h>
#include <data/cassandra/impl/ExecutionStrategy.h>
#include <util/LedgerUtils.h>
#include <util/Profiler.h>
#include <util/log/Logger.h>
#include "data/BackendInterface.hpp"
#include "data/DBHelpers.hpp"
#include "data/Types.hpp"
#include "data/cassandra/Concepts.hpp"
#include "data/cassandra/Handle.hpp"
#include "data/cassandra/Schema.hpp"
#include "data/cassandra/SettingsProvider.hpp"
#include "data/cassandra/Types.hpp"
#include "data/cassandra/impl/ExecutionStrategy.hpp"
#include "util/Assert.hpp"
#include "util/LedgerUtils.hpp"
#include "util/Profiler.hpp"
#include "util/log/Logger.hpp"
#include <boost/asio/spawn.hpp>
#include <boost/json/object.hpp>
#include <cassandra.h>
#include <ripple/basics/Blob.h>
#include <ripple/basics/base_uint.h>
#include <ripple/basics/strHex.h>
#include <ripple/protocol/AccountID.h>
#include <ripple/protocol/LedgerHeader.h>
#include <ripple/protocol/nft.h>
#include <boost/asio/spawn.hpp>
#include <atomic>
#include <chrono>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <limits>
#include <optional>
#include <stdexcept>
#include <string>
#include <tuple>
#include <utility>
#include <vector>
namespace data::cassandra {
@@ -493,41 +516,39 @@ public:
if (nftIDs.size() == limit)
ret.cursor = nftIDs.back();
auto const nftQueryStatement = schema_->selectNFTBulk.bind(nftIDs);
nftQueryStatement.bindAt(1, ledgerSequence);
std::vector<Statement> selectNFTStatements;
selectNFTStatements.reserve(nftIDs.size());
// Fetch all the NFT data, while filtering out the NFTs that are not within the ledger range
auto const nftRes = executor_.read(yield, nftQueryStatement);
auto const& nftQueryResults = nftRes.value();
std::transform(
std::cbegin(nftIDs),
std::cend(nftIDs),
std::back_inserter(selectNFTStatements),
[&](auto const& nftID) { return schema_->selectNFT.bind(nftID, ledgerSequence); }
);
if (not nftQueryResults.hasRows()) {
LOG(log_.debug()) << "No rows returned";
return {};
auto const nftInfos = executor_.readEach(yield, selectNFTStatements);
std::vector<Statement> selectNFTURIStatements;
selectNFTURIStatements.reserve(nftIDs.size());
std::transform(
std::cbegin(nftIDs),
std::cend(nftIDs),
std::back_inserter(selectNFTURIStatements),
[&](auto const& nftID) { return schema_->selectNFTURI.bind(nftID, ledgerSequence); }
);
auto const nftUris = executor_.readEach(yield, selectNFTURIStatements);
for (auto i = 0u; i < nftIDs.size(); i++) {
if (auto const maybeRow = nftInfos[i].template get<uint32_t, ripple::AccountID, bool>(); maybeRow) {
auto [seq, owner, isBurned] = *maybeRow;
NFT nft(nftIDs[i], seq, owner, isBurned);
if (auto const maybeUri = nftUris[i].template get<ripple::Blob>(); maybeUri)
nft.uri = *maybeUri;
ret.nfts.push_back(nft);
}
}
auto const nftURIQueryStatement = schema_->selectNFTURIBulk.bind(nftIDs);
nftURIQueryStatement.bindAt(1, ledgerSequence);
// Get the URI for each NFT; it's possible that the URI doesn't exist
auto const uriRes = executor_.read(yield, nftURIQueryStatement);
auto const& nftURIQueryResults = uriRes.value();
std::unordered_map<std::string, Blob> nftURIMap;
for (auto const [nftID, uri] : extract<ripple::uint256, Blob>(nftURIQueryResults))
nftURIMap.insert({ripple::strHex(nftID), uri});
for (auto const [nftID, seq, owner, isBurned] :
extract<ripple::uint256, std::uint32_t, ripple::AccountID, bool>(nftQueryResults)) {
NFT nft;
nft.tokenID = nftID;
nft.ledgerSequence = seq;
nft.owner = owner;
nft.isBurned = isBurned;
if (nftURIMap.contains(ripple::strHex(nft.tokenID)))
nft.uri = nftURIMap.at(ripple::strHex(nft.tokenID));
ret.nfts.push_back(nft);
}
return ret;
}
@@ -622,7 +643,7 @@ public:
);
});
assert(numHashes == results.size());
ASSERT(numHashes == results.size(), "Number of hashes and results must match");
LOG(log_.debug()) << "Fetched " << numHashes << " transactions from Cassandra in " << timeDiff
<< " milliseconds";
return results;
@@ -735,14 +756,14 @@ public:
{
LOG(log_.trace()) << "Writing successor. key = " << key.size() << " bytes. "
<< " seq = " << std::to_string(seq) << " successor = " << successor.size() << " bytes.";
assert(!key.empty());
assert(!successor.empty());
ASSERT(!key.empty(), "Key must not be empty");
ASSERT(!successor.empty(), "Successor must not be empty");
executor_.write(schema_->insertSuccessor, std::move(key), seq, std::move(successor));
}
void
writeAccountTransactions(std::vector<AccountTransactionsData>&& data) override
writeAccountTransactions(std::vector<AccountTransactionsData> data) override
{
std::vector<Statement> statements;
statements.reserve(data.size() * 10); // assume 10 transactions avg
@@ -766,7 +787,7 @@ public:
}
void
writeNFTTransactions(std::vector<NFTTransactionsData>&& data) override
writeNFTTransactions(std::vector<NFTTransactionsData> const& data) override
{
std::vector<Statement> statements;
statements.reserve(data.size());
@@ -798,7 +819,7 @@ public:
}
void
writeNFTs(std::vector<NFTsData>&& data) override
writeNFTs(std::vector<NFTsData> const& data) override
{
std::vector<Statement> statements;
statements.reserve(data.size() * 3);
@@ -873,6 +894,6 @@ private:
}
};
using CassandraBackend = BasicCassandraBackend<SettingsProvider, detail::DefaultExecutionStrategy<>>;
using CassandraBackend = BasicCassandraBackend<SettingsProvider, impl::DefaultExecutionStrategy<>>;
} // namespace data::cassandra

View File

@@ -20,15 +20,24 @@
/** @file */
#pragma once
#include <ripple/basics/Log.h>
#include <ripple/basics/StringUtilities.h>
#include <ripple/protocol/SField.h>
#include <ripple/protocol/STAccount.h>
#include <ripple/protocol/TxMeta.h>
#include "util/Assert.hpp"
#include <boost/container/flat_set.hpp>
#include <ripple/basics/Blob.h>
#include <ripple/basics/Log.h>
#include <ripple/basics/StringUtilities.h>
#include <ripple/basics/base_uint.h>
#include <ripple/protocol/AccountID.h>
#include <ripple/protocol/SField.h>
#include <ripple/protocol/STAccount.h>
#include <ripple/protocol/STLedgerEntry.h>
#include <ripple/protocol/Serializer.h>
#include <ripple/protocol/TxMeta.h>
#include <data/Types.h>
#include <cstddef>
#include <cstdint>
#include <optional>
#include <string>
/**
* @brief Struct used to keep track of what to write to account_transactions/account_tx tables.
@@ -39,6 +48,12 @@ struct AccountTransactionsData {
std::uint32_t transactionIndex{};
ripple::uint256 txHash;
/**
* @brief Construct a new AccountTransactionsData object
*
* @param meta The transaction metadata
* @param txHash The transaction hash
*/
AccountTransactionsData(ripple::TxMeta& meta, ripple::uint256 const& txHash)
: accounts(meta.getAffectedAccounts())
, ledgerSequence(meta.getLgrSeq())
@@ -61,6 +76,13 @@ struct NFTTransactionsData {
std::uint32_t transactionIndex;
ripple::uint256 txHash;
/**
* @brief Construct a new NFTTransactionsData object
*
* @param tokenID The token ID
* @param meta The transaction metadata
* @param txHash The transaction hash
*/
NFTTransactionsData(ripple::uint256 const& tokenID, ripple::TxMeta const& meta, ripple::uint256 const& txHash)
: tokenID(tokenID), ledgerSequence(meta.getLgrSeq()), transactionIndex(meta.getIndex()), txHash(txHash)
{
@@ -71,35 +93,33 @@ struct NFTTransactionsData {
* @brief Represents an NFT state at a particular ledger.
*
* Gets written to nf_tokens table and the like.
*
* The transaction index is only stored because we want to store only the final state of an NFT per ledger.
* Since we pull this from transactions we keep track of which tx index created this so we can de-duplicate, as it is
* possible for one ledger to have multiple txs that change the state of the same NFT.
*
* We only set the uri if this is a mint tx, or if we are loading initial state from NFTokenPage objects.
*/
struct NFTsData {
ripple::uint256 tokenID;
std::uint32_t ledgerSequence;
// The transaction index is only stored because we want to store only the
// final state of an NFT per ledger. Since we pull this from transactions
// we keep track of which tx index created this so we can de-duplicate, as
// it is possible for one ledger to have multiple txs that change the
// state of the same NFT. This field is not applicable when we are loading
// initial NFT state via ledger objects, since we do not have to tiebreak
// NFT state for a given ledger in that case.
std::optional<std::uint32_t> transactionIndex;
ripple::AccountID owner;
// We only set the uri if this is a mint tx, or if we are
// loading initial state from NFTokenPage objects. In other words,
// uri should only be set if the etl process believes this NFT hasn't
// been seen before in our local database. We do this so that we don't
// write to the the nf_token_uris table every
// time the same NFT changes hands. We also can infer if there is a URI
// that we need to write to the issuer_nf_tokens table.
std::optional<ripple::Blob> uri;
bool isBurned = false;
// This constructor is used when parsing an NFTokenMint tx.
// Unfortunately because of the extreme edge case of being able to
// re-mint an NFT with the same ID, we must explicitly record a null
// URI. For this reason, we _always_ write this field as a result of
// this tx.
/**
* @brief Construct a new NFTsData object
*
* @note This constructor is used when parsing an NFTokenMint tx.
* Unfortunately, because of the extreme edge case of being able to re-mint an NFT with the same ID, we must
* explicitly record a null URI. For this reason, we _always_ write this field as a result of this tx.
*
* @param tokenID The token ID
* @param owner The owner
* @param uri The URI
* @param meta The transaction metadata
*/
NFTsData(
ripple::uint256 const& tokenID,
ripple::AccountID const& owner,
@@ -110,8 +130,16 @@ struct NFTsData {
{
}
// This constructor is used when parsing an NFTokenBurn or
// NFTokenAcceptOffer tx
/**
* @brief Construct a new NFTsData object
*
* @note This constructor is used when parsing an NFTokenBurn or NFTokenAcceptOffer tx
*
* @param tokenID The token ID
* @param owner The owner
* @param meta The transaction metadata
* @param isBurned Whether the NFT is burned
*/
NFTsData(ripple::uint256 const& tokenID, ripple::AccountID const& owner, ripple::TxMeta const& meta, bool isBurned)
: tokenID(tokenID)
, ledgerSequence(meta.getLgrSeq())
@@ -121,12 +149,18 @@ struct NFTsData {
{
}
// This constructor is used when parsing an NFTokenPage directly from
// ledger state.
// Unfortunately because of the extreme edge case of being able to
// re-mint an NFT with the same ID, we must explicitly record a null
// URI. For this reason, we _always_ write this field as a result of
// this tx.
/**
* @brief Construct a new NFTsData object
*
* @note This constructor is used when parsing an NFTokenPage directly from ledger state.
* Unfortunately because of the extreme edge case of being able to re-mint an NFT with the same ID, we must
* explicitly record a null URI. For this reason, we _always_ write this field as a result of this tx.
*
* @param tokenID The token ID
* @param ledgerSequence The ledger sequence
* @param owner The owner
* @param uri The URI
*/
NFTsData(
ripple::uint256 const& tokenID,
std::uint32_t const ledgerSequence,
@@ -144,7 +178,7 @@ struct NFTsData {
* @param object The object to check
* @return true if the object is an offer; false otherwise
*/
template <class T>
template <typename T>
inline bool
isOffer(T const& object)
{
@@ -161,7 +195,7 @@ isOffer(T const& object)
* @param object The object to check
* @return true if the object is an offer; false otherwise
*/
template <class T>
template <typename T>
inline bool
isOfferHex(T const& object)
{
@@ -177,7 +211,7 @@ isOfferHex(T const& object)
* @param object The object to check
* @return true if the object is a dir node; false otherwise
*/
template <class T>
template <typename T>
inline bool
isDirNode(T const& object)
{
@@ -193,7 +227,7 @@ isDirNode(T const& object)
* @param object The object to check
* @return true if the object is a book dir; false otherwise
*/
template <class T, class R>
template <typename T, typename R>
inline bool
isBookDir(T const& key, R const& object)
{
@@ -210,7 +244,7 @@ isBookDir(T const& key, R const& object)
* @param offer The offer to get the book for
* @return Book as ripple::uint256
*/
template <class T>
template <typename T>
inline ripple::uint256
getBook(T const& offer)
{
@@ -227,13 +261,13 @@ getBook(T const& offer)
* @param key The key to get the book base out of
* @return Book base as ripple::uint256
*/
template <class T>
template <typename T>
inline ripple::uint256
getBookBase(T const& key)
{
static constexpr size_t KEY_SIZE = 24;
assert(key.size() == ripple::uint256::size());
ASSERT(key.size() == ripple::uint256::size(), "Invalid key size {}", key.size());
ripple::uint256 ret;
for (size_t i = 0; i < KEY_SIZE; ++i)

View File

@@ -17,7 +17,19 @@
*/
//==============================================================================
#include <data/LedgerCache.h>
#include "data/LedgerCache.hpp"
#include "data/Types.hpp"
#include "util/Assert.hpp"
#include <ripple/basics/base_uint.h>
#include <cstddef>
#include <cstdint>
#include <mutex>
#include <optional>
#include <shared_mutex>
#include <vector>
namespace data {
@@ -28,6 +40,16 @@ LedgerCache::latestLedgerSequence() const
return latestSeq_;
}
void
LedgerCache::waitUntilCacheContainsSeq(uint32_t seq)
{
if (disabled_)
return;
std::unique_lock lock(mtx_);
cv_.wait(lock, [this, seq] { return latestSeq_ >= seq; });
return;
}
void
LedgerCache::update(std::vector<LedgerObject> const& objs, uint32_t seq, bool isBackground)
{
@@ -37,7 +59,12 @@ LedgerCache::update(std::vector<LedgerObject> const& objs, uint32_t seq, bool is
{
std::scoped_lock const lck{mtx_};
if (seq > latestSeq_) {
assert(seq == latestSeq_ + 1 || latestSeq_ == 0);
ASSERT(
seq == latestSeq_ + 1 || latestSeq_ == 0,
"New sequense must be either next or first. seq = {}, latestSeq_ = {}",
seq,
latestSeq_
);
latestSeq_ = seq;
}
for (auto const& obj : objs) {
@@ -55,6 +82,7 @@ LedgerCache::update(std::vector<LedgerObject> const& objs, uint32_t seq, bool is
deletes_.insert(obj.key);
}
}
cv_.notify_all();
}
}

View File

@@ -19,14 +19,23 @@
#pragma once
#include "data/Types.hpp"
#include "util/prometheus/Counter.hpp"
#include "util/prometheus/Label.hpp"
#include "util/prometheus/Prometheus.hpp"
#include <ripple/basics/base_uint.h>
#include <ripple/basics/hardened_hash.h>
#include <data/Types.h>
#include <atomic>
#include <condition_variable>
#include <cstddef>
#include <cstdint>
#include <functional>
#include <map>
#include <mutex>
#include <optional>
#include <shared_mutex>
#include <util/prometheus/Prometheus.h>
#include <utility>
#include <unordered_set>
#include <vector>
namespace data {
@@ -65,6 +74,7 @@ class LedgerCache {
std::map<ripple::uint256, CacheEntry> map_;
mutable std::shared_mutex mtx_;
std::condition_variable_any cv_;
uint32_t latestSeq_ = 0;
std::atomic_bool full_ = false;
std::atomic_bool disabled_ = false;
@@ -76,7 +86,7 @@ public:
/**
* @brief Update the cache with new ledger objects.
*
* @param blobs The ledger objects to update cache with
* @param objs The ledger objects to update cache with
* @param seq The sequence to update cache for
* @param isBackground Should be set to true when writing old data from a background thread
*/
@@ -162,6 +172,14 @@ public:
*/
float
getSuccessorHitRate() const;
/**
* @brief Waits until the cache contains a specific sequence.
*
* @param seq The sequence to wait for
*/
void
waitUntilCacheContainsSeq(uint32_t seq);
};
} // namespace data
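A hedged sketch of how the new wait/notify pair composes: a reader parks in `waitUntilCacheContainsSeq` until a writer's `update(...)` advances `latestSeq_` and wakes waiters via `cv_.notify_all()`. The `get(key, seq)` accessor used here is assumed from the unchanged part of this class:

```
// Reader side: block until the cache covers `seq`, then read from it.
std::optional<data::Blob>
readWhenAvailable(data::LedgerCache& cache, uint32_t seq, ripple::uint256 const& key)
{
    cache.waitUntilCacheContainsSeq(seq);  // returns immediately if the cache is disabled
    return cache.get(key, seq);            // assumed accessor
}
```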

View File

@@ -26,7 +26,7 @@ In Cassandra, Clio will be creating 9 tables to store the ledger data, they are
*Note: if you would like to visually explore the data structure of the Cassandra database, you can first run the Clio server with database `type` configured as `cassandra` to fill ledger data from rippled nodes into Cassandra, then use a GUI database management tool like [DataStax OpsCenter](https://docs.datastax.com/en/install/6.0/install/opscInstallOpsc.html) to view it interactively.*
### `ledger_transactions`
### ledger_transactions
```
CREATE TABLE clio.ledger_transactions (
ledger_sequence bigint, # The sequence number of the ledger version
@@ -36,7 +36,7 @@ CREATE TABLE clio.ledger_transactions (
```
This table stores the hashes of all transactions in a given ledger sequence ordered by the hash value in ascending order.
### `transactions`
### transactions
```
CREATE TABLE clio.transactions (
hash blob PRIMARY KEY, # The transaction hash
@@ -50,7 +50,7 @@ This table stores the full transaction and metadata of each ledger version with
To look up all the transactions that were validated in a ledger version with sequence `n`, one can first get all the transaction hashes in that ledger version by querying `SELECT * FROM ledger_transactions WHERE ledger_sequence = n;`. Then, iterate through the list of hashes and query `SELECT * FROM transactions WHERE hash = one_of_the_hash_from_the_list;` to get the detailed transaction data.
### `ledger_hashes`
### ledger_hashes
```
CREATE TABLE clio.ledger_hashes (
hash blob PRIMARY KEY, # Hash of entire ledger version's data
@@ -58,7 +58,7 @@ CREATE TABLE clio.ledger_hashes (
) ...
```
This table stores the hash of all ledger versions by their sequences.
### `ledger_range`
### ledger_range
```
CREATE TABLE clio.ledger_range (
is_latest boolean PRIMARY KEY, # Whether this sequence is the stopping range
@@ -67,7 +67,7 @@ CREATE TABLE clio.ledger_range (
```
This table marks the range of ledger versions stored on this specific Cassandra node. By design it contains exactly two records, one with `is_latest` set to `false` and one with `true`, marking the starting and ending sequences of the ledger range.
### `objects`
### objects
```
CREATE TABLE clio.objects (
key blob, # Object index of the object
@@ -80,7 +80,7 @@ This table stores the specific data of all objects that ever existed on the XRP
This table is updated when all data for a given ledger sequence has been written to the various tables in the database. For each ledger, many associated records are written to different tables. This table is used as a synchronization mechanism, to prevent the application from reading data from a ledger for which all data has not yet been fully written.
### `ledgers`
### ledgers
```
CREATE TABLE clio.ledgers (
sequence bigint PRIMARY KEY, # Sequence of the ledger version
@@ -89,7 +89,7 @@ CREATE TABLE clio.ledgers (
```
This table stores the ledger header data of specific ledger versions by their sequence.
### `diff`
### diff
```
CREATE TABLE clio.diff (
seq bigint, # Sequence of the ledger version
@@ -99,7 +99,7 @@ CREATE TABLE clio.diff (
```
This table stores the object index of all the changes in each ledger version.
### `account_tx`
### account_tx
```
CREATE TABLE clio.account_tx (
account blob,
@@ -110,8 +110,7 @@ CREATE TABLE clio.account_tx (
```
This table stores the list of transactions affecting a given account. This includes transactions made by the account, as well as transactions received.
### `successor`
### successor
```
CREATE TABLE clio.successor (
key blob, # Object index
@@ -142,7 +141,7 @@ ledger. Because of this tradeoff, clio implements a special NFT indexing data
structure that allows clio users to query NFTs quickly, while keeping
rippled's space-saving optimizations.
#### `nf_tokens`
#### nf_tokens
```
CREATE TABLE clio.nf_tokens (
token_id blob, # The NFT's ID
@@ -166,7 +165,7 @@ use the `nft_history` API, which will give you the NFTokenBurn transaction
that burned this token, along with the account that submitted that
transaction.
#### `issuer_nf_tokens_v2`
#### issuer_nf_tokens_v2
```
CREATE TABLE clio.issuer_nf_tokens_v2 (
issuer blob, # The NFT issuer's account ID
@@ -181,7 +180,7 @@ issued, or all the NFTs a specific account issued with a specific taxon. It is
not useful to know all the NFTs with a given taxon while excluding the issuer, since the
meaning of a taxon is left to the issuer.
#### `nf_token_uris`
#### nf_token_uris
```
CREATE TABLE clio.nf_token_uris (
token_id blob, # The NFT's ID
version to rippled, but just in case, we handle that edge case by allowing
a given NFT ID to be assigned a new URI without removing the prior one.
#### `nf_token_transactions`
#### nf_token_transactions
```
CREATE TABLE clio.nf_token_transactions (
token_id blob, # The NFT's ID

View File

@@ -22,8 +22,10 @@
#include <ripple/basics/base_uint.h>
#include <ripple/protocol/AccountID.h>
#include <cstdint>
#include <optional>
#include <string>
#include <string_view>
#include <tuple>
#include <utility>
#include <vector>
@@ -39,10 +41,7 @@ struct LedgerObject {
Blob blob;
bool
operator==(LedgerObject const& other) const
{
return key == other.key && blob == other.blob;
}
operator==(LedgerObject const& other) const = default;
};
/**
@@ -71,11 +70,25 @@ struct TransactionAndMetadata {
std::uint32_t date = 0;
TransactionAndMetadata() = default;
/**
* @brief Construct a new Transaction And Metadata object
*
* @param transaction The transaction
* @param metadata The metadata
* @param ledgerSequence The ledger sequence
* @param date The date
*/
TransactionAndMetadata(Blob transaction, Blob metadata, std::uint32_t ledgerSequence, std::uint32_t date)
: transaction{std::move(transaction)}, metadata{std::move(metadata)}, ledgerSequence{ledgerSequence}, date{date}
{
}
/**
* @brief Construct a new Transaction And Metadata object
*
* @param data The data to construct from
*/
TransactionAndMetadata(std::tuple<Blob, Blob, std::uint32_t, std::uint32_t> data)
: transaction{std::get<0>(data)}
, metadata{std::get<1>(data)}
@@ -84,6 +97,12 @@ struct TransactionAndMetadata {
{
}
/**
* @brief Check if the transaction and metadata are the same as another
*
* @param other The other transaction and metadata
* @return true if they are the same; false otherwise
*/
bool
operator==(TransactionAndMetadata const& other) const
{
@@ -100,11 +119,23 @@ struct TransactionsCursor {
std::uint32_t transactionIndex = 0;
TransactionsCursor() = default;
/**
* @brief Construct a new Transactions Cursor object
*
* @param ledgerSequence The ledger sequence
* @param transactionIndex The transaction index
*/
TransactionsCursor(std::uint32_t ledgerSequence, std::uint32_t transactionIndex)
: ledgerSequence{ledgerSequence}, transactionIndex{transactionIndex}
{
}
/**
* @brief Construct a new Transactions Cursor object
*
* @param data The data to construct from
*/
TransactionsCursor(std::tuple<std::uint32_t, std::uint32_t> data)
: ledgerSequence{std::get<0>(data)}, transactionIndex{std::get<1>(data)}
{
@@ -113,6 +144,11 @@ struct TransactionsCursor {
bool
operator==(TransactionsCursor const& other) const = default;
/**
* @brief Convert the cursor to a tuple of seq and index
*
* @return The cursor as a tuple
*/
[[nodiscard]] std::tuple<std::uint32_t, std::uint32_t>
asTuple() const
{
@@ -139,6 +175,16 @@ struct NFT {
bool isBurned{};
NFT() = default;
/**
* @brief Construct a new NFT object
*
* @param tokenID The token ID
* @param ledgerSequence The ledger sequence
* @param owner The owner
* @param uri The URI
* @param isBurned Whether the token is burned
*/
NFT(ripple::uint256 const& tokenID,
std::uint32_t ledgerSequence,
ripple::AccountID const& owner,
@@ -148,13 +194,28 @@ struct NFT {
{
}
/**
* @brief Construct a new NFT object
*
* @param tokenID The token ID
* @param ledgerSequence The ledger sequence
* @param owner The owner
* @param isBurned Whether the token is burned
*/
NFT(ripple::uint256 const& tokenID, std::uint32_t ledgerSequence, ripple::AccountID const& owner, bool isBurned)
: NFT(tokenID, ledgerSequence, owner, {}, isBurned)
{
}
// clearly two tokens are the same if they have the same ID, but this struct stores the state of a given token at a
// given ledger sequence, so we also need to compare with ledgerSequence.
/**
* @brief Check if the NFT is the same as another
*
* Clearly two tokens are the same if they have the same ID, but this struct stores the state of a given
* token at a given ledger sequence, so we also need to compare with ledgerSequence.
*
* @param other The other NFT
* @return true if they are the same; false otherwise
*/
bool
operator==(NFT const& other) const
{
@@ -162,6 +223,9 @@ struct NFT {
}
};
/**
* @brief Represents a bundle of NFTs with a cursor to the next page
*/
struct NFTsAndCursor {
std::vector<NFT> nfts;
std::optional<ripple::uint256> cursor;

View File

@@ -19,15 +19,20 @@
#pragma once
#include <data/cassandra/Types.h>
#include "data/cassandra/Types.hpp"
#include <boost/asio/io_context.hpp>
#include <boost/asio/spawn.hpp>
#include <boost/json.hpp>
#include <boost/json/object.hpp>
#include <chrono>
#include <concepts>
#include <cstdint>
#include <optional>
#include <string>
#include <utility>
#include <vector>
namespace data::cassandra {
@@ -118,9 +123,6 @@ concept SomeRetryPolicy = requires(T a, boost::asio::io_context ioc, CassandraEr
{
a.retry([]() {})
} -> std::same_as<void>;
{
a.calculateDelay(attempt)
} -> std::same_as<std::chrono::milliseconds>;
};
} // namespace data::cassandra
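For illustration, a minimal policy that would satisfy the two requirements visible in this fragment; the struct name and the linear backoff are hypothetical, and a real policy would likely schedule the retry on the `io_context` from the concept's parameter list rather than re-invoking inline:

```
#include <chrono>
#include <cstdint>

struct LinearBackoffPolicy {
    // Re-invoke the continuation immediately (a real policy would post it with a delay).
    void retry(auto&& fn) { fn(); }

    // Linear backoff: attempt 0 -> 0ms, attempt 1 -> 100ms, and so on.
    std::chrono::milliseconds calculateDelay(std::uint32_t attempt) {
        return std::chrono::milliseconds{100 * attempt};
    }
};
```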

View File

@@ -21,6 +21,8 @@
#include <cassandra.h>
#include <cstdint>
#include <ostream>
#include <string>
#include <utility>
@@ -34,11 +36,20 @@ class CassandraError {
uint32_t code_{};
public:
CassandraError() = default; // default constructible required by Expected
// default constructible required by Expected
CassandraError() = default;
/**
* @brief Construct a new CassandraError object
*
* @param message The error message
* @param code The error code
*/
CassandraError(std::string message, uint32_t code) : message_{std::move(message)}, code_{code}
{
}
/** @cond */
template <typename T>
friend std::string
operator+(T const& lhs, CassandraError const& rhs)
@@ -68,6 +79,7 @@ public:
os << err.message();
return os;
}
/** @endcond */
/**
* @return The final error message as a std::string

View File

@@ -17,7 +17,18 @@
*/
//==============================================================================
#include <data/cassandra/Handle.h>
#include "data/cassandra/Handle.hpp"
#include "data/cassandra/Types.hpp"
#include <cassandra.h>
#include <functional>
#include <stdexcept>
#include <string>
#include <string_view>
#include <utility>
#include <vector>
namespace data::cassandra {
@@ -85,7 +96,7 @@ Handle::reconnect(std::string_view keyspace) const
}
std::vector<Handle::FutureType>
Handle::asyncExecuteEach(std::vector<Statement> const& statements) const
Handle::asyncExecuteEach(std::vector<StatementType> const& statements) const
{
std::vector<Handle::FutureType> futures;
futures.reserve(statements.size());
@@ -95,7 +106,7 @@ Handle::asyncExecuteEach(std::vector<Statement> const& statements) const
}
Handle::MaybeErrorType
Handle::executeEach(std::vector<Statement> const& statements) const
Handle::executeEach(std::vector<StatementType> const& statements) const
{
for (auto futures = asyncExecuteEach(statements); auto const& future : futures) {
if (auto rc = future.await(); not rc)
@@ -106,38 +117,37 @@ Handle::executeEach(std::vector<Statement> const& statements) const
}
Handle::FutureType
Handle::asyncExecute(Statement const& statement) const
Handle::asyncExecute(StatementType const& statement) const
{
return cass_session_execute(session_, statement);
}
Handle::FutureWithCallbackType
Handle::asyncExecute(Statement const& statement, std::function<void(Handle::ResultOrErrorType)>&& cb) const
Handle::asyncExecute(StatementType const& statement, std::function<void(ResultOrErrorType)>&& cb) const
{
return Handle::FutureWithCallbackType{cass_session_execute(session_, statement), std::move(cb)};
}
Handle::ResultOrErrorType
Handle::execute(Statement const& statement) const
Handle::execute(StatementType const& statement) const
{
return asyncExecute(statement).get();
}
Handle::FutureType
Handle::asyncExecute(std::vector<Statement> const& statements) const
Handle::asyncExecute(std::vector<StatementType> const& statements) const
{
return cass_session_execute_batch(session_, Batch{statements});
}
Handle::MaybeErrorType
Handle::execute(std::vector<Statement> const& statements) const
Handle::execute(std::vector<StatementType> const& statements) const
{
return asyncExecute(statements).await();
}
Handle::FutureWithCallbackType
Handle::asyncExecute(std::vector<Statement> const& statements, std::function<void(Handle::ResultOrErrorType)>&& cb)
const
Handle::asyncExecute(std::vector<StatementType> const& statements, std::function<void(ResultOrErrorType)>&& cb) const
{
return Handle::FutureWithCallbackType{cass_session_execute_batch(session_, Batch{statements}), std::move(cb)};
}

Some files were not shown because too many files have changed in this diff.