Mirror of https://github.com/XRPLF/rippled.git, synced 2025-11-04 19:25:51 +00:00.

Comparing commits: 2.2.0-rc1...bthomee/pr (511 commits)
(The Author/SHA1/Date table for this comparison lost its author, date, and commit-message columns in the mirror; only the abbreviated hashes of the 511 commits survived, so the table is omitted here.)
@@ -1,5 +1,21 @@
 ---
 Language: Cpp
+BreakBeforeBraces: Custom
+BraceWrapping:
+  AfterClass: true
+  AfterControlStatement: true
+  AfterEnum: false
+  AfterFunction: true
+  AfterNamespace: false
+  AfterObjCDeclaration: true
+  AfterStruct: true
+  AfterUnion: true
+  BeforeCatch: true
+  BeforeElse: true
+  IndentBraces: false
+KeepEmptyLinesAtTheStartOfBlocks: false
+MaxEmptyLinesToKeep: 1
+---
+Language: Cpp
 AccessModifierOffset: -4
 AlignAfterOpenBracket: AlwaysBreak
 AlignConsecutiveAssignments: false
@@ -18,48 +34,41 @@ AlwaysBreakBeforeMultilineStrings: true
 AlwaysBreakTemplateDeclarations: true
 BinPackArguments: false
 BinPackParameters: false
-BraceWrapping:
-  AfterClass: true
-  AfterControlStatement: true
-  AfterEnum: false
-  AfterFunction: true
-  AfterNamespace: false
-  AfterObjCDeclaration: true
-  AfterStruct: true
-  AfterUnion: true
-  BeforeCatch: true
-  BeforeElse: true
-  IndentBraces: false
 BreakBeforeBinaryOperators: false
-BreakBeforeBraces: Custom
 BreakBeforeTernaryOperators: true
 BreakConstructorInitializersBeforeComma: true
 ColumnLimit: 80
-CommentPragmas: '^ IWYU pragma:'
+CommentPragmas: "^ IWYU pragma:"
 ConstructorInitializerAllOnOneLineOrOnePerLine: true
 ConstructorInitializerIndentWidth: 4
 ContinuationIndentWidth: 4
 Cpp11BracedListStyle: true
 DerivePointerAlignment: false
 DisableFormat: false
 ExperimentalAutoDetectBinPacking: false
-ForEachMacros: [ Q_FOREACH, BOOST_FOREACH ]
+ForEachMacros: [Q_FOREACH, BOOST_FOREACH]
 IncludeBlocks: Regroup
 IncludeCategories:
-  - Regex: '^<(BeastConfig)'
-    Priority: 0
-  - Regex: '^<(ripple)/'
-    Priority: 2
-  - Regex: '^<(boost)/'
-    Priority: 3
-  - Regex: '.*'
-    Priority: 4
-IncludeIsMainRegex: '$'
+  - Regex: "^<(test)/"
+    Priority: 0
+  - Regex: "^<(xrpld)/"
+    Priority: 1
+  - Regex: "^<(xrpl)/"
+    Priority: 2
+  - Regex: "^<(boost)/"
+    Priority: 3
+  - Regex: "^.*/"
+    Priority: 4
+  - Regex: '^.*\.h'
+    Priority: 5
+  - Regex: ".*"
+    Priority: 6
+IncludeIsMainRegex: "$"
 IndentCaseLabels: true
 IndentFunctionDeclarationAfterType: false
+IndentRequiresClause: true
 IndentWidth: 4
 IndentWrappedFunctionNames: false
 KeepEmptyLinesAtTheStartOfBlocks: false
 MaxEmptyLinesToKeep: 1
 NamespaceIndentation: None
 ObjCSpaceAfterProperty: false
 ObjCSpaceBeforeProtocolList: false
@@ -70,18 +79,25 @@ PenaltyBreakString: 1000
 PenaltyExcessCharacter: 1000000
 PenaltyReturnTypeOnItsOwnLine: 200
 PointerAlignment: Left
 ReflowComments: true
+RequiresClausePosition: OwnLine
 SortIncludes: true
 SpaceAfterCStyleCast: false
 SpaceBeforeAssignmentOperators: true
 SpaceBeforeParens: ControlStatements
 SpaceInEmptyParentheses: false
 SpacesBeforeTrailingComments: 2
 SpacesInAngles: false
 SpacesInContainerLiterals: true
 SpacesInCStyleCastParentheses: false
 SpacesInParentheses: false
 SpacesInSquareBrackets: false
 Standard: Cpp11
 TabWidth: 8
 UseTab: Never
+QualifierAlignment: Right
+---
+Language: Proto
+BasedOnStyle: Google
+ColumnLimit: 0
+IndentWidth: 2
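The most visible change above is the include regrouping into test/xrpld/xrpl/boost tiers; the quickest way to see its effect locally is to run clang-format over the tree. A minimal sketch, assuming a clang-format recent enough to understand QualifierAlignment (version 14 or later) is on the PATH and the style file sits at the repository root:

# Sketch: reformat the C++ sources against the style options above.
# The find pattern mirrors the (since removed) clang-format workflow
# at the end of this diff, adapted to the new include/src layout.
find src include -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 |
    xargs -0 clang-format -i
git diff --stat  # review what the new style touched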
.codecov.yml (11 changed lines)
@@ -7,13 +7,13 @@ comment:
   show_carryforward_flags: false

 coverage:
-  range: "60..80"
+  range: "70..85"
   precision: 1
   round: nearest
   status:
     project:
       default:
-        target: 60%
+        target: 75%
         threshold: 2%
     patch:
       default:
@@ -27,11 +27,12 @@ github_checks:
 parsers:
   cobertura:
     partials_as_hits: true
-    handle_missing_conditions : true
+    handle_missing_conditions: true

 slack_app: false

 ignore:
-  - "src/test/"
-  - "src/ripple/beast/test/"
-  - "src/ripple/beast/unit_test/"
+  - "src/tests/"
+  - "include/xrpl/beast/test/"
+  - "include/xrpl/beast/unit_test/"
@@ -6,3 +6,11 @@ e2384885f5f630c8f0ffe4bf21a169b433a16858
 241b9ddde9e11beb7480600fd5ed90e1ef109b21
 760f16f56835663d9286bd29294d074de26a7ba6
 0eebe6a5f4246fced516d52b83ec4e7f47373edd
+2189cc950c0cebb89e4e2fa3b2d8817205bf7cef
+b9d007813378ad0ff45660dc07285b823c7e9855
+fe9a5365b8a52d4acc42eb27369247e6f238a4f9
+9a93577314e6a8d4b4a8368cc9d2b15a5d8303e8
+552377c76f55b403a1c876df873a23d780fcc81c
+97f0747e103f13e26e45b731731059b32f7679ac
+b13370ac0d207217354f1fc1c29aef87769fb8a1
+896b8c3b54a22b0497cb0d1ce95e1095f9a227ce
.github/CODEOWNERS (new file, 8 lines)
@@ -0,0 +1,8 @@
# Allow anyone to review any change by default.
*

# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers
.github/ISSUE_TEMPLATE/bug_report.md (13 changed lines)
@@ -2,30 +2,35 @@
 name: Bug Report
 about: Create a report to help us improve rippled
 title: "[Title with short description] (Version: [rippled version])"
-labels: ''
-assignees: ''
+labels: ""
+assignees: ""
 ---

 <!-- Please search existing issues to avoid creating duplicates.-->

 ## Issue Description

 <!--Provide a summary for your issue/bug.-->

 ## Steps to Reproduce

 <!--List in detail the exact steps to reproduce the unexpected behavior of the software.-->

 ## Expected Result

 <!--Explain in detail what behavior you expected to happen.-->

 ## Actual Result

 <!--Explain in detail what behavior actually happened.-->

 ## Environment

 <!--Please describe your environment setup (such as Ubuntu 18.04 with Boost 1.70).-->
 <!-- If you are using a formal release, please use the version returned by './rippled --version' as the version number-->
 <!-- If you are working off of develop, please add the git hash via 'git rev-parse HEAD'-->

 ## Supporting Files

 <!--If you have supporting files such as a log, feel free to post a link here using Github Gist.-->
 <!--Consider adding configuration files with private information removed via Github Gist. -->
.github/ISSUE_TEMPLATE/feature_request.md (8 changed lines)
@@ -3,19 +3,23 @@ name: Feature Request
 about: Suggest a new feature for the rippled project
 title: "[Title with short description] (Version: [rippled version])"
 labels: Feature Request
-assignees: ''
+assignees: ""
 ---

 <!-- Please search existing issues to avoid creating duplicates.-->

 ## Summary

 <!-- Provide a summary to the feature request-->

 ## Motivation

 <!-- Why do we need this feature?-->

 ## Solution

 <!-- What is the solution?-->

 ## Paths Not Taken

 <!-- What other alternatives have been considered?-->
.github/actions/build-deps/action.yml (new file, 44 lines)
@@ -0,0 +1,44 @@
name: Build Conan dependencies
description: "Install Conan dependencies, optionally forcing a rebuild of all dependencies."

# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
  verbosity:
    description: "The build verbosity."
    required: false
    default: "verbose"
  build_dir:
    description: "The directory where to build."
    required: true
  build_type:
    description: 'The build type to use ("Debug", "Release").'
    required: true
  force_build:
    description: 'Force building of all dependencies ("true", "false").'
    required: false
    default: "false"

runs:
  using: composite
  steps:
    - name: Install Conan dependencies
      shell: bash
      env:
        BUILD_DIR: ${{ inputs.build_dir }}
        BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
        BUILD_TYPE: ${{ inputs.build_type }}
        VERBOSITY: ${{ inputs.verbosity }}
      run: |
        echo 'Installing dependencies.'
        mkdir -p "${BUILD_DIR}"
        cd "${BUILD_DIR}"
        conan install \
          --output-folder . \
          --build="${BUILD_OPTION}" \
          --options:host='&:tests=True' \
          --options:host='&:xrpld=True' \
          --settings:all build_type="${BUILD_TYPE}" \
          --conf:all tools.build:verbosity="${VERBOSITY}" \
          --conf:all tools.compilation:verbosity="${VERBOSITY}" \
          ..
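The action boils down to a single conan install with host options for the tests and xrpld targets, so it can be reproduced outside CI. A minimal sketch, assuming Conan 2 with a default profile already configured and the commands run from the repository root:

# Sketch: install the Conan dependencies locally the way the build-deps
# action does ('missing' instead of '*' skips anything already cached).
mkdir -p build && cd build
conan install \
  --output-folder . \
  --build=missing \
  --options:host='&:tests=True' \
  --options:host='&:xrpld=True' \
  --settings:all build_type=Release \
  ..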
.github/actions/build/action.yml (deleted, 32 lines)
@@ -1,32 +0,0 @@
name: build
inputs:
  generator:
    default: null
  configuration:
    required: true
  cmake-args:
    default: null
  cmake-target:
    default: all
# An implicit input is the environment variable `build_dir`.
runs:
  using: composite
  steps:
    - name: configure
      shell: bash
      run: |
        cd ${build_dir}
        cmake \
          ${{ inputs.generator && format('-G "{0}"', inputs.generator) || '' }} \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
          ${{ inputs.cmake-args }} \
          ..
    - name: build
      shell: bash
      run: |
        cmake \
          --build ${build_dir} \
          --config ${{ inputs.configuration }} \
          --parallel ${NUM_PROCESSORS:-$(nproc)} \
          --target ${{ inputs.cmake-target }}
.github/actions/dependencies/action.yml (deleted, 58 lines)
@@ -1,58 +0,0 @@
name: dependencies
inputs:
  configuration:
    required: true
# An implicit input is the environment variable `build_dir`.
runs:
  using: composite
  steps:
    - name: unlock Conan
      shell: bash
      run: conan remove --locks
    - name: export custom recipes
      shell: bash
      run: |
        conan config set general.revisions_enabled=1
        conan export external/snappy snappy/1.1.10@
        conan export external/rocksdb rocksdb/6.29.5@
        conan export external/soci soci/4.0.3@
    - name: add Ripple Conan remote
      shell: bash
      run: |
        conan remote list
        conan remote remove ripple || true
        # Do not quote the URL. An empty string will be accepted (with
        # a non-fatal warning), but a missing argument will not.
        conan remote add ripple ${{ env.CONAN_URL }} --insert 0
    - name: try to authenticate to Ripple Conan remote
      id: remote
      shell: bash
      run: |
        # `conan user` implicitly uses the environment variables
        # CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
        # https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
        # https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
        # https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
        echo outcome=$(conan user --remote ripple --password >&2 \
          && echo success || echo failure) | tee ${GITHUB_OUTPUT}
    - name: list missing binaries
      id: binaries
      shell: bash
      # Print the list of dependencies that would need to be built locally.
      # A non-empty list means we have "failed" to cache binaries remotely.
      run: |
        echo missing=$(conan info . --build missing --settings build_type=${{ inputs.configuration }} --json 2>/dev/null | grep '^\[') | tee ${GITHUB_OUTPUT}
    - name: install dependencies
      shell: bash
      run: |
        mkdir ${build_dir}
        cd ${build_dir}
        conan install \
          --output-folder . \
          --build missing \
          --settings build_type=${{ inputs.configuration }} \
          ..
    - name: upload dependencies to remote
      if: (steps.binaries.outputs.missing != '[]') && (steps.remote.outputs.outcome == 'success')
      shell: bash
      run: conan upload --remote ripple '*' --all --parallel --confirm
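The authentication step above leaned on Conan 1's implicit credential variables rather than passing secrets on the command line. A sketch of the same handshake outside CI, with placeholder credentials:

# Sketch: authenticate against the (now removed) 'ripple' Conan 1 remote
# exactly as the deleted action did. Both values are hypothetical.
export CONAN_LOGIN_USERNAME_RIPPLE='ci-user'   # hypothetical
export CONAN_PASSWORD_RIPPLE='api-token'       # hypothetical
conan user --remote ripple --password && echo success || echo failure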
.github/actions/print-env/action.yml (new file, 43 lines)
@@ -0,0 +1,43 @@
name: Print build environment
description: "Print environment and some tooling versions"

runs:
  using: composite
  steps:
    - name: Check configuration (Windows)
      if: ${{ runner.os == 'Windows' }}
      shell: bash
      run: |
        echo 'Checking environment variables.'
        set

        echo 'Checking CMake version.'
        cmake --version

        echo 'Checking Conan version.'
        conan --version

    - name: Check configuration (Linux and macOS)
      if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
      shell: bash
      run: |
        echo 'Checking path.'
        echo ${PATH} | tr ':' '\n'

        echo 'Checking environment variables.'
        env | sort

        echo 'Checking CMake version.'
        cmake --version

        echo 'Checking compiler version.'
        ${{ runner.os == 'Linux' && '${CC}' || 'clang' }} --version

        echo 'Checking Conan version.'
        conan --version

        echo 'Checking Ninja version.'
        ninja --version

        echo 'Checking nproc version.'
        nproc --version
.github/actions/setup-conan/action.yml (new file, 46 lines)
@@ -0,0 +1,46 @@
name: Setup Conan
description: "Set up Conan configuration, profile, and remote."

inputs:
  conan_remote_name:
    description: "The name of the Conan remote to use."
    required: false
    default: xrplf
  conan_remote_url:
    description: "The URL of the Conan endpoint to use."
    required: false
    default: https://conan.ripplex.io

runs:
  using: composite

  steps:
    - name: Set up Conan configuration
      shell: bash
      run: |
        echo 'Installing configuration.'
        cat conan/global.conf ${{ runner.os == 'Linux' && '>>' || '>' }} $(conan config home)/global.conf

        echo 'Conan configuration:'
        conan config show '*'

    - name: Set up Conan profile
      shell: bash
      run: |
        echo 'Installing profile.'
        conan config install conan/profiles/default -tf $(conan config home)/profiles/

        echo 'Conan profile:'
        conan profile show

    - name: Set up Conan remote
      shell: bash
      env:
        CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
        CONAN_REMOTE_URL: ${{ inputs.conan_remote_url }}
      run: |
        echo "Adding Conan remote '${CONAN_REMOTE_NAME}' at '${CONAN_REMOTE_URL}'."
        conan remote add --index 0 --force "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_URL}"

        echo 'Listing Conan remotes.'
        conan remote list
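Because the action only shells out to stock Conan commands, the same setup works on a workstation. A sketch, assuming it runs from the repository root on Linux with the default remote name and URL (note that the Linux branch above appends to an existing global.conf instead of overwriting it):

# Sketch: replicate the setup-conan action locally on Linux.
cat conan/global.conf >> "$(conan config home)/global.conf"
conan config install conan/profiles/default -tf "$(conan config home)/profiles/"
conan remote add --index 0 --force xrplf https://conan.ripplex.io
conan remote list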
@@ -25,32 +25,32 @@ more dependencies listed later.
 **tl;dr:** The modules listed first are more independent than the modules
 listed later.

-| Level / Tier | Module(s) |
-|--------------|-----------------------------------------------|
-| 01 | ripple/beast ripple/unity
-| 02 | ripple/basics
-| 03 | ripple/json ripple/crypto
-| 04 | ripple/protocol
-| 05 | ripple/core ripple/conditions ripple/consensus ripple/resource ripple/server
-| 06 | ripple/peerfinder ripple/ledger ripple/nodestore ripple/net
-| 07 | ripple/shamap ripple/overlay
-| 08 | ripple/app
-| 09 | ripple/rpc
-| 10 | ripple/perflog
-| 11 | test/jtx test/beast test/csf
-| 12 | test/unit_test
-| 13 | test/crypto test/conditions test/json test/resource test/shamap test/peerfinder test/basics test/overlay
-| 14 | test
-| 15 | test/net test/protocol test/ledger test/consensus test/core test/server test/nodestore
-| 16 | test/rpc test/app
+| Level / Tier | Module(s) |
+| ------------ | -------------------------------------------------------------------------------------------------------- |
+| 01 | ripple/beast ripple/unity |
+| 02 | ripple/basics |
+| 03 | ripple/json ripple/crypto |
+| 04 | ripple/protocol |
+| 05 | ripple/core ripple/conditions ripple/consensus ripple/resource ripple/server |
+| 06 | ripple/peerfinder ripple/ledger ripple/nodestore ripple/net |
+| 07 | ripple/shamap ripple/overlay |
+| 08 | ripple/app |
+| 09 | ripple/rpc |
+| 10 | ripple/perflog |
+| 11 | test/jtx test/beast test/csf |
+| 12 | test/unit_test |
+| 13 | test/crypto test/conditions test/json test/resource test/shamap test/peerfinder test/basics test/overlay |
+| 14 | test |
+| 15 | test/net test/protocol test/ledger test/consensus test/core test/server test/nodestore |
+| 16 | test/rpc test/app |

-(Note that `test` levelization is *much* less important and *much* less
+(Note that `test` levelization is _much_ less important and _much_ less
 strictly enforced than `ripple` levelization, other than the requirement
-that `test` code should *never* be included in `ripple` code.)
+that `test` code should _never_ be included in `ripple` code.)

 ## Validation

-The [levelization.sh](levelization.sh) script takes no parameters,
+The [levelization](generate.sh) script takes no parameters,
 reads no environment variables, and can be run from any directory,
 as long as it is in the expected location in the rippled repo.
 It can be run at any time from within a checked out repo, and will
@@ -59,48 +59,48 @@ the rippled source. The only caveat is that it runs much slower
 under Windows than in Linux. It hasn't yet been tested under MacOS.
 It generates many files of [results](results):

-* `rawincludes.txt`: The raw dump of the `#includes`
-* `paths.txt`: A second dump grouping the source module
+- `rawincludes.txt`: The raw dump of the `#includes`
+- `paths.txt`: A second dump grouping the source module
   to the destination module, deduped, and with frequency counts.
-* `includes/`: A directory where each file represents a module and
+- `includes/`: A directory where each file represents a module and
   contains a list of modules and counts that the module _includes_.
-* `includedby/`: Similar to `includes/`, but the other way around. Each
+- `includedby/`: Similar to `includes/`, but the other way around. Each
   file represents a module and contains a list of modules and counts
   that _include_ the module.
-* [`loops.txt`](results/loops.txt): A list of direct loops detected
+- [`loops.txt`](results/loops.txt): A list of direct loops detected
   between modules as they actually exist, as opposed to how they are
   desired as described above. In a perfect repo, this file will be
   empty.
   This file is committed to the repo, and is used by the [levelization
-  Github workflow](../../.github/workflows/levelization.yml) to validate
+  Github workflow](../../workflows/reusable-check-levelization.yml) to validate
   that nothing changed.
-* [`ordering.txt`](results/ordering.txt): A list showing relationships
+- [`ordering.txt`](results/ordering.txt): A list showing relationships
   between modules where there are no loops as they actually exist, as
   opposed to how they are desired as described above.
   This file is committed to the repo, and is used by the [levelization
-  Github workflow](../../.github/workflows/levelization.yml) to validate
+  Github workflow](../../workflows/reusable-check-levelization.yml) to validate
   that nothing changed.
-* [`levelization.yml`](../../.github/workflows/levelization.yml)
+- [`levelization.yml`](../../workflows/reusable-check-levelization.yml)
   Github Actions workflow to test that levelization loops haven't
   changed. Unfortunately, if changes are detected, it can't tell if
   they are improvements or not, so if you have resolved any issues or
   done anything else to improve levelization, run `levelization.sh`,
   and commit the updated results.

 The `loops.txt` and `ordering.txt` files relate the modules
 using comparison signs, which indicate the number of times each
 module is included in the other.

-* `A > B` means that A should probably be at a higher level than B,
+- `A > B` means that A should probably be at a higher level than B,
   because B is included in A significantly more than A is included in B.
   These results can be included in both `loops.txt` and `ordering.txt`.
   Because `ordering.txt`only includes relationships where B is not
   included in A at all, it will only include these types of results.
-* `A ~= B` means that A and B are included in each other a different
+- `A ~= B` means that A and B are included in each other a different
   number of times, but the values are so close that the script can't
   definitively say that one should be above the other. These results
   will only be included in `loops.txt`.
-* `A == B` means that A and B include each other the same number of
+- `A == B` means that A and B include each other the same number of
   times, so the script has no clue which should be higher. These results
   will only be included in `loops.txt`.

@@ -110,5 +110,5 @@ get those details locally.

 1. Run `levelization.sh`
 2. Grep the modules in `paths.txt`.
-   * For example, if a cycle is found `A ~= B`, simply `grep -w
-     A Builds/levelization/results/paths.txt | grep -w B`
+   - For example, if a cycle is found `A ~= B`, simply `grep -w
+     A .github/scripts/levelization/results/paths.txt | grep -w B`
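Putting the README's validation and grep advice together, a typical investigation of one reported cycle looks like this. A sketch, assuming the script layout introduced in this diff and one of the loops reported in loops.txt below:

# Sketch: regenerate levelization results, then inspect the includes
# behind the xrpld.app <-> xrpld.rpc loop reported in loops.txt.
cd .github/scripts/levelization
./generate.sh
grep -w xrpld.app results/paths.txt | grep -w xrpld.rpc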
@@ -1,6 +1,6 @@
 #!/bin/bash

-# Usage: levelization.sh
+# Usage: generate.sh
 # This script takes no parameters, reads no environment variables,
 # and can be run from any directory, as long as it is in the expected
 # location in the repo.
@@ -13,12 +13,15 @@ then
     git clean -ix
 fi

+# Ensure all sorting is ASCII-order consistently across platforms.
+export LANG=C
+
 rm -rfv results
 mkdir results
 includes="$( pwd )/results/rawincludes.txt"
-pushd ../..
+pushd ../../..
 echo Raw includes:
-grep -r '#include.*/.*\.h' src/ripple/ src/test/ | \
+grep -r '^[ ]*#include.*/.*\.h' include src | \
 grep -v boost | tee ${includes}
 popd
 pushd results
.github/scripts/levelization/results/loops.txt (new file, 30 lines)
@@ -0,0 +1,30 @@
Loop: test.jtx test.toplevel
    test.toplevel > test.jtx

Loop: test.jtx test.unit_test
    test.unit_test == test.jtx

Loop: xrpld.app xrpld.core
    xrpld.app > xrpld.core

Loop: xrpld.app xrpld.overlay
    xrpld.overlay > xrpld.app

Loop: xrpld.app xrpld.peerfinder
    xrpld.peerfinder ~= xrpld.app

Loop: xrpld.app xrpld.rpc
    xrpld.rpc > xrpld.app

Loop: xrpld.app xrpld.shamap
    xrpld.app > xrpld.shamap

Loop: xrpld.core xrpld.perflog
    xrpld.perflog == xrpld.core

Loop: xrpld.overlay xrpld.rpc
    xrpld.rpc ~= xrpld.overlay

Loop: xrpld.perflog xrpld.rpc
    xrpld.rpc ~= xrpld.perflog
.github/scripts/levelization/results/ordering.txt (new file, 203 lines)
@@ -0,0 +1,203 @@
libxrpl.basics > xrpl.basics
libxrpl.crypto > xrpl.basics
libxrpl.json > xrpl.basics
libxrpl.json > xrpl.json
libxrpl.ledger > xrpl.basics
libxrpl.ledger > xrpl.json
libxrpl.ledger > xrpl.ledger
libxrpl.ledger > xrpl.protocol
libxrpl.net > xrpl.basics
libxrpl.net > xrpl.net
libxrpl.protocol > xrpl.basics
libxrpl.protocol > xrpl.json
libxrpl.protocol > xrpl.protocol
libxrpl.resource > xrpl.basics
libxrpl.resource > xrpl.json
libxrpl.resource > xrpl.resource
libxrpl.server > xrpl.basics
libxrpl.server > xrpl.json
libxrpl.server > xrpl.protocol
libxrpl.server > xrpl.server
test.app > test.jtx
test.app > test.rpc
test.app > test.toplevel
test.app > test.unit_test
test.app > xrpl.basics
test.app > xrpld.app
test.app > xrpld.core
test.app > xrpld.nodestore
test.app > xrpld.overlay
test.app > xrpld.rpc
test.app > xrpl.json
test.app > xrpl.ledger
test.app > xrpl.protocol
test.app > xrpl.resource
test.basics > test.jtx
test.basics > test.unit_test
test.basics > xrpl.basics
test.basics > xrpld.perflog
test.basics > xrpld.rpc
test.basics > xrpl.json
test.basics > xrpl.protocol
test.beast > xrpl.basics
test.conditions > xrpl.basics
test.conditions > xrpld.conditions
test.consensus > test.csf
test.consensus > test.toplevel
test.consensus > test.unit_test
test.consensus > xrpl.basics
test.consensus > xrpld.app
test.consensus > xrpld.consensus
test.consensus > xrpl.json
test.consensus > xrpl.ledger
test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test
test.core > xrpl.basics
test.core > xrpld.core
test.core > xrpld.perflog
test.core > xrpl.json
test.core > xrpl.server
test.csf > xrpl.basics
test.csf > xrpld.consensus
test.csf > xrpl.json
test.csf > xrpl.protocol
test.json > test.jtx
test.json > xrpl.json
test.jtx > xrpl.basics
test.jtx > xrpld.app
test.jtx > xrpld.core
test.jtx > xrpld.rpc
test.jtx > xrpl.json
test.jtx > xrpl.ledger
test.jtx > xrpl.net
test.jtx > xrpl.protocol
test.jtx > xrpl.resource
test.jtx > xrpl.server
test.ledger > test.jtx
test.ledger > test.toplevel
test.ledger > xrpl.basics
test.ledger > xrpld.app
test.ledger > xrpld.core
test.ledger > xrpl.ledger
test.ledger > xrpl.protocol
test.nodestore > test.jtx
test.nodestore > test.toplevel
test.nodestore > test.unit_test
test.nodestore > xrpl.basics
test.nodestore > xrpld.core
test.nodestore > xrpld.nodestore
test.nodestore > xrpld.unity
test.overlay > test.jtx
test.overlay > test.toplevel
test.overlay > test.unit_test
test.overlay > xrpl.basics
test.overlay > xrpld.app
test.overlay > xrpld.overlay
test.overlay > xrpld.peerfinder
test.overlay > xrpld.shamap
test.overlay > xrpl.protocol
test.peerfinder > test.beast
test.peerfinder > test.unit_test
test.peerfinder > xrpl.basics
test.peerfinder > xrpld.core
test.peerfinder > xrpld.peerfinder
test.peerfinder > xrpl.protocol
test.protocol > test.toplevel
test.protocol > xrpl.basics
test.protocol > xrpl.json
test.protocol > xrpl.protocol
test.resource > test.unit_test
test.resource > xrpl.basics
test.resource > xrpl.resource
test.rpc > test.jtx
test.rpc > test.toplevel
test.rpc > xrpl.basics
test.rpc > xrpld.app
test.rpc > xrpld.core
test.rpc > xrpld.overlay
test.rpc > xrpld.rpc
test.rpc > xrpl.json
test.rpc > xrpl.protocol
test.rpc > xrpl.resource
test.server > test.jtx
test.server > test.toplevel
test.server > test.unit_test
test.server > xrpl.basics
test.server > xrpld.app
test.server > xrpld.core
test.server > xrpld.rpc
test.server > xrpl.json
test.server > xrpl.server
test.shamap > test.unit_test
test.shamap > xrpl.basics
test.shamap > xrpld.nodestore
test.shamap > xrpld.shamap
test.shamap > xrpl.protocol
test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
tests.libxrpl > xrpl.net
xrpl.json > xrpl.basics
xrpl.ledger > xrpl.basics
xrpl.ledger > xrpl.protocol
xrpl.net > xrpl.basics
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json
xrpl.resource > xrpl.basics
xrpl.resource > xrpl.json
xrpl.resource > xrpl.protocol
xrpl.server > xrpl.basics
xrpl.server > xrpl.json
xrpl.server > xrpl.protocol
xrpld.app > test.unit_test
xrpld.app > xrpl.basics
xrpld.app > xrpld.conditions
xrpld.app > xrpld.consensus
xrpld.app > xrpld.nodestore
xrpld.app > xrpld.perflog
xrpld.app > xrpl.json
xrpld.app > xrpl.ledger
xrpld.app > xrpl.net
xrpld.app > xrpl.protocol
xrpld.app > xrpl.resource
xrpld.conditions > xrpl.basics
xrpld.conditions > xrpl.protocol
xrpld.consensus > xrpl.basics
xrpld.consensus > xrpl.json
xrpld.consensus > xrpl.protocol
xrpld.core > xrpl.basics
xrpld.core > xrpl.json
xrpld.core > xrpl.net
xrpld.core > xrpl.protocol
xrpld.nodestore > xrpl.basics
xrpld.nodestore > xrpld.core
xrpld.nodestore > xrpld.unity
xrpld.nodestore > xrpl.json
xrpld.nodestore > xrpl.protocol
xrpld.overlay > xrpl.basics
xrpld.overlay > xrpld.core
xrpld.overlay > xrpld.peerfinder
xrpld.overlay > xrpld.perflog
xrpld.overlay > xrpl.json
xrpld.overlay > xrpl.protocol
xrpld.overlay > xrpl.resource
xrpld.overlay > xrpl.server
xrpld.peerfinder > xrpl.basics
xrpld.peerfinder > xrpld.core
xrpld.peerfinder > xrpl.protocol
xrpld.perflog > xrpl.basics
xrpld.perflog > xrpl.json
xrpld.rpc > xrpl.basics
xrpld.rpc > xrpld.core
xrpld.rpc > xrpld.nodestore
xrpld.rpc > xrpl.json
xrpld.rpc > xrpl.ledger
xrpld.rpc > xrpl.net
xrpld.rpc > xrpl.protocol
xrpld.rpc > xrpl.resource
xrpld.rpc > xrpl.server
xrpld.shamap > xrpl.basics
xrpld.shamap > xrpld.nodestore
xrpld.shamap > xrpl.protocol
.github/scripts/strategy-matrix/generate.py (new executable file, 197 lines)
@@ -0,0 +1,197 @@
#!/usr/bin/env python3
import argparse
import itertools
import json
from pathlib import Path
from dataclasses import dataclass

THIS_DIR = Path(__file__).parent.resolve()


@dataclass
class Config:
    architecture: list[dict]
    os: list[dict]
    build_type: list[str]
    cmake_args: list[str]


'''
Generate a strategy matrix for GitHub Actions CI.

On each PR commit we will build a selection of Debian, RHEL, Ubuntu, MacOS, and
Windows configurations, while upon merge into the develop, release, or master
branches, we will build all configurations, and test most of them.

We will further set additional CMake arguments as follows:
- All builds will have the `tests`, `werr`, and `xrpld` options.
- All builds will have the `wextra` option except for GCC 12 and Clang 16.
- All release builds will have the `assert` option.
- Certain Debian Bookworm configurations will change the reference fee, enable
  codecov, and enable voidstar in PRs.
'''
def generate_strategy_matrix(all: bool, config: Config) -> list:
    configurations = []
    for architecture, os, build_type, cmake_args in itertools.product(config.architecture, config.os, config.build_type, config.cmake_args):
        # The default CMake target is 'all' for Linux and MacOS and 'install'
        # for Windows, but it can get overridden for certain configurations.
        cmake_target = 'install' if os["distro_name"] == 'windows' else 'all'

        # We build and test all configurations by default, except for Windows
        # in Debug, because it is too slow, as well as when code coverage is
        # enabled as that mode already runs the tests.
        build_only = False
        if os['distro_name'] == 'windows' and build_type == 'Debug':
            build_only = True

        # Only generate a subset of configurations in PRs.
        if not all:
            # Debian:
            # - Bookworm using GCC 13: Release and Unity on linux/amd64, set
            #   the reference fee to 500.
            # - Bookworm using GCC 15: Debug and no Unity on linux/amd64,
            #   enable code coverage (which will be done below).
            # - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
            #   enable voidstar.
            # - Bookworm using Clang 17: Release and Unity on linux/amd64,
            #   set the reference fee to 1000.
            # - Bookworm using Clang 20: Debug and Unity on linux/amd64.
            if os['distro_name'] == 'debian':
                skip = True
                if os['distro_version'] == 'bookworm':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-13' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        cmake_args = f'-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}'
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-16' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/arm64':
                        cmake_args = f'-Dvoidstar=ON {cmake_args}'
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-17' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        cmake_args = f'-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}'
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-20' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                if skip:
                    continue

            # RHEL:
            # - 9 using GCC 12: Debug and Unity on linux/amd64.
            # - 10 using Clang: Release and no Unity on linux/amd64.
            if os['distro_name'] == 'rhel':
                skip = True
                if os['distro_version'] == '9':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                elif os['distro_version'] == '10':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-any' and build_type == 'Release' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                if skip:
                    continue

            # Ubuntu:
            # - Jammy using GCC 12: Debug and no Unity on linux/arm64.
            # - Noble using GCC 14: Release and Unity on linux/amd64.
            # - Noble using Clang 18: Debug and no Unity on linux/amd64.
            # - Noble using Clang 19: Release and Unity on linux/arm64.
            if os['distro_name'] == 'ubuntu':
                skip = True
                if os['distro_version'] == 'jammy':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/arm64':
                        skip = False
                elif os['distro_version'] == 'noble':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-14' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-18' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-19' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/arm64':
                        skip = False
                if skip:
                    continue

            # MacOS:
            # - Debug and no Unity on macos/arm64.
            if os['distro_name'] == 'macos' and not (build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'macos/arm64'):
                continue

            # Windows:
            # - Release and Unity on windows/amd64.
            if os['distro_name'] == 'windows' and not (build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'windows/amd64'):
                continue

        # Additional CMake arguments.
        cmake_args = f'{cmake_args} -Dtests=ON -Dwerr=ON -Dxrpld=ON'
        if not f'{os['compiler_name']}-{os['compiler_version']}' in ['gcc-12', 'clang-16']:
            cmake_args = f'{cmake_args} -Dwextra=ON'
        if build_type == 'Release':
            cmake_args = f'{cmake_args} -Dassert=ON'

        # We skip all RHEL on arm64 due to a build failure that needs further
        # investigation.
        if os['distro_name'] == 'rhel' and architecture['platform'] == 'linux/arm64':
            continue

        # We skip all clang-20 on arm64 due to a Boost 1.86 build error.
        if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-20' and architecture['platform'] == 'linux/arm64':
            continue

        # Enable code coverage for Debian Bookworm using GCC 15 in Debug and
        # no Unity on linux/amd64.
        if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
            cmake_args = f'-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}'
            cmake_target = 'coverage'
            build_only = True

        # Generate a unique name for the configuration, e.g. macos-arm64-debug
        # or debian-bookworm-gcc-12-amd64-release-unity.
        config_name = os['distro_name']
        if (n := os['distro_version']) != '':
            config_name += f'-{n}'
        if (n := os['compiler_name']) != '':
            config_name += f'-{n}'
        if (n := os['compiler_version']) != '':
            config_name += f'-{n}'
        config_name += f'-{architecture['platform'][architecture['platform'].find('/')+1:]}'
        config_name += f'-{build_type.lower()}'
        if '-Dunity=ON' in cmake_args:
            config_name += '-unity'

        # Add the configuration to the list, with the most unique fields
        # first, so that they are easier to identify in the GitHub Actions UI,
        # as long names get truncated.
        configurations.append({
            'config_name': config_name,
            'cmake_args': cmake_args,
            'cmake_target': cmake_target,
            'build_only': build_only,
            'build_type': build_type,
            'os': os,
            'architecture': architecture,
        })

    return configurations


def read_config(file: Path) -> Config:
    config = json.loads(file.read_text())
    if config['architecture'] is None or config['os'] is None or config['build_type'] is None or config['cmake_args'] is None:
        raise Exception('Invalid configuration file.')

    return Config(**config)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-a', '--all', help='Set to generate all configurations (generally used when merging a PR) or leave unset to generate a subset of configurations (generally used when committing to a PR).', action="store_true")
    parser.add_argument('-c', '--config', help='Path to the JSON file containing the strategy matrix configurations.', required=False, type=Path)
    args = parser.parse_args()

    matrix = []
    if args.config is None or args.config == '':
        matrix += generate_strategy_matrix(args.all, read_config(THIS_DIR / "linux.json"))
        matrix += generate_strategy_matrix(args.all, read_config(THIS_DIR / "macos.json"))
        matrix += generate_strategy_matrix(args.all, read_config(THIS_DIR / "windows.json"))
    else:
        matrix += generate_strategy_matrix(args.all, read_config(args.config))

    # Generate the strategy matrix.
    print(f'matrix={json.dumps({"include": matrix})}')
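The script is meant to feed a workflow's strategy via its stdout. A sketch of both invocation modes, assuming it runs from the repository root under Python 3.12 or newer (which the nested same-quote f-strings above require); the GITHUB_OUTPUT redirection illustrates how a workflow step would typically capture the matrix= line:

# Sketch: print the reduced PR matrix for a single platform file...
python3 .github/scripts/strategy-matrix/generate.py \
    --config .github/scripts/strategy-matrix/linux.json

# ...or emit the full matrix for all platforms into a workflow output.
python3 .github/scripts/strategy-matrix/generate.py --all >> "${GITHUB_OUTPUT}"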
184
.github/scripts/strategy-matrix/linux.json
vendored
Normal file
184
.github/scripts/strategy-matrix/linux.json
vendored
Normal file
@@ -0,0 +1,184 @@
|
||||
{
|
||||
"architecture": [
|
||||
{
|
||||
"platform": "linux/amd64",
|
||||
"runner": ["self-hosted", "Linux", "X64", "heavy"]
|
||||
},
|
||||
{
|
||||
"platform": "linux/arm64",
|
||||
"runner": ["self-hosted", "Linux", "ARM64", "heavy-arm64"]
|
||||
}
|
||||
],
|
||||
"os": [
|
||||
{
|
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "12",
      "image_sha": "6948666"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "13",
      "image_sha": "6948666"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "14",
      "image_sha": "6948666"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "gcc",
      "compiler_version": "15",
      "image_sha": "6948666"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "16",
      "image_sha": "6948666"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "17",
      "image_sha": "6948666"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "18",
      "image_sha": "6948666"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "19",
      "image_sha": "6948666"
    },
    {
      "distro_name": "debian",
      "distro_version": "bookworm",
      "compiler_name": "clang",
      "compiler_version": "20",
      "image_sha": "6948666"
    },
    {
      "distro_name": "rhel",
      "distro_version": "8",
      "compiler_name": "gcc",
      "compiler_version": "14",
      "image_sha": "10e69b4"
    },
    {
      "distro_name": "rhel",
      "distro_version": "8",
      "compiler_name": "clang",
      "compiler_version": "any",
      "image_sha": "10e69b4"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9",
      "compiler_name": "gcc",
      "compiler_version": "12",
      "image_sha": "10e69b4"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9",
      "compiler_name": "gcc",
      "compiler_version": "13",
      "image_sha": "10e69b4"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9",
      "compiler_name": "gcc",
      "compiler_version": "14",
      "image_sha": "10e69b4"
    },
    {
      "distro_name": "rhel",
      "distro_version": "9",
      "compiler_name": "clang",
      "compiler_version": "any",
      "image_sha": "10e69b4"
    },
    {
      "distro_name": "rhel",
      "distro_version": "10",
      "compiler_name": "gcc",
      "compiler_version": "14",
      "image_sha": "10e69b4"
    },
    {
      "distro_name": "rhel",
      "distro_version": "10",
      "compiler_name": "clang",
      "compiler_version": "any",
      "image_sha": "10e69b4"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "jammy",
      "compiler_name": "gcc",
      "compiler_version": "12",
      "image_sha": "6948666"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "gcc",
      "compiler_version": "13",
      "image_sha": "6948666"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "gcc",
      "compiler_version": "14",
      "image_sha": "6948666"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "16",
      "image_sha": "6948666"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "17",
      "image_sha": "6948666"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "18",
      "image_sha": "6948666"
    },
    {
      "distro_name": "ubuntu",
      "distro_version": "noble",
      "compiler_name": "clang",
      "compiler_version": "19",
      "image_sha": "6948666"
    }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
}
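These strategy-matrix files (this `linux.json`, and the `macos.json` and `windows.json` that follow) are consumed by `.github/scripts/strategy-matrix/generate.py`, which is invoked later in `reusable-strategy-matrix.yml` but is not itself shown in this diff. The following is a minimal sketch, under the assumption that the generator takes the Cartesian product of the four axes; the function and naming scheme are illustrative, not the actual implementation.

```python
import itertools
import json

def expand_matrix(config: dict) -> list[dict]:
    """Hypothetical sketch: expand a strategy-matrix JSON file into one
    configuration per combination of architecture x os x build_type x
    cmake_args."""
    configurations = []
    for arch, os_, build_type, cmake_args in itertools.product(
        config["architecture"],
        config["os"],
        config["build_type"],
        config["cmake_args"],
    ):
        configurations.append({
            "architecture": arch,
            "os": os_,
            "build_type": build_type,
            "cmake_args": cmake_args,
            # Illustrative config name; empty fields (as in macos.json and
            # windows.json below) are simply skipped.
            "config_name": "-".join(filter(None, [
                os_["distro_name"], os_["distro_version"],
                os_["compiler_name"], os_["compiler_version"],
                build_type,
            ])),
        })
    return configurations

with open("linux.json") as f:
    print(json.dumps({"include": expand_matrix(json.load(f))}, indent=2))
```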
22 .github/scripts/strategy-matrix/macos.json vendored Normal file
@@ -0,0 +1,22 @@
{
  "architecture": [
    {
      "platform": "macos/arm64",
      "runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"]
    }
  ],
  "os": [
    {
      "distro_name": "macos",
      "distro_version": "",
      "compiler_name": "",
      "compiler_version": "",
      "image_sha": ""
    }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": [
    "-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
    "-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
  ]
}
19 .github/scripts/strategy-matrix/windows.json vendored Normal file
@@ -0,0 +1,19 @@
{
  "architecture": [
    {
      "platform": "windows/amd64",
      "runner": ["self-hosted", "Windows", "devbox"]
    }
  ],
  "os": [
    {
      "distro_name": "windows",
      "distro_version": "",
      "compiler_name": "",
      "compiler_version": "",
      "image_sha": ""
    }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
}
61 .github/workflows/clang-format.yml vendored
@@ -1,61 +0,0 @@
name: clang-format

on: [push, pull_request]

jobs:
  check:
    runs-on: ubuntu-20.04
    env:
      CLANG_VERSION: 10
    steps:
      - uses: actions/checkout@v4
      - name: Install clang-format
        run: |
          codename=$( lsb_release --codename --short )
          sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
          deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
          deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
          EOF
          wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
          sudo apt-get update
          sudo apt-get install clang-format-${CLANG_VERSION}
      - name: Format src/ripple
        run: find src/ripple -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
      - name: Format src/test
        run: find src/test -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
      - name: Check for differences
        id: assert
        run: |
          set -o pipefail
          git diff --exit-code | tee "clang-format.patch"
      - name: Upload patch
        if: failure() && steps.assert.outcome == 'failure'
        uses: actions/upload-artifact@v3
        continue-on-error: true
        with:
          name: clang-format.patch
          if-no-files-found: ignore
          path: clang-format.patch
      - name: What happened?
        if: failure() && steps.assert.outcome == 'failure'
        env:
          PREAMBLE: |
            If you are reading this, you are looking at a failed Github Actions
            job. That means you pushed one or more files that did not conform
            to the formatting specified in .clang-format. That may be because
            you neglected to run 'git clang-format' or 'clang-format' before
            committing, or that your version of clang-format has an
            incompatibility with the one on this machine, which is:
          SUGGESTION: |

            To fix it, you can do one of two things:
            1. Download and apply the patch generated as an artifact of this
               job to your repo, commit, and push.
            2. Run 'git-clang-format --extensions c,cpp,h,cxx,ipp develop'
               in your repo, commit, and push.
        run: |
          echo "${PREAMBLE}"
          clang-format-${CLANG_VERSION} --version
          echo "${SUGGESTION}"
          exit 1
36 .github/workflows/doxygen.yml vendored
@@ -1,36 +0,0 @@
name: Build and publish Doxygen documentation
# To test this workflow, push your changes to your fork's `develop` branch.
on:
  push:
    branches:
      - develop
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  job:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    container: rippleci/rippled-build-ubuntu:aaf5e3e
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: check environment
        run: |
          echo ${PATH} | tr ':' '\n'
          cmake --version
          doxygen --version
          env | sort
      - name: build
        run: |
          mkdir build
          cd build
          cmake -Donly_docs=TRUE ..
          cmake --build . --target docs --parallel $(nproc)
      - name: publish
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: build/docs/html
49 .github/workflows/levelization.yml vendored
@@ -1,49 +0,0 @@
name: levelization

on: [push, pull_request]

jobs:
  check:
    runs-on: ubuntu-latest
    env:
      CLANG_VERSION: 10
    steps:
      - uses: actions/checkout@v4
      - name: Check levelization
        run: Builds/levelization/levelization.sh
      - name: Check for differences
        id: assert
        run: |
          set -o pipefail
          git diff --exit-code | tee "levelization.patch"
      - name: Upload patch
        if: failure() && steps.assert.outcome == 'failure'
        uses: actions/upload-artifact@v3
        continue-on-error: true
        with:
          name: levelization.patch
          if-no-files-found: ignore
          path: levelization.patch
      - name: What happened?
        if: failure() && steps.assert.outcome == 'failure'
        env:
          MESSAGE: |
            If you are reading this, you are looking at a failed Github
            Actions job. That means you changed the dependency relationships
            between the modules in rippled. That may be an improvement or a
            regression. This check doesn't judge.

            A rule of thumb, though, is that if your changes caused
            something to be removed from loops.txt, that's probably an
            improvement. If something was added, it's probably a regression.

            To fix it, you can do one of two things:
            1. Download and apply the patch generated as an artifact of this
               job to your repo, commit, and push.
            2. Run './Builds/levelization/levelization.sh' in your repo,
               commit, and push.

            See Builds/levelization/README.md for more info.
        run: |
          echo "${MESSAGE}"
          exit 1
71 .github/workflows/macos.yml vendored
@@ -1,71 +0,0 @@
name: macos
on:
  pull_request:
  push:
    # If the branches list is ever changed, be sure to change it on all
    # build/test jobs (nix, macos, windows)
    branches:
      # Always build the package branches
      - develop
      - release
      - master
      # Branches that opt-in to running
      - 'ci/**'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test:
    strategy:
      matrix:
        platform:
          - macos
        generator:
          - Ninja
        configuration:
          - Release
    runs-on: [self-hosted, macOS]
    env:
      # The `build` action requires these variables.
      build_dir: .build
      NUM_PROCESSORS: 12
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: install Conan
        run: |
          brew install conan@1
          echo '/opt/homebrew/opt/conan@1/bin' >> $GITHUB_PATH
      - name: install Ninja
        if: matrix.generator == 'Ninja'
        run: brew install ninja
      - name: check environment
        run: |
          env | sort
          echo ${PATH} | tr ':' '\n'
          python --version
          conan --version
          cmake --version
      - name: configure Conan
        run: |
          conan profile new default --detect || true
          conan profile update settings.compiler.cppstd=20 default
          conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
      - name: build dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
          CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
          CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
        with:
          configuration: ${{ matrix.configuration }}
      - name: build
        uses: ./.github/actions/build
        with:
          generator: ${{ matrix.generator }}
          configuration: ${{ matrix.configuration }}
      - name: test
        run: |
          ${build_dir}/rippled --unittest
234 .github/workflows/nix.yml vendored
@@ -1,234 +0,0 @@
name: nix
on:
  pull_request:
  push:
    # If the branches list is ever changed, be sure to change it on all
    # build/test jobs (nix, macos, windows)
    branches:
      # Always build the package branches
      - develop
      - release
      - master
      # Branches that opt-in to running
      - 'ci/**'
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

# This workflow has two job matrixes.
# They can be considered phases because the second matrix ("test")
# depends on the first ("dependencies").
#
# The first phase has a job in the matrix for each combination of
# variables that affects dependency ABI:
# platform, compiler, and configuration.
# It creates a GitHub artifact holding the Conan profile,
# and builds and caches binaries for all the dependencies.
# If an Artifactory remote is configured, they are cached there.
# If not, they are added to the GitHub artifact.
# GitHub's "cache" action has a size limit (10 GB) that is too small
# to hold the binaries if they are built locally.
# We must use the "{upload,download}-artifact" actions instead.
#
# The second phase has a job in the matrix for each test configuration.
# It installs dependency binaries from the cache, whichever was used,
# and builds and tests rippled.

jobs:
  dependencies:
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux
        compiler:
          - gcc
          - clang
        configuration:
          - Debug
          - Release
        include:
          - compiler: gcc
            profile:
              version: 11
              cc: /usr/bin/gcc
              cxx: /usr/bin/g++
          - compiler: clang
            profile:
              version: 14
              cc: /usr/bin/clang-14
              cxx: /usr/bin/clang++-14
    runs-on: [self-hosted, heavy]
    container: rippleci/rippled-build-ubuntu:aaf5e3e
    env:
      build_dir: .build
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: check environment
        run: |
          echo ${PATH} | tr ':' '\n'
          conan --version
          cmake --version
          env | sort
      - name: configure Conan
        run: |
          conan profile new default --detect
          conan profile update settings.compiler.cppstd=20 default
          conan profile update settings.compiler=${{ matrix.compiler }} default
          conan profile update settings.compiler.version=${{ matrix.profile.version }} default
          conan profile update settings.compiler.libcxx=libstdc++11 default
          conan profile update env.CC=${{ matrix.profile.cc }} default
          conan profile update env.CXX=${{ matrix.profile.cxx }} default
          conan profile update conf.tools.build:compiler_executables='{"c": "${{ matrix.profile.cc }}", "cpp": "${{ matrix.profile.cxx }}"}' default
      - name: archive profile
        # Create this archive before dependencies are added to the local cache.
        run: tar -czf conan.tar -C ~/.conan .
      - name: build dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
          CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
          CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
        with:
          configuration: ${{ matrix.configuration }}
      - name: upload archive
        uses: actions/upload-artifact@v3
        with:
          name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
          path: conan.tar
          if-no-files-found: error

  test:
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux
        compiler:
          - gcc
          - clang
        configuration:
          - Debug
          - Release
        cmake-args:
          -
          - "-Dunity=ON"
    needs: dependencies
    runs-on: [self-hosted, heavy]
    container: rippleci/rippled-build-ubuntu:aaf5e3e
    env:
      build_dir: .build
    steps:
      - name: download cache
        uses: actions/download-artifact@v3
        with:
          name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
      - name: extract cache
        run: |
          mkdir -p ~/.conan
          tar -xzf conan.tar -C ~/.conan
      - name: check environment
        run: |
          env | sort
          echo ${PATH} | tr ':' '\n'
          conan --version
          cmake --version
      - name: checkout
        uses: actions/checkout@v4
      - name: dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
        with:
          configuration: ${{ matrix.configuration }}
      - name: build
        uses: ./.github/actions/build
        with:
          generator: Ninja
          configuration: ${{ matrix.configuration }}
          cmake-args: ${{ matrix.cmake-args }}
      - name: test
        run: |
          ${build_dir}/rippled --unittest --unittest-jobs $(nproc)

  coverage:
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux
        compiler:
          - gcc
        configuration:
          - Debug
    needs: dependencies
    runs-on: [self-hosted, heavy]
    container: rippleci/rippled-build-ubuntu:aaf5e3e
    env:
      build_dir: .build
    steps:
      - name: download cache
        uses: actions/download-artifact@v3
        with:
          name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
      - name: extract cache
        run: |
          mkdir -p ~/.conan
          tar -xzf conan.tar -C ~/.conan
      - name: install gcovr
        run: pip install "gcovr>=7,<8"
      - name: check environment
        run: |
          echo ${PATH} | tr ':' '\n'
          conan --version
          cmake --version
          gcovr --version
          env | sort
          ls ~/.conan
      - name: checkout
        uses: actions/checkout@v4
      - name: dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
        with:
          configuration: ${{ matrix.configuration }}
      - name: build
        uses: ./.github/actions/build
        with:
          generator: Ninja
          configuration: ${{ matrix.configuration }}
          cmake-args: >-
            -Dcoverage=ON
            -Dcoverage_format=xml
            -DCODE_COVERAGE_VERBOSE=ON
            -DCMAKE_CXX_FLAGS="-O0"
            -DCMAKE_C_FLAGS="-O0"
          cmake-target: coverage
      - name: move coverage report
        shell: bash
        run: |
          mv "${build_dir}/coverage.xml" ./
      - name: archive coverage report
        uses: actions/upload-artifact@v3
        with:
          name: coverage.xml
          path: coverage.xml
          retention-days: 30
      - name: upload coverage report
        uses: wandalen/wretry.action@v1.4.10
        with:
          action: codecov/codecov-action@v4.3.0
          with: |
            files: coverage.xml
            fail_ci_if_error: true
            disable_search: true
            verbose: true
            plugin: noop
            token: ${{ secrets.CODECOV_TOKEN }}
          attempt_limit: 5
          attempt_delay: 210000 # in milliseconds
133 .github/workflows/on-pr.yml vendored Normal file
@@ -0,0 +1,133 @@
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as on MacOS and Windows, on every push to a
# user branch. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label.
name: PR

on:
  merge_group:
    types:
      - checks_requested
  pull_request:
    types:
      - opened
      - reopened
      - synchronize
      - ready_for_review

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  # This job determines whether the rest of the workflow should run. It runs
  # when the PR is not a draft (which should also cover merge-group) or
  # has the 'DraftRunCI' label.
  should-run:
    if: ${{ !github.event.pull_request.draft || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Determine changed files
        # This step checks whether any files have changed that should
        # cause the next jobs to run. We do it this way rather than
        # using `paths` in the `on:` section, because all required
        # checks must pass, even for changes that do not modify anything
        # that affects those checks. We would therefore like to make the
        # checks required only if the job runs, but GitHub does not
        # support that directly. By always executing the workflow on new
        # commits and by using the changed-files action below, we ensure
        # that Github considers any skipped jobs to have passed, and in
        # turn the required checks as well.
        id: changes
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
        with:
          files: |
            # These paths are unique to `on-pr.yml`.
            .github/scripts/levelization/**
            .github/workflows/reusable-check-levelization.yml
            .github/workflows/reusable-notify-clio.yml
            .github/workflows/on-pr.yml

            # Keep the paths below in sync with those in `on-trigger.yml`.
            .github/actions/build-deps/**
            .github/actions/build-test/**
            .github/actions/setup-conan/**
            .github/scripts/strategy-matrix/**
            .github/workflows/reusable-build.yml
            .github/workflows/reusable-build-test-config.yml
            .github/workflows/reusable-build-test.yml
            .github/workflows/reusable-strategy-matrix.yml
            .github/workflows/reusable-test.yml
            .codecov.yml
            cmake/**
            conan/**
            external/**
            include/**
            src/**
            tests/**
            CMakeLists.txt
            conanfile.py
            conan.lock
      - name: Check whether to run
        # This step determines whether the rest of the workflow should
        # run. The rest of the workflow will run if this job runs AND at
        # least one of:
        # * Any of the files checked in the `changes` step were modified
        # * The PR is NOT a draft and is labeled "Ready to merge"
        # * The workflow is running from the merge queue
        id: go
        env:
          FILES: ${{ steps.changes.outputs.any_changed }}
          DRAFT: ${{ github.event.pull_request.draft }}
          READY: ${{ contains(github.event.pull_request.labels.*.name, 'Ready to merge') }}
          MERGE: ${{ github.event_name == 'merge_group' }}
        run: |
          echo "go=${{ (env.DRAFT != 'true' && env.READY == 'true') || env.FILES == 'true' || env.MERGE == 'true' }}" >> "${GITHUB_OUTPUT}"
          cat "${GITHUB_OUTPUT}"
    outputs:
      go: ${{ steps.go.outputs.go == 'true' }}

  check-levelization:
    needs: should-run
    if: ${{ needs.should-run.outputs.go == 'true' }}
    uses: ./.github/workflows/reusable-check-levelization.yml

  build-test:
    needs: should-run
    if: ${{ needs.should-run.outputs.go == 'true' }}
    uses: ./.github/workflows/reusable-build-test.yml
    strategy:
      fail-fast: false
      matrix:
        os: [linux, macos, windows]
    with:
      os: ${{ matrix.os }}
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  notify-clio:
    needs:
      - should-run
      - build-test
    if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
    uses: ./.github/workflows/reusable-notify-clio.yml
    secrets:
      clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
      conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
      conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

  passed:
    if: failure() || cancelled()
    needs:
      - build-test
      - check-levelization
    runs-on: ubuntu-latest
    steps:
      - name: Fail
        run: false
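The gating expression in the "Check whether to run" step above is compact but easy to misread. Here is a minimal Python restatement of the same boolean logic; the function name and parameters are illustrative only.

```python
def should_run_ci(draft: bool, ready_label: bool,
                  files_changed: bool, merge_group: bool) -> bool:
    """Mirror of (DRAFT != 'true' && READY == 'true') || FILES == 'true'
    || MERGE == 'true': run the rest of the workflow if the PR is not a
    draft and carries the "Ready to merge" label, or any watched file
    changed, or the workflow was triggered from the merge queue."""
    return (not draft and ready_label) or files_changed or merge_group

# A draft PR with no relevant file changes is skipped...
assert not should_run_ci(draft=True, ready_label=False,
                         files_changed=False, merge_group=False)
# ...but any watched-file change forces a run, draft or not.
assert should_run_ci(draft=True, ready_label=False,
                     files_changed=True, merge_group=False)
```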
75 .github/workflows/on-trigger.yml vendored Normal file
@@ -0,0 +1,75 @@
# This workflow runs all workflows to build the dependencies required for the
# project on various Linux flavors, as well as on MacOS and Windows, on a
# scheduled basis, on merge into the 'develop', 'release', or 'master' branches,
# or manually. The missing commits check is only run when the code is merged
# into the 'develop' or 'release' branches, and the documentation is built when
# the code is merged into the 'develop' branch.
name: Trigger

on:
  push:
    branches:
      - "develop"
      - "release*"
      - "master"
    paths:
      # These paths are unique to `on-trigger.yml`.
      - ".github/workflows/reusable-check-missing-commits.yml"
      - ".github/workflows/on-trigger.yml"
      - ".github/workflows/publish-docs.yml"

      # Keep the paths below in sync with those in `on-pr.yml`.
      - ".github/actions/build-deps/**"
      - ".github/actions/build-test/**"
      - ".github/actions/setup-conan/**"
      - ".github/scripts/strategy-matrix/**"
      - ".github/workflows/reusable-build.yml"
      - ".github/workflows/reusable-build-test-config.yml"
      - ".github/workflows/reusable-build-test.yml"
      - ".github/workflows/reusable-strategy-matrix.yml"
      - ".github/workflows/reusable-test.yml"
      - ".codecov.yml"
      - "cmake/**"
      - "conan/**"
      - "external/**"
      - "include/**"
      - "src/**"
      - "tests/**"
      - "CMakeLists.txt"
      - "conanfile.py"
      - "conan.lock"

  # Run at 06:32 UTC on every day of the week from Monday through Friday. This
  # will force all dependencies to be rebuilt, which is useful to verify that
  # all dependencies can be built successfully. Only the dependencies that
  # are actually missing from the remote will be uploaded.
  schedule:
    - cron: "32 6 * * 1-5"

  # Run when manually triggered via the GitHub UI or API.
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  check-missing-commits:
    if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
    uses: ./.github/workflows/reusable-check-missing-commits.yml

  build-test:
    uses: ./.github/workflows/reusable-build-test.yml
    strategy:
      fail-fast: ${{ github.event_name == 'merge_group' }}
      matrix:
        os: [linux, macos, windows]
    with:
      os: ${{ matrix.os }}
      strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
15 .github/workflows/pre-commit.yml vendored Normal file
@@ -0,0 +1,15 @@
name: Run pre-commit hooks

on:
  pull_request:
  push:
    branches: [develop, release, master]
  workflow_dispatch:

jobs:
  # Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
  run-hooks:
    uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
    with:
      runs_on: ubuntu-latest
      container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'
60 .github/workflows/publish-docs.yml vendored Normal file
@@ -0,0 +1,60 @@
# This workflow builds the documentation for the repository, and publishes it to
# GitHub Pages when changes are merged into the default branch.
name: Build and publish documentation

on:
  push:
    paths:
      - ".github/workflows/publish-docs.yml"
      - "*.md"
      - "**/*.md"
      - "docs/**"
      - "include/**"
      - "src/libxrpl/**"
      - "src/xrpld/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  BUILD_DIR: .build

jobs:
  publish:
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1
    permissions:
      contents: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Check configuration
        run: |
          echo 'Checking path.'
          echo ${PATH} | tr ':' '\n'

          echo 'Checking environment variables.'
          env | sort

          echo 'Checking CMake version.'
          cmake --version

          echo 'Checking Doxygen version.'
          doxygen --version
      - name: Build documentation
        run: |
          mkdir -p "${BUILD_DIR}"
          cd "${BUILD_DIR}"
          cmake -Donly_docs=ON ..
          cmake --build . --target docs --parallel $(nproc)
      - name: Publish documentation
        if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ${{ env.BUILD_DIR }}/docs/html
69 .github/workflows/reusable-build-test-config.yml vendored Normal file
@@ -0,0 +1,69 @@
name: Build and test configuration

on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: true
        type: string
      build_only:
        description: 'Whether to only build or to build and test the code ("true", "false").'
        required: true
        type: boolean
      build_type:
        description: 'The build type to use ("Debug", "Release").'
        type: string
        required: true
      cmake_args:
        description: "Additional arguments to pass to CMake."
        required: false
        type: string
        default: ""
      cmake_target:
        description: "The CMake target to build."
        type: string
        required: true

      runs_on:
        description: Runner to run the job on as a JSON string
        required: true
        type: string
      image:
        description: "The image to run in (leave empty to run natively)"
        required: true
        type: string

      config_name:
        description: "The configuration string (used for naming artifacts and such)."
        required: true
        type: string

    secrets:
      CODECOV_TOKEN:
        description: "The Codecov token to use for uploading coverage reports."
        required: true

jobs:
  build:
    uses: ./.github/workflows/reusable-build.yml
    with:
      build_dir: ${{ inputs.build_dir }}
      build_type: ${{ inputs.build_type }}
      cmake_args: ${{ inputs.cmake_args }}
      cmake_target: ${{ inputs.cmake_target }}
      runs_on: ${{ inputs.runs_on }}
      image: ${{ inputs.image }}
      config_name: ${{ inputs.config_name }}
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  test:
    needs: build
    uses: ./.github/workflows/reusable-test.yml
    with:
      run_tests: ${{ !inputs.build_only }}
      verify_voidstar: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
      runs_on: ${{ inputs.runs_on }}
      image: ${{ inputs.image }}
      config_name: ${{ inputs.config_name }}
58 .github/workflows/reusable-build-test.yml vendored Normal file
@@ -0,0 +1,58 @@
# This workflow builds and tests the binary for various configurations.
name: Build and test

# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: false
        type: string
        default: ".build"
      os:
        description: 'The operating system to use for the build ("linux", "macos", "windows").'
        required: true
        type: string
      strategy_matrix:
        # TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
        description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
        required: false
        type: string
        default: "minimal"
    secrets:
      CODECOV_TOKEN:
        description: "The Codecov token to use for uploading coverage reports."
        required: true

jobs:
  # Generate the strategy matrix to be used by the following job.
  generate-matrix:
    uses: ./.github/workflows/reusable-strategy-matrix.yml
    with:
      os: ${{ inputs.os }}
      strategy_matrix: ${{ inputs.strategy_matrix }}

  # Build and test the binary for each configuration.
  build-test-config:
    needs:
      - generate-matrix
    uses: ./.github/workflows/reusable-build-test-config.yml
    strategy:
      fail-fast: ${{ github.event_name == 'merge_group' }}
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
      max-parallel: 10
    with:
      build_dir: ${{ inputs.build_dir }}
      build_only: ${{ matrix.build_only }}
      build_type: ${{ matrix.build_type }}
      cmake_args: ${{ matrix.cmake_args }}
      cmake_target: ${{ matrix.cmake_target }}
      runs_on: ${{ toJSON(matrix.architecture.runner) }}
      image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}
      config_name: ${{ matrix.config_name }}
    secrets:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
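The `image:` expression above ties the strategy-matrix JSON files back to the ghcr.io CI containers. A Python restatement of that expression, using one of the `linux.json` entries from earlier as sample input (the helper name is illustrative):

```python
def container_image(platform: str, os_entry: dict) -> str:
    """Mirror of the format() expression above: Linux jobs run in a
    ghcr.io CI container derived from the matrix entry, while macOS and
    Windows jobs run natively (empty string)."""
    if "linux" not in platform:
        return ""
    return ("ghcr.io/xrplf/ci/{distro_name}-{distro_version}:"
            "{compiler_name}-{compiler_version}-sha-{image_sha}").format(**os_entry)

example = {"distro_name": "ubuntu", "distro_version": "noble",
           "compiler_name": "gcc", "compiler_version": "14",
           "image_sha": "6948666"}
print(container_image("linux/amd64", example))
# -> ghcr.io/xrplf/ci/ubuntu-noble:gcc-14-sha-6948666
```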
122 .github/workflows/reusable-build.yml vendored Normal file
@@ -0,0 +1,122 @@
name: Build rippled

on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: true
        type: string
      build_type:
        description: 'The build type to use ("Debug", "Release").'
        required: true
        type: string
      cmake_args:
        description: "Additional arguments to pass to CMake."
        required: true
        type: string
      cmake_target:
        description: "The CMake target to build."
        required: true
        type: string

      runs_on:
        description: Runner to run the job on as a JSON string
        required: true
        type: string
      image:
        description: "The image to run in (leave empty to run natively)"
        required: true
        type: string

      config_name:
        description: "The name of the configuration."
        required: true
        type: string

    secrets:
      CODECOV_TOKEN:
        description: "The Codecov token to use for uploading coverage reports."
        required: true

defaults:
  run:
    shell: bash

jobs:
  build:
    name: Build ${{ inputs.config_name }}
    runs-on: ${{ fromJSON(inputs.runs_on) }}
    container: ${{ inputs.image != '' && inputs.image || null }}
    timeout-minutes: 60
    steps:
      - name: Cleanup workspace
        if: ${{ runner.os == 'macOS' }}
        uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - name: Prepare runner
        uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
        with:
          disable_ccache: false

      - name: Print build environment
        uses: ./.github/actions/print-env

      - name: Setup Conan
        uses: ./.github/actions/setup-conan

      - name: Build dependencies
        uses: ./.github/actions/build-deps
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ inputs.build_type }}

      - name: Configure CMake
        shell: bash
        working-directory: ${{ inputs.build_dir }}
        env:
          BUILD_TYPE: ${{ inputs.build_type }}
          CMAKE_ARGS: ${{ inputs.cmake_args }}
        run: |
          cmake \
            -G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
            -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
            -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
            ${CMAKE_ARGS} \
            ..

      - name: Build the binary
        shell: bash
        working-directory: ${{ inputs.build_dir }}
        env:
          BUILD_TYPE: ${{ inputs.build_type }}
          CMAKE_TARGET: ${{ inputs.cmake_target }}
        run: |
          cmake \
            --build . \
            --config "${BUILD_TYPE}" \
            --parallel $(nproc) \
            --target "${CMAKE_TARGET}"

      - name: Upload rippled artifact
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: rippled-${{ inputs.config_name }}
          path: ${{ inputs.build_dir }}/${{ runner.os == 'Windows' && inputs.build_type || '' }}/rippled${{ runner.os == 'Windows' && '.exe' || '' }}
          retention-days: 3
          if-no-files-found: error

      - name: Upload coverage report
        if: ${{ inputs.cmake_target == 'coverage' }}
        uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
        with:
          disable_search: true
          disable_telem: true
          fail_ci_if_error: true
          files: ${{ inputs.build_dir }}/coverage.xml
          plugins: noop
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
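The artifact `path:` expression above encodes a CMake generator detail: the multi-config Visual Studio generator on Windows nests the binary in a per-configuration subdirectory and appends `.exe`, while single-config Ninja builds on Linux and macOS write it directly into the build directory. A small sketch of the same logic (the helper name is illustrative):

```python
from pathlib import PurePosixPath, PureWindowsPath

def rippled_artifact_path(build_dir: str, build_type: str, runner_os: str):
    """Mirror of the upload path expression: Windows gets
    <build_dir>/<build_type>/rippled.exe, everything else <build_dir>/rippled."""
    if runner_os == "Windows":
        return PureWindowsPath(build_dir) / build_type / "rippled.exe"
    return PurePosixPath(build_dir) / "rippled"

print(rippled_artifact_path(".build", "Release", "Windows"))  # .build\Release\rippled.exe
print(rippled_artifact_path(".build", "Release", "Linux"))    # .build/rippled
```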
46 .github/workflows/reusable-check-levelization.yml vendored Normal file
@@ -0,0 +1,46 @@
# This workflow checks if the dependencies between the modules are correctly
# indexed.
name: Check levelization

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-levelization
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  levelization:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Check levelization
        run: .github/scripts/levelization/generate.sh
      - name: Check for differences
        env:
          MESSAGE: |

            The dependency relationships between the modules in rippled have
            changed, which may be an improvement or a regression.

            A rule of thumb is that if your changes caused something to be
            removed from loops.txt, it's probably an improvement, while if
            something was added, it's probably a regression.

            Run '.github/scripts/levelization/generate.sh' in your repo, commit
            and push the changes. See .github/scripts/levelization/README.md for
            more info.
        run: |
          DIFF=$(git status --porcelain)
          if [ -n "${DIFF}" ]; then
            # Print the differences to give the contributor a hint about what to
            # expect when running levelization on their own machine.
            git diff
            echo "${MESSAGE}"
            exit 1
          fi
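The "Check for differences" step relies on a common CI idiom: regenerate derived files, then fail if the work tree is dirty, since that means the committed results are stale. A Python sketch of the same check (function name illustrative):

```python
import subprocess
import sys

def fail_if_dirty() -> None:
    """Mirror of the shell step: `git status --porcelain` prints one line
    per modified file; any output means the regenerated levelization
    results differ from what was committed."""
    status = subprocess.run(["git", "status", "--porcelain"],
                            capture_output=True, text=True, check=True).stdout
    if status.strip():
        subprocess.run(["git", "diff"])  # show the contributor what changed
        sys.exit(1)
```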
62 .github/workflows/reusable-check-missing-commits.yml vendored Normal file
@@ -0,0 +1,62 @@
# This workflow checks that all commits in the "master" branch are also in the
# "release" and "develop" branches, and that all commits in the "release" branch
# are also in the "develop" branch.
name: Check for missing commits

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-missing-commits
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
        with:
          fetch-depth: 0
      - name: Check for missing commits
        env:
          MESSAGE: |

            If you are reading this, then the commits indicated above are missing
            from the "develop" and/or "release" branch. Do a reverse-merge as soon
            as possible. See CONTRIBUTING.md for instructions.
        run: |
          set -o pipefail
          # Branches are ordered by how "canonical" they are. Every commit in one
          # branch should be in all the branches behind it.
          order=(master release develop)
          branches=()
          for branch in "${order[@]}"; do
            # Check that the branches exist so that this job will work on forked
            # repos, which don't necessarily have master and release branches.
            echo "Checking if ${branch} exists."
            if git ls-remote --exit-code --heads origin \
              refs/heads/${branch} > /dev/null; then
              branches+=(origin/${branch})
            fi
          done

          prior=()
          for branch in "${branches[@]}"; do
            if [[ ${#prior[@]} -ne 0 ]]; then
              echo "Checking ${prior[@]} for commits missing from ${branch}."
              git log --oneline --no-merges "${prior[@]}" \
                ^$branch | tee -a "missing-commits.txt"
              echo
            fi
            prior+=("${branch}")
          done

          if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
            echo "${MESSAGE}"
            exit 1
          fi
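The loop above uses `git log A B ^C`, which lists commits reachable from A or B but not from C, so each branch is checked against all the more canonical branches before it. A Python restatement of the algorithm (names illustrative):

```python
import subprocess

# Branches ordered by how "canonical" they are: every commit in one branch
# should also be present in all branches listed after it.
ORDER = ["master", "release", "develop"]

def missing_commits() -> list[str]:
    """Mirror of the shell loop: for each branch, list commits reachable
    from the more canonical branches but not from it."""
    missing: list[str] = []
    prior: list[str] = []
    for branch in ORDER:
        if prior:
            out = subprocess.run(
                ["git", "log", "--oneline", "--no-merges",
                 *[f"origin/{p}" for p in prior], f"^origin/{branch}"],
                capture_output=True, text=True, check=True).stdout
            missing += out.splitlines()
        prior.append(branch)
    return missing
```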
91 .github/workflows/reusable-notify-clio.yml vendored Normal file
@@ -0,0 +1,91 @@
# This workflow exports the built libxrpl package to the Conan remote on a
# channel named after the pull request, and notifies the Clio repository about
# the new version so it can check for compatibility.
name: Notify Clio

# This workflow can only be triggered by other workflows.
on:
  workflow_call:
    inputs:
      conan_remote_name:
        description: "The name of the Conan remote to use."
        required: false
        type: string
        default: xrplf
      conan_remote_url:
        description: "The URL of the Conan endpoint to use."
        required: false
        type: string
        default: https://conan.ripplex.io
    secrets:
      clio_notify_token:
        description: "The GitHub token to notify Clio about new versions."
        required: true
      conan_remote_username:
        description: "The username for logging into the Conan remote."
        required: true
      conan_remote_password:
        description: "The password for logging into the Conan remote."
        required: true

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-clio
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  upload:
    if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Generate outputs
        id: generate
        env:
          PR_NUMBER: ${{ github.event.pull_request.number }}
        run: |
          echo 'Generating user and channel.'
          echo "user=clio" >> "${GITHUB_OUTPUT}"
          echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
          echo 'Extracting version.'
          echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
      - name: Calculate conan reference
        id: conan_ref
        run: |
          echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"
      - name: Set up Conan
        uses: ./.github/actions/setup-conan
        with:
          conan_remote_name: ${{ inputs.conan_remote_name }}
          conan_remote_url: ${{ inputs.conan_remote_url }}
      - name: Log into Conan remote
        env:
          CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
        run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
      - name: Upload package
        env:
          CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
        run: |
          conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
          conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
    outputs:
      conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}

  notify:
    needs: upload
    runs-on: ubuntu-latest
    steps:
      - name: Notify Clio
        env:
          GH_TOKEN: ${{ secrets.clio_notify_token }}
          PR_URL: ${{ github.event.pull_request.html_url }}
        run: |
          gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
            /repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
            -F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
            -F "client_payload[pr_url]=${PR_URL}"
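The "Generate outputs" and "Calculate conan reference" steps scrape the version string out of `BuildInfo.cpp` and assemble a Conan reference of the form `<version>@clio/pr_<PR number>`. A Python sketch of the same extraction; the sample source line and version value are illustrative, not taken from the actual file:

```python
import re

def conan_reference(build_info_cpp: str, pr_number: int) -> str:
    """Mirror of the grep/awk pipeline: pull the quoted versionString
    value out of BuildInfo.cpp and build the reference string."""
    match = re.search(r'versionString\s*=\s*"([^"]+)"', build_info_cpp)
    if match is None:
        raise ValueError("versionString not found in BuildInfo.cpp")
    return f"{match.group(1)}@clio/pr_{pr_number}"

source = 'static const char* versionString = "2.6.0";'  # illustrative only
print(conan_reference(source, 1234))  # -> 2.6.0@clio/pr_1234
```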
41 .github/workflows/reusable-strategy-matrix.yml vendored Normal file
@@ -0,0 +1,41 @@
name: Generate strategy matrix

on:
  workflow_call:
    inputs:
      os:
        description: 'The operating system to use for the build ("linux", "macos", "windows").'
        required: false
        type: string
      strategy_matrix:
        # TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
        description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
        required: false
        type: string
        default: "minimal"
    outputs:
      matrix:
        description: "The generated strategy matrix."
        value: ${{ jobs.generate-matrix.outputs.matrix }}

jobs:
  generate-matrix:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.generate.outputs.matrix }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0

      - name: Set up Python
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: 3.13

      - name: Generate strategy matrix
        working-directory: .github/scripts/strategy-matrix
        id: generate
        env:
          GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
          GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
        run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
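Because the `run` step appends the script's stdout straight to `$GITHUB_OUTPUT`, `generate.py` presumably prints a single `matrix=<json>` line that the caller parses with `fromJson()`. A sketch of that output contract; whether the payload uses an `include` list or direct axes is an assumption, since the script is not shown in this diff:

```python
import json
import sys

def emit_matrix(configurations: list[dict]) -> None:
    """Assumed contract: print one matrix=<json> line to stdout, which the
    workflow step redirects into $GITHUB_OUTPUT."""
    sys.stdout.write("matrix=" + json.dumps({"include": configurations}) + "\n")

emit_matrix([{"build_type": "Debug", "cmake_args": "-Dunity=OFF"}])
```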
70 .github/workflows/reusable-test.yml vendored Normal file
@@ -0,0 +1,70 @@
name: Test rippled

on:
  workflow_call:
    inputs:
      verify_voidstar:
        description: "Whether to verify the presence of voidstar instrumentation."
        required: true
        type: boolean
      run_tests:
        description: "Whether to run unit tests"
        required: true
        type: boolean

      runs_on:
        description: Runner to run the job on as a JSON string
        required: true
        type: string
      image:
        description: "The image to run in (leave empty to run natively)"
        required: true
        type: string

      config_name:
        description: "The name of the configuration."
        required: true
        type: string

jobs:
  test:
    name: Test ${{ inputs.config_name }}
    runs-on: ${{ fromJSON(inputs.runs_on) }}
    container: ${{ inputs.image != '' && inputs.image || null }}
    timeout-minutes: 30
    steps:
      - name: Download rippled artifact
        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
        with:
          name: rippled-${{ inputs.config_name }}

      - name: Make binary executable (Linux and macOS)
        shell: bash
        if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
        run: |
          chmod +x ./rippled

      - name: Check linking (Linux)
        if: ${{ runner.os == 'Linux' }}
        shell: bash
        run: |
          ldd ./rippled
          if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
            echo 'The binary is statically linked.'
          else
            echo 'The binary is dynamically linked.'
            exit 1
          fi

      - name: Verifying presence of instrumentation
        if: ${{ inputs.verify_voidstar }}
        shell: bash
        run: |
          ./rippled --version | grep libvoidstar

      - name: Test the binary
        if: ${{ inputs.run_tests }}
        shell: bash
        run: |
          ./rippled --unittest --unittest-jobs $(nproc)
          ctest -j $(nproc) --output-on-failure
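The "Check linking" step treats any `libstdc++` or `libgcc` entry in the `ldd` output as proof that the binary was not statically linked against the C++ runtime. The same check restated in Python (function name illustrative):

```python
import subprocess
import sys

def assert_statically_linked(binary: str = "./rippled") -> None:
    """Mirror of the shell step: fail if ldd reports libstdc++ or libgcc
    among the binary's dynamic dependencies."""
    ldd = subprocess.run(["ldd", binary], capture_output=True, text=True).stdout
    print(ldd)
    if any(lib in line
           for line in ldd.splitlines()
           for lib in ("libstdc++", "libgcc")):
        print("The binary is dynamically linked.")
        sys.exit(1)
    print("The binary is statically linked.")
```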
94 .github/workflows/upload-conan-deps.yml vendored Normal file
@@ -0,0 +1,94 @@
name: Upload Conan Dependencies

on:
  schedule:
    - cron: "0 3 * * 2-6"
  workflow_dispatch:
    inputs:
      force_source_build:
        description: "Force source build of all dependencies"
        required: false
        default: false
        type: boolean
      force_upload:
        description: "Force upload of all dependencies"
        required: false
        default: false
        type: boolean
  pull_request:
    branches: [develop]
    paths:
      # This allows testing changes to the upload workflow in a PR
      - .github/workflows/upload-conan-deps.yml
  push:
    branches: [develop]
    paths:
      - .github/workflows/upload-conan-deps.yml
      - .github/workflows/reusable-strategy-matrix.yml
      - .github/actions/build-deps/action.yml
      - .github/actions/setup-conan/action.yml
      - ".github/scripts/strategy-matrix/**"
      - conanfile.py
      - conan.lock

env:
  CONAN_REMOTE_NAME: xrplf
  CONAN_REMOTE_URL: https://conan.ripplex.io

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  # Generate the strategy matrix to be used by the following job.
  generate-matrix:
    uses: ./.github/workflows/reusable-strategy-matrix.yml
    with:
      strategy_matrix: ${{ github.event_name == 'pull_request' && 'minimal' || 'all' }}

  # Build and upload the dependencies for each configuration.
  run-upload-conan-deps:
    needs:
      - generate-matrix
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
      max-parallel: 10
    runs-on: ${{ matrix.architecture.runner }}
    container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || null }}
    steps:
      - name: Cleanup workspace
        if: ${{ runner.os == 'macOS' }}
        uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e

      - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Prepare runner
        uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5
        with:
          disable_ccache: false

      - name: Setup Conan
        uses: ./.github/actions/setup-conan
        with:
          conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
          conan_remote_url: ${{ env.CONAN_REMOTE_URL }}

      - name: Build dependencies
        uses: ./.github/actions/build-deps
        with:
          build_dir: .build
          build_type: ${{ matrix.build_type }}
          force_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
          # The verbosity is set to "quiet" for Windows to avoid an excessive amount of logs, while it
          # is set to "verbose" otherwise to provide more information during the build process.
          verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}

      - name: Log into Conan remote
        if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' }}
        run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"

      - name: Upload Conan packages
        if: ${{ github.repository_owner == 'XRPLF' && github.event_name != 'pull_request' && github.event_name != 'schedule' }}
        env:
          FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
        run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}
91 .github/workflows/windows.yml vendored
@@ -1,91 +0,0 @@
name: windows

on:
  pull_request:
  push:
    # If the branches list is ever changed, be sure to change it on all
    # build/test jobs (nix, macos, windows)
    branches:
      # Always build the package branches
      - develop
      - release
      - master
      # Branches that opt-in to running
      - 'ci/**'

# https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        generator:
          - Visual Studio 16 2019
        configuration:
          - Release
          # Github hosted runners tend to hang when running Debug unit tests.
          # Instead of trying to work around it, disable the Debug job until
          # something beefier (i.e. a heavy self-hosted runner) becomes
          # available.
          # - Debug
    runs-on: windows-2019
    env:
      build_dir: .build
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: choose Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.9
      - name: learn Python cache directory
        id: pip-cache
        shell: bash
        run: |
          python -m pip install --upgrade pip
          echo "dir=$(pip cache dir)" | tee ${GITHUB_OUTPUT}
      - name: restore Python cache directory
        uses: actions/cache@v4
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-${{ hashFiles('.github/workflows/windows.yml') }}
      - name: install Conan
        run: pip install wheel 'conan<2'
      - name: check environment
        run: |
          dir env:
          $env:PATH -split ';'
          python --version
          conan --version
          cmake --version
      - name: configure Conan
        shell: bash
        run: |
          conan profile new default --detect
          conan profile update settings.compiler.cppstd=20 default
          conan profile update settings.compiler.runtime=MT${{ matrix.configuration == 'Debug' && 'd' || '' }} default
      - name: build dependencies
        uses: ./.github/actions/dependencies
        env:
          CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
          CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
          CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
        with:
          configuration: ${{ matrix.configuration }}
      - name: build
        uses: ./.github/actions/build
        with:
          generator: '${{ matrix.generator }}'
          configuration: ${{ matrix.configuration }}
          # Hard code for now. Move to the matrix if varied options are needed
          cmake-args: '-Dassert=ON -Dreporting=OFF -Dunity=ON'
          cmake-target: install
      - name: test
        shell: bash
        run: |
          ${build_dir}/${{ matrix.configuration }}/rippled --unittest --unittest-jobs $(nproc)
12 .gitignore vendored
@@ -37,10 +37,9 @@ Release/*.*
 *.gcov

 # Levelization checking
-Builds/levelization/results/rawincludes.txt
-Builds/levelization/results/paths.txt
-Builds/levelization/results/includes/
-Builds/levelization/results/includedby/
+.github/scripts/levelization/results/*
+!.github/scripts/levelization/results/loops.txt
+!.github/scripts/levelization/results/ordering.txt

 # Ignore tmp directory.
 tmp
@@ -111,4 +110,7 @@ bld.rippled/
 .vscode

 # Suggested in-tree build directory
-/.build/
+/.build*/
+
+# Locally patched Conan recipes
+external/conan-center-index/
.pre-commit-config.yaml
@@ -1,6 +1,39 @@
-# .pre-commit-config.yaml
+# To run pre-commit hooks, first install pre-commit:
+# - `pip install pre-commit==${PRE_COMMIT_VERSION}`
+#
+# Then, run the following command to install the git hook scripts:
+# - `pre-commit install`
+# You can run all configured hooks against all files with:
+# - `pre-commit run --all-files`
+# To manually run a specific hook, use:
+# - `pre-commit run <hook_id> --all-files`
+# To run the hooks against only the staged files, use:
+# - `pre-commit run`
 repos:
-  - repo: https://github.com/pre-commit/mirrors-clang-format
-    rev: v10.0.1
-    hooks:
-      - id: clang-format
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: mixed-line-ending
+      - id: check-merge-conflict
+        args: [--assume-in-merge]
+
+  - repo: https://github.com/pre-commit/mirrors-clang-format
+    rev: 7d85583be209cb547946c82fbe51f4bc5dd1d017 # frozen: v18.1.8
+    hooks:
+      - id: clang-format
+        args: [--style=file]
+        "types_or": [c++, c, proto]
+
+  - repo: https://github.com/rbubley/mirrors-prettier
+    rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2
+    hooks:
+      - id: prettier
+
+exclude: |
+  (?x)^(
+    external/.*|
+    .github/scripts/levelization/results/.*\.txt|
+    conan\.lock
+  )$
1 .prettierignore (new file)
@@ -0,0 +1 @@
external
200 API-CHANGELOG.md
@@ -8,47 +8,138 @@ The API version controls the API behavior you see. This includes what properties

For a log of breaking changes, see the **API Version [number]** headings. In general, breaking changes are associated with a particular API Version number. For non-breaking changes, scroll to the **XRP Ledger version [x.y.z]** headings. Non-breaking changes are associated with a particular XRP Ledger (`rippled`) release.

## API Version 2

API version 2 is available in `rippled` version 2.0.0 and later. To use this API, clients specify `"api_version" : 2` in each request.

#### Removed methods

In API version 2, the following deprecated methods are no longer available: (https://github.com/XRPLF/rippled/pull/4759)

- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.

#### Modifications to JSON transaction element in V2

In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. (https://github.com/XRPLF/rippled/pull/4775)
This helps to unify the JSON serialization format of transactions. (https://github.com/XRPLF/clio/issues/722, https://github.com/XRPLF/rippled/issues/4727)

- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`

Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:

- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`

This change affects the following methods:

- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.
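
A hedged illustration of the new layout, assuming a local `rippled` server with its JSON-RPC port on the conventional 5005 and a placeholder transaction hash; under API version 2 the transaction fields come back nested under `tx_json`, with `hash`, `ledger_index`, `close_time_iso`, and `validated` as sibling elements:

```bash
# Illustrative only: replace the hash with a real transaction ID on your network.
curl -s -X POST http://localhost:5005/ -d '{
  "method": "tx",
  "params": [{
    "api_version": 2,
    "transaction": "C53ECF838647FA5A4C780377025FEC7999AB4182590510CA461444B207AB74A9"
  }]
}'
```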
#### Modification to `Payment` transaction JSON schema

When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. (https://github.com/XRPLF/rippled/pull/4733)

- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: (https://github.com/XRPLF/rippled/pull/4733)
  - On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
  - On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error.
  - On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
  - On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.
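
A minimal sketch of the input-side rule, using illustrative addresses and drops values; because both fields are identical, either API version accepts this payload, while differing values would be rejected with `rpcINVALID_PARAMS`:

```bash
# Illustrative transaction body only; addresses and amounts are placeholders.
cat > payment.json <<'EOF'
{
  "TransactionType": "Payment",
  "Account": "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
  "Destination": "rPT1Sjq2YGrBMTttX4GZHjKu9dyfzbpAYe",
  "DeliverMax": "1000000",
  "Amount": "1000000"
}
EOF
```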

#### Modifications to account_info response

- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) (https://github.com/XRPLF/rippled/pull/3770)
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. (https://github.com/XRPLF/rippled/pull/4585)
  - (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)

#### Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response

- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4571)
  - The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. (https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579)
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
  - returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/issues/4288)
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
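
For instance, a hedged sketch of a request that fails under API version 2 because it combines `ledger_index_min` with `ledger_index` (illustrative account, local server on port 5005 assumed):

```bash
curl -s -X POST http://localhost:5005/ -d '{
  "method": "account_tx",
  "params": [{
    "api_version": 2,
    "account": "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
    "ledger_index_min": -1,
    "ledger_index": "validated"
  }]
}'
# Expected result: an "invalidParams" error rather than a normal response.
```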

#### Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response

- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)

## API Version 1

This version is supported by all `rippled` versions. At time of writing, it is the default API version, used when no `api_version` is specified. When a new API version is introduced, the command line interface will default to the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.
This version is supported by all `rippled` versions. For WebSocket and HTTP JSON-RPC requests, it is currently the default API version used when no `api_version` is specified.

### Idiosyncrasies
The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.

#### V1 account_info response
### Inconsistency: server_info - network_id

In [the response to the `account_info` command](https://xrpl.org/account_info.html#response-format), there is `account_data` - which is supposed to be an `AccountRoot` object - and `signer_lists` is returned in this object. However, the docs say that `signer_lists` should be at the root level of the response.
The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode). However, use of reporting mode is now discouraged, in favor of using [Clio](https://github.com/XRPLF/clio) instead.

It makes sense for `signer_lists` to be at the root level because signer lists are not part of the AccountRoot object. (First reported in [xrpl-dev-portal#938](https://github.com/XRPLF/xrpl-dev-portal/issues/938).)
## XRP Ledger server version 2.5.0

In `api_version: 2`, the `signer_lists` field [will be moved](#modifications-to-account_info-response-in-v2) to the root level of the account_info response. (https://github.com/XRPLF/rippled/pull/3770)
As of 2025-04-04, version 2.5.0 is in development. You can use a pre-release version by building from source or [using the `nightly` package](https://xrpl.org/docs/infrastructure/installation/install-rippled-on-ubuntu).

#### server_info - network_id
### Additions and bugfixes in 2.5.0

The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode).
- `channel_authorize`: If `signing_support` is not enabled in the config, the RPC is disabled.

## XRP Ledger server version 2.4.0

[Version 2.4.0](https://github.com/XRPLF/rippled/releases/tag/2.4.0) was released on March 4, 2025.

### Additions and bugfixes in 2.4.0

- `ledger_entry`: `state` is added as an alias for `ripple_state`.
- `ledger_entry`: Enables case-insensitive filtering by canonical name in addition to case-sensitive filtering by RPC name.
- `validators`: Added new field `validator_list_threshold` in response.
- `simulate`: A new RPC that executes a [dry run of a transaction submission](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate#2-rpc-simulate) (see the sketch after this list)
- Signing methods autofill fees better and properly handle transactions that don't have a base fee, and will also autofill the `NetworkID` field.
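
A hedged sketch of a `simulate` call, assuming a local server on port 5005 and illustrative addresses; the exact request shape is an assumption based on the linked XLS-0069d draft, and the transaction is evaluated but never submitted to the network:

```bash
curl -s -X POST http://localhost:5005/ -d '{
  "method": "simulate",
  "params": [{
    "tx_json": {
      "TransactionType": "Payment",
      "Account": "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
      "Destination": "rPT1Sjq2YGrBMTttX4GZHjKu9dyfzbpAYe",
      "Amount": "1000000"
    }
  }]
}'
```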

## XRP Ledger server version 2.3.0

[Version 2.3.0](https://github.com/XRPLF/rippled/releases/tag/2.3.0) was released on Nov 25, 2024.

### Breaking changes in 2.3.0

- `book_changes`: If the requested ledger version is not available on this node, a `ledgerNotFound` error is returned and the node does not attempt to acquire the ledger from the p2p network (as with other non-admin RPCs).

  Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC.

### Additions and bugfixes in 2.3.0

- `book_changes`: Returns a `validated` field in its response, which was missing in prior versions.

## XRP Ledger server version 2.2.0

[Version 2.2.0](https://github.com/XRPLF/rippled/releases/tag/2.2.0) was released on Jun 5, 2024. The following additions are non-breaking (because they are purely additive):

- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))
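
A hedged example of calling `feature` over a non-admin connection, assuming a local server on port 5005; with no parameters it lists the known amendments:

```bash
curl -s -X POST http://localhost:5005/ -d '{"method": "feature", "params": [{}]}'
```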

## XRP Ledger server version 2.0.0

### Additions in 2.2

Additions are intended to be non-breaking (because they are purely additive).

- `feature`: A non-admin mode that uses the same formatting as admin RPC, but hides potentially-sensitive data.

### Additions in 2.0

Additions are intended to be non-breaking (because they are purely additive).
[Version 2.0.0](https://github.com/XRPLF/rippled/releases/tag/2.0.0) was released on Jan 9, 2024. The following additions are non-breaking (because they are purely additive):

- `server_definitions`: A new RPC that generates a `definitions.json`-like output that can be used in XRPL libraries. (See the sketch after this list.)
- In `Payment` transactions, `DeliverMax` has been added. This is a replacement for the `Amount` field, which should not be used. Typically, the `delivered_amount` (in transaction metadata) should be used. To ease the transition, `DeliverMax` is present regardless of API version, since adding a field is non-breaking.
- API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting).
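
A hedged sketch of retrieving the definitions, again assuming a local server on port 5005:

```bash
curl -s -X POST http://localhost:5005/ -d '{"method": "server_definitions", "params": [{}]}'
```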

## XRP Ledger server version 2.2.0

The following is a non-breaking addition to the API.

- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))

## XRP Ledger server version 1.12.0

[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023.

### Additions in 1.12

Additions are intended to be non-breaking (because they are purely additive).
[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023. The following additions are non-breaking (because they are purely additive).

- `server_info`: Added `ports`, an array which advertises the RPC and WebSocket ports. This information is also included in the `/crawl` endpoint (which calls `server_info` internally). `grpc` and `peer` ports are also included. (https://github.com/XRPLF/rippled/pull/4427)
  - `ports` contains objects, each containing a `port` for the listening port (a number string), and a `protocol` array listing the supported protocols on that port.

@@ -125,71 +216,6 @@ was released on Mar 14, 2023.
removed from the [ledger subscription stream](https://xrpl.org/subscribe.html#ledger-stream), because it will no longer
have any meaning.

## API Version 2

API version 2 is introduced in `rippled` version 2.0. Users can request it explicitly by specifying `"api_version" : 2`.

#### Removed methods

In API version 2, the following deprecated methods are no longer available: (https://github.com/XRPLF/rippled/pull/4759)

- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.

#### Modifications to JSON transaction element in V2

In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. (https://github.com/XRPLF/rippled/pull/4775)
This helps to unify the JSON serialization format of transactions. (https://github.com/XRPLF/clio/issues/722, https://github.com/XRPLF/rippled/issues/4727)

- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`

Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:

- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`

This change affects the following methods:

- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.

#### Modification to `Payment` transaction JSON schema

When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. (https://github.com/XRPLF/rippled/pull/4733)

- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below: (https://github.com/XRPLF/rippled/pull/4733)
  - On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
  - On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error.
  - On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
  - On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.

#### Modifications to account_info response

- `signer_lists` is returned in the root of the response. In API version 1, it was nested under `account_data`. (https://github.com/XRPLF/rippled/pull/3770)
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. (https://github.com/XRPLF/rippled/pull/4585)
  - (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)

#### Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response

- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4571)
  - The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. (https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579)
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
  - returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/issues/4288)
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)

#### Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response

- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)

# Unit tests for API changes

The following information is useful to developers contributing to this project:
597 BUILD.md
@@ -3,29 +3,29 @@

| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |

> These instructions also assume a basic familiarity with Conan and CMake.
> If you are unfamiliar with Conan, you can read our
> [crash course](./docs/build/conan.md) or the official [Getting Started][3]
> walkthrough.

## Branches

For a stable release, choose the `master` branch or one of the [tagged
releases](https://github.com/ripple/rippled/releases).

```bash
git checkout master
```

For the latest release candidate, choose the `release` branch.

```bash
git checkout release
```

For the latest set of untested features, or to contribute, choose the `develop`
branch.

```bash
git checkout develop
```

@@ -33,133 +33,314 @@ git checkout develop

See [System Requirements](https://xrpl.org/system-requirements.html).

Building rippled generally requires git, Python, Conan, CMake, and a C++
compiler. Some guidance on setting up such a [C++ development environment can be
found here](./docs/build/environment.md).

- [Python 3.7](https://www.python.org/downloads/)
- [Conan 1.55](https://conan.io/downloads.html)
- [CMake 3.16](https://cmake.org/download/)

- [Python 3.11](https://www.python.org/downloads/), or higher
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
- [CMake 3.22](https://cmake.org/download/), or higher

[^1]:
    It is possible to build with Conan 1.60+, but the instructions are
    significantly different, which is why we are not recommending it.

`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:

| Compiler    | Version |
| ----------- | ------- |
| GCC         | 11      |
| Clang       | 13      |
| Apple Clang | 13.1.6  |
| MSVC        | 19.23   |

| Compiler    | Version   |
| ----------- | --------- |
| GCC         | 12        |
| Clang       | 16        |
| Apple Clang | 16        |
| MSVC        | 19.44[^3] |

### Linux

The Ubuntu operating system has received the highest level of
quality assurance, testing, and support.
The Ubuntu Linux distribution has received the highest level of quality
assurance, testing, and support. We also support Red Hat and use Debian
internally.

Here are [sample instructions for setting up a C++ development environment on
Linux](./docs/build/environment.md#linux).

### Mac

Many rippled engineers use macOS for development.

Here are [sample instructions for setting up a C++ development environment on
macOS](./docs/build/environment.md#macos).

### Windows

Windows is not recommended for production use at this time.
Windows is used by some engineers for development only.

- Additionally, 32-bit Windows development is not supported.
- Visual Studio 2022 is not yet supported.
  - rippled generally requires [Boost][] 1.77, which Conan cannot build with VS 2022.
  - Until rippled is updated for compatibility with later versions of Boost, Windows developers may need to use Visual Studio 2019.

[Boost]: https://www.boost.org/
[^3]: Windows is not recommended for production use.

## Steps

### Set Up Conan

After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python,
Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.

These instructions assume a basic familiarity with Conan and CMake. If you are
unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official
[Getting Started][3] walkthrough.

#### Default profile

You'll need at least one Conan profile:
We recommend that you import the provided `conan/profiles/default` profile:

```
conan profile new default --detect
```

Update the compiler settings:

```
conan profile update settings.compiler.cppstd=20 default
```

**Linux** developers will commonly have a default Conan [profile][] that compiles
with GCC and links with libstdc++.
If you are linking with libstdc++ (see profile setting `compiler.libcxx`),
then you will need to choose the `libstdc++11` ABI:

```
conan profile update settings.compiler.libcxx=libstdc++11 default
```

**Windows** developers may need to use the x64 native build tools.
An easy way to do that is to run the shortcut "x64 Native Tools Command
Prompt" for the version of Visual Studio that you have installed.

Windows developers must also build `rippled` and its dependencies for the x64
architecture:

```
conan profile update settings.arch=x86_64 default
```

### Multiple compilers

When `/usr/bin/g++` exists on a platform, it is the default cpp compiler. This
default works for some users.

However, if this compiler cannot build rippled or its dependencies, then you can
install another compiler and set Conan and CMake to use it.
Update the `conf.tools.build:compiler_executables` setting in order to set the correct variables (`CMAKE_<LANG>_COMPILER`) in the
generated CMake toolchain file.
For example, on Ubuntu 20, you may have gcc at `/usr/bin/gcc` and g++ at `/usr/bin/g++`; if that is the case, you can select those compilers with:

```
conan profile update 'conf.tools.build:compiler_executables={"c": "/usr/bin/gcc", "cpp": "/usr/bin/g++"}' default
```

```bash
conan config install conan/profiles/ -tf $(conan config home)/profiles/
```

Replace `/usr/bin/gcc` and `/usr/bin/g++` with paths to the desired compilers.
You can check your Conan profile by running:

It should choose the compiler for dependencies as well,
but not all of them have a Conan recipe that respects this setting (yet).
For the rest, you can set these environment variables.
Replace `<path>` with paths to the desired compilers:

```bash
conan profile show
```

- `conan profile update env.CC=<path> default`
- `conan profile update env.CXX=<path> default`

#### Custom profile

Export our [Conan recipe for Snappy](./external/snappy).
It does not explicitly link the C++ standard library,
which allows you to statically link it with GCC, if you want.
If the default profile does not work for you and you do not yet have a Conan
profile, you can create one by running:

```
conan export external/snappy snappy/1.1.10@
```

```bash
conan profile detect
```

Export our [Conan recipe for RocksDB](./external/rocksdb).
It does not override paths to dependencies when building with Visual Studio.
You may need to make changes to the profile to suit your environment. You can
refer to the provided `conan/profiles/default` profile for inspiration, and you
may also need to apply the required [tweaks](#conan-profile-tweaks) to this
default profile.

```
conan export external/rocksdb rocksdb/6.29.5@
```

### Patched recipes

Export our [Conan recipe for SOCI](./external/soci).
It patches their CMake to correctly import its dependencies.
The recipes in Conan Center occasionally need to be patched for compatibility
with the latest version of `rippled`. We maintain a fork of the Conan Center
[here](https://github.com/XRPLF/conan-center-index/) containing the patches.

```
conan export external/soci soci/4.0.3@
```

To ensure our patched recipes are used, you must add our Conan remote at a
higher index than the default Conan Center remote, so it is consulted first. You
can do this by running:

```bash
conan remote add --index 0 xrplf https://conan.ripplex.io
```

Alternatively, you can pull the patched recipes into the repository and use them
locally:

```bash
cd external
mkdir -p conan-center-index
cd conan-center-index
git init
git remote add origin git@github.com:XRPLF/conan-center-index.git
git sparse-checkout init
git sparse-checkout set recipes/grpc
git sparse-checkout add recipes/m4
git sparse-checkout add recipes/snappy
git sparse-checkout add recipes/soci
git fetch origin master
git checkout master
rm -rf .git
cd ../..
conan export --version 1.72.0 external/conan-center-index/recipes/grpc/all
conan export --version 1.4.19 external/conan-center-index/recipes/m4/all
conan export --version 1.1.10 external/conan-center-index/recipes/snappy/all
conan export --version 4.0.3 external/conan-center-index/recipes/soci/all
```

In the case we switch to a newer version of a dependency that still requires a
patch, it will be necessary for you to pull in the changes and re-export the
updated dependencies with the newer version. However, if we switch to a newer
version that no longer requires a patch, no action is required on your part, as
the new recipe will be automatically pulled from the official Conan Center.

> [!NOTE]
> You might need to add `--lockfile=""` to your `conan install` command
> to avoid automatic use of the existing `conan.lock` file when you run `conan export` manually on your machine.
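
For example, a hedged sketch of installing dependencies while ignoring the checked-in lockfile, reusing the install command from the build steps below:

```bash
# Run from the .build directory, as in the build steps below; --lockfile=""
# disables the checked-in conan.lock for this invocation.
conan install .. --output-folder . --build missing --lockfile=""
```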

### Conan profile tweaks

#### Missing compiler version

If you see an error similar to the following after running `conan profile show`:

```bash
ERROR: Invalid setting '17' is not a valid 'settings.compiler.version' value.
Possible values are ['5.0', '5.1', '6.0', '6.1', '7.0', '7.3', '8.0', '8.1',
'9.0', '9.1', '10.0', '11.0', '12.0', '13', '13.0', '13.1', '14', '14.0', '15',
'15.0', '16', '16.0']
Read "http://docs.conan.io/2/knowledge/faq.html#error-invalid-setting"
```

you need to amend the list of compiler versions in
`$(conan config home)/settings.yml`, by appending the required version number(s)
to the `version` array specific for your compiler. For example:

```yaml
apple-clang:
  version:
    [
      "5.0",
      "5.1",
      "6.0",
      "6.1",
      "7.0",
      "7.3",
      "8.0",
      "8.1",
      "9.0",
      "9.1",
      "10.0",
      "11.0",
      "12.0",
      "13",
      "13.0",
      "13.1",
      "14",
      "14.0",
      "15",
      "15.0",
      "16",
      "16.0",
      "17",
      "17.0",
    ]
```

#### Multiple compilers

If you have multiple compilers installed, make sure to select the one to use in
your default Conan configuration **before** running `conan profile detect`, by
setting the `CC` and `CXX` environment variables.

For example, if you are running macOS and have [homebrew
LLVM@18](https://formulae.brew.sh/formula/llvm@18), and want to use it as a
compiler in the new Conan profile:

```bash
export CC=$(brew --prefix llvm@18)/bin/clang
export CXX=$(brew --prefix llvm@18)/bin/clang++
conan profile detect
```

You should also explicitly set the path to the compiler in the profile file,
which helps to avoid errors when `CC` and/or `CXX` are set and disagree with the
selected Conan profile. For example:

```text
[conf]
tools.build:compiler_executables={'c':'/usr/bin/gcc','cpp':'/usr/bin/g++'}
```

#### Multiple profiles

You can manage multiple Conan profiles in the directory
`$(conan config home)/profiles`, for example renaming `default` to a different
name and then creating a new `default` profile for a different compiler.
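
A hedged sketch of that flow, with illustrative profile names and compiler paths:

```bash
cd $(conan config home)/profiles
mv default gcc                 # keep the old default around under another name
CC=/usr/bin/clang CXX=/usr/bin/clang++ conan profile detect   # writes a new default
```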

#### Select language

The default profile created by Conan will typically select a different C++
dialect than the C++20 used by this project. You should set `20` in the profile
line starting with `compiler.cppstd=`. For example:

```bash
sed -i.bak -e 's|^compiler\.cppstd=.*$|compiler.cppstd=20|' $(conan config home)/profiles/default
```

#### Select standard library in Linux

**Linux** developers will commonly have a default Conan [profile][] that
compiles with GCC and links with libstdc++. If you are linking with libstdc++
(see profile setting `compiler.libcxx`), then you will need to choose the
`libstdc++11` ABI:

```bash
sed -i.bak -e 's|^compiler\.libcxx=.*$|compiler.libcxx=libstdc++11|' $(conan config home)/profiles/default
```

#### Select architecture and runtime in Windows

**Windows** developers may need to use the x64 native build tools. An easy way
to do that is to run the shortcut "x64 Native Tools Command Prompt" for the
version of Visual Studio that you have installed.

Windows developers must also build `rippled` and its dependencies for the x64
architecture:

```bash
sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
```

**Windows** developers also must select the static runtime:

```bash
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
```

#### Clang workaround for grpc

If your compiler is clang, version 19 or later, or apple-clang, version 17 or
later, you may encounter a compilation error while building the `grpc`
dependency:

```text
In file included from .../lib/promise/try_seq.h:26:
.../lib/promise/detail/basic_seq.h:499:38: error: a template argument list is expected after a name prefixed by the template keyword [-Wmissing-template-arg-list-after-template-kw]
  499 |           Traits::template CallSeqFactory(f_, *cur_, std::move(arg)));
      |                                           ^
```

The workaround for this error is to add two lines to the profile:

```text
[conf]
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
```

#### Workaround for gcc 12

If your compiler is gcc, version 12, and you have enabled the `werr` option, you
may encounter a compilation error such as:

```text
/usr/include/c++/12/bits/char_traits.h:435:56: error: 'void* __builtin_memcpy(void*, const void*, long unsigned int)' accessing 9223372036854775810 or more bytes at offsets [2, 9223372036854775807] and 1 may overlap up to 9223372036854775813 bytes at offset -3 [-Werror=restrict]
  435 |         return static_cast<char_type*>(__builtin_memcpy(__s1, __s2, __n));
      |                ~~~~~~~~~~~~~~~~^~~~~~~~~~~~~~~~~
cc1plus: all warnings being treated as errors
```

The workaround for this error is to add two lines to your profile:

```text
[conf]
tools.build:cxxflags=['-Wno-restrict']
```

#### Workaround for clang 16

If your compiler is clang, version 16, you may encounter a compilation error
such as:

```text
In file included from .../boost/beast/websocket/stream.hpp:2857:
.../boost/beast/websocket/impl/read.hpp:695:17: error: call to 'async_teardown' is ambiguous
    async_teardown(impl.role, impl.stream(),
    ^~~~~~~~~~~~~~
```

The workaround for this error is to add two lines to your profile:

```text
[conf]
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
```

### Build and Test

@@ -179,63 +360,54 @@ It patches their CMake to correctly import its dependencies.
   the `install-folder` or `-if` option to every `conan install` command
   in the next step.

2. Generate CMake files for every configuration you want to build.
2. Use conan to generate CMake files for every configuration you want to build:

   ```
   conan install .. --output-folder . --build missing --settings build_type=Release
   conan install .. --output-folder . --build missing --settings build_type=Debug
   ```

   For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
   you only need to run this command once.
   For a multi-configuration generator, e.g. `Visual Studio`, you may want to
   run it more than once.
   To build Debug, in the next step, be sure to set `-DCMAKE_BUILD_TYPE=Debug`.

   Each of these commands should also have a different `build_type` setting.
   A second command with the same `build_type` setting will overwrite the files
   generated by the first. You can pass the build type on the command line with
   `--settings build_type=$BUILD_TYPE` or in the profile itself,
   under the section `[settings]` with the key `build_type`.

   If you are using a Microsoft Visual C++ compiler,
   then you will need to ensure consistency between the `build_type` setting
   and the `compiler.runtime` setting.

   When `build_type` is `Release`, `compiler.runtime` should be `MT`.

   When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.

   ```
   conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
   conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
   ```

3. Configure CMake and pass the toolchain file generated by Conan, located at
   `$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.

   Single-config generators:

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release ..
   ```

   Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
   and make sure it matches one of the `build_type` settings
   you chose in the previous step.
   For example, to build Debug, in the next command, replace "Release" with "Debug".

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON ..
   ```

   Multi-config generators:

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
   ```

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
   ```

   **Note:** You can pass build options for `rippled` in this step.

4. Build `rippled`.

   For a single-configuration generator, it will build whatever configuration
   you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator, you
   must pass the option `--config` to select the build configuration.

   Single-config generators:

@@ -255,19 +427,49 @@ It patches their CMake to correctly import its dependencies.
   Single-config generators:

   ```
   ./rippled --unittest
   ./rippled --unittest --unittest-jobs N
   ```

   Multi-config generators:

   ```
   ./Release/rippled --unittest
   ./Debug/rippled --unittest
   ./Release/rippled --unittest --unittest-jobs N
   ./Debug/rippled --unittest --unittest-jobs N
   ```

   Replace the `--unittest-jobs` parameter N with the desired unit test
   concurrency. The recommended setting is half of the number of available CPU
   cores.

   The location of the `rippled` binary in your build directory depends on your
   CMake generator. Pass `--help` to see the rest of the command line options.

#### Conan lockfile

To achieve reproducible dependencies, we use a [Conan lockfile](https://docs.conan.io/2/tutorial/versioning/lockfiles.html).

The `conan.lock` file in the repository contains a "snapshot" of the current dependencies.
It is implicitly used when running `conan` commands; you don't need to specify it.

You have to update this file every time you add a new dependency or change a revision or version of an existing dependency.

> [!NOTE]
> Conan uses the local cache by default when creating a lockfile.
>
> To ensure that lockfile creation works the same way on all developer machines, you should clear the local cache before creating a new lockfile.

To create a new lockfile, run the following commands in the repository root:

```bash
conan remove '*' --confirm
rm conan.lock
# This ensures that the xrplf remote is the first to be consulted
conan remote add --force --index 0 xrplf https://conan.ripplex.io
conan lock create . -o '&:jemalloc=True' -o '&:rocksdb=True'
```

> [!NOTE]
> If some dependencies are exclusive to some OS, you may need to run the last command for them adding `--profile:all <PROFILE>`.
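
For example, a hedged sketch of regenerating the lockfile against an explicit profile (profile name illustrative):

```bash
conan lock create . -o '&:jemalloc=True' -o '&:rocksdb=True' --profile:all default
```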

## Coverage report

@@ -308,7 +510,7 @@ variable in `cmake`. The specific command line used to run the `gcovr` tool will
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.

By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.

@@ -317,95 +519,81 @@ Example use with some cmake variables set:

```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dxrpld=ON -Dtests=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```

After the `coverage` target is completed, the generated coverage report will be
stored inside the build directory, as either of:

- a file named `coverage.`_extension_, with a suitable extension for the report format, or
- a directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.

## Options

| Option | Default Value | Description |
| --- | --- | --- |
| `assert` | OFF | Enable assertions. |
| `reporting` | OFF | Build the reporting mode feature. |
| `coverage` | OFF | Prepare the coverage report. |
| `tests` | ON | Build tests. |
| `unity` | ON | Configure a unity build. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |

| Option | Default Value | Description |
| ---------- | ------------- | -------------------------------------------------------------------------- |
| `assert` | OFF | Enable assertions. |
| `coverage` | OFF | Prepare the coverage report. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
| `tests` | OFF | Build tests. |
| `unity` | OFF | Configure a unity build. |
| `xrpld` | OFF | Build the xrpld (`rippled`) application, and not just the libxrpl library. |
| `werr` | OFF | Treat compilation warnings as errors |
| `wextra` | OFF | Enable additional compilation warnings |

[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
translation units. Non-unity builds may be faster for incremental builds,
and can be helpful for detecting `#include` omissions.
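
A hedged example of a configure line combining several of these options (single-config generator assumed, run from the `.build` directory as in the build steps above):

```bash
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
      -DCMAKE_BUILD_TYPE=Debug -Dassert=ON -Dxrpld=ON -Dtests=ON -Dwerr=ON ..
```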

## Troubleshooting

### Conan

After any updates or changes to dependencies, you may need to do the following:

1. Remove your build directory.
2. Remove the Conan cache:
2. Remove individual libraries from the Conan cache, e.g.

   ```bash
   conan remove 'grpc/*'
   ```

   rm -rf ~/.conan/data

   **or**

   Remove all libraries from the Conan cache:

   ```bash
   conan remove '*'
   ```

4. Re-run [conan install](#build-and-test).

3. Re-run [conan export](#patched-recipes) if needed.
4. [Regenerate the lockfile](#conan-lockfile).
5. Re-run [conan install](#build-and-test).

### no std::result_of
#### ERROR: Package not resolved

If your compiler version is recent enough to have removed `std::result_of` as
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
definition to your build.
If you're seeing an error like `ERROR: Package 'snappy/1.1.10' not resolved: Unable to find 'snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246' in remotes.`,
please add the `xrplf` remote or re-run `conan export` for the [patched recipes](#patched-recipes).

### `protobuf/port_def.inc` file not found

If `cmake --build .` results in an error due to a missing protobuf file, then
you might have generated CMake files for a different `build_type` than the
`CMAKE_BUILD_TYPE` you passed to Conan.

```
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
```

```
/rippled/.build/pb-xrpl.libpb/xrpl/proto/ripple.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
   10 | #include <google/protobuf/port_def.inc>
      |          ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1 error generated.
```

For example, if you want to build Debug:

### call to 'async_teardown' is ambiguous

If you are compiling with an early version of Clang 16, then you might hit
a [regression][6] when compiling C++20 that manifests as an [error in a Boost
header][7]. You can work around it by adding this preprocessor definition:

```
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
```

### recompile with -fPIC

If you get a linker error suggesting that you recompile Boost with
position-independent code, such as:

```
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
requires unsupported dynamic reloc 11; recompile with -fPIC
```

Conan most likely downloaded a bad binary distribution of the dependency.
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
for Linux. The solution is to build the dependency locally by passing
`--build boost` when calling `conan install`.

```
conan install --build boost ...
```

1. For conan install, pass `--settings build_type=Debug`
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug` (see the sketch below)
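
Concretely, a hedged sketch of keeping the two settings in sync for a Debug build, reusing the commands from the build steps above (run from the `.build` directory):

```bash
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Debug ..
```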

## Add a Dependency

@@ -413,16 +601,15 @@ If you want to experiment with a new package, follow these steps:

1. Search for the package on [Conan Center](https://conan.io/center/).
2. Modify [`conanfile.py`](./conanfile.py):
   - Add a version of the package to the `requires` property.
   - Change any default options for the package by adding them to the
     `default_options` property (with syntax `'$package:$option': $value`).
3. Modify [`CMakeLists.txt`](./CMakeLists.txt):
   - Add a call to `find_package($package REQUIRED)`.
   - Link a library from the package to the target `ripple_libs`
     (search for the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`).
4. Start coding! Don't forget to include whatever headers you need from the package.

[1]: https://github.com/conan-io/conan-center-index/issues/13168
[2]: https://en.cppreference.com/w/cpp/compiler_support/20
[3]: https://docs.conan.io/en/latest/getting_started.html
File diff suppressed because it is too large
@@ -1,39 +0,0 @@
|
||||
#[===================================================================[
|
||||
multiconfig misc
|
||||
#]===================================================================]
|
||||
|
||||
if (is_multiconfig)
|
||||
# This code finds all source files in the src subdirectory for inclusion
|
||||
# in the IDE file tree as non-compiled sources. Since this file list will
|
||||
# have some overlap with files we have already added to our targets to
|
||||
# be compiled, we explicitly remove any of these target source files from
|
||||
# this list.
|
||||
file (GLOB_RECURSE all_sources RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
CONFIGURE_DEPENDS
|
||||
src/*.* Builds/*.md docs/*.md src/*.md Builds/*.cmake)
|
||||
file(GLOB md_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} CONFIGURE_DEPENDS
|
||||
*.md)
|
||||
LIST(APPEND all_sources ${md_files})
|
||||
foreach (_target secp256k1::secp256k1 ed25519::ed25519 xrpl_core rippled)
|
||||
get_target_property (_type ${_target} TYPE)
|
||||
if(_type STREQUAL "INTERFACE_LIBRARY")
|
||||
continue()
|
||||
endif()
|
||||
get_target_property (_src ${_target} SOURCES)
|
||||
list (REMOVE_ITEM all_sources ${_src})
|
||||
endforeach ()
|
||||
target_sources (rippled PRIVATE ${all_sources})
|
||||
set_property (
|
||||
SOURCE ${all_sources}
|
||||
APPEND
|
||||
PROPERTY HEADER_FILE_ONLY true)
|
||||
if (MSVC)
|
||||
set_property(
|
||||
DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
PROPERTY VS_STARTUP_PROJECT rippled)
|
||||
endif ()
|
||||
|
||||
group_sources(src)
|
||||
group_sources(docs)
|
||||
group_sources(Builds)
|
||||
endif ()
|
||||
@@ -1,22 +0,0 @@

option (validator_keys "Enables building of validator-keys-tool as a separate target (imported via FetchContent)" OFF)

if (validator_keys)
  git_branch (current_branch)
  # default to tracking VK master branch unless we are on release
  if (NOT (current_branch STREQUAL "release"))
    set (current_branch "master")
  endif ()
  message (STATUS "tracking ValidatorKeys branch: ${current_branch}")

  FetchContent_Declare (
    validator_keys_src
    GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
    GIT_TAG "${current_branch}"
  )
  FetchContent_GetProperties (validator_keys_src)
  if (NOT validator_keys_src_POPULATED)
    message (STATUS "Pausing to download ValidatorKeys...")
    FetchContent_Populate (validator_keys_src)
  endif ()
  add_subdirectory (${validator_keys_src_SOURCE_DIR} ${CMAKE_BINARY_DIR}/validator-keys)
endif ()
@@ -1,15 +0,0 @@

#[===================================================================[
   read version from source
#]===================================================================]

file (STRINGS src/ripple/protocol/impl/BuildInfo.cpp BUILD_INFO)
foreach (line_ ${BUILD_INFO})
  if (line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
    set (rippled_version ${CMAKE_MATCH_1})
  endif ()
endforeach ()
if (rippled_version)
  message (STATUS "rippled version: ${rippled_version}")
else ()
  message (FATAL_ERROR "unable to determine rippled version")
endif ()
@@ -1,51 +0,0 @@

Loop: ripple.app ripple.core
  ripple.app > ripple.core

Loop: ripple.app ripple.ledger
  ripple.app > ripple.ledger

Loop: ripple.app ripple.net
  ripple.app > ripple.net

Loop: ripple.app ripple.nodestore
  ripple.app > ripple.nodestore

Loop: ripple.app ripple.overlay
  ripple.overlay ~= ripple.app

Loop: ripple.app ripple.peerfinder
  ripple.app > ripple.peerfinder

Loop: ripple.app ripple.rpc
  ripple.rpc > ripple.app

Loop: ripple.app ripple.shamap
  ripple.app > ripple.shamap

Loop: ripple.basics ripple.core
  ripple.core > ripple.basics

Loop: ripple.basics ripple.json
  ripple.json ~= ripple.basics

Loop: ripple.basics ripple.protocol
  ripple.protocol > ripple.basics

Loop: ripple.core ripple.net
  ripple.net > ripple.core

Loop: ripple.net ripple.rpc
  ripple.rpc > ripple.net

Loop: ripple.nodestore ripple.overlay
  ripple.overlay ~= ripple.nodestore

Loop: ripple.overlay ripple.rpc
  ripple.rpc ~= ripple.overlay

Loop: test.jtx test.toplevel
  test.toplevel > test.jtx

Loop: test.jtx test.unit_test
  test.unit_test == test.jtx
@@ -1,229 +0,0 @@

ripple.app > ripple.basics
ripple.app > ripple.beast
ripple.app > ripple.conditions
ripple.app > ripple.consensus
ripple.app > ripple.crypto
ripple.app > ripple.json
ripple.app > ripple.protocol
ripple.app > ripple.resource
ripple.app > test.unit_test
ripple.basics > ripple.beast
ripple.conditions > ripple.basics
ripple.conditions > ripple.protocol
ripple.consensus > ripple.basics
ripple.consensus > ripple.beast
ripple.consensus > ripple.json
ripple.consensus > ripple.protocol
ripple.core > ripple.beast
ripple.core > ripple.json
ripple.core > ripple.protocol
ripple.crypto > ripple.basics
ripple.json > ripple.beast
ripple.ledger > ripple.basics
ripple.ledger > ripple.beast
ripple.ledger > ripple.core
ripple.ledger > ripple.json
ripple.ledger > ripple.protocol
ripple.net > ripple.basics
ripple.net > ripple.beast
ripple.net > ripple.json
ripple.net > ripple.protocol
ripple.net > ripple.resource
ripple.nodestore > ripple.basics
ripple.nodestore > ripple.beast
ripple.nodestore > ripple.core
ripple.nodestore > ripple.json
ripple.nodestore > ripple.protocol
ripple.nodestore > ripple.unity
ripple.overlay > ripple.basics
ripple.overlay > ripple.beast
ripple.overlay > ripple.core
ripple.overlay > ripple.json
ripple.overlay > ripple.peerfinder
ripple.overlay > ripple.protocol
ripple.overlay > ripple.resource
ripple.overlay > ripple.server
ripple.peerfinder > ripple.basics
ripple.peerfinder > ripple.beast
ripple.peerfinder > ripple.core
ripple.peerfinder > ripple.protocol
ripple.perflog > ripple.basics
ripple.perflog > ripple.beast
ripple.perflog > ripple.core
ripple.perflog > ripple.json
ripple.perflog > ripple.nodestore
ripple.perflog > ripple.protocol
ripple.perflog > ripple.rpc
ripple.protocol > ripple.beast
ripple.protocol > ripple.crypto
ripple.protocol > ripple.json
ripple.resource > ripple.basics
ripple.resource > ripple.beast
ripple.resource > ripple.json
ripple.resource > ripple.protocol
ripple.rpc > ripple.basics
ripple.rpc > ripple.beast
ripple.rpc > ripple.core
ripple.rpc > ripple.crypto
ripple.rpc > ripple.json
ripple.rpc > ripple.ledger
ripple.rpc > ripple.nodestore
ripple.rpc > ripple.protocol
ripple.rpc > ripple.resource
ripple.rpc > ripple.server
ripple.rpc > ripple.shamap
ripple.server > ripple.basics
ripple.server > ripple.beast
ripple.server > ripple.crypto
ripple.server > ripple.json
ripple.server > ripple.protocol
ripple.shamap > ripple.basics
ripple.shamap > ripple.beast
ripple.shamap > ripple.crypto
ripple.shamap > ripple.nodestore
ripple.shamap > ripple.protocol
test.app > ripple.app
test.app > ripple.basics
test.app > ripple.beast
test.app > ripple.core
test.app > ripple.json
test.app > ripple.ledger
test.app > ripple.overlay
test.app > ripple.protocol
test.app > ripple.resource
test.app > ripple.rpc
test.app > test.jtx
test.app > test.rpc
test.app > test.toplevel
test.app > test.unit_test
test.basics > ripple.basics
test.basics > ripple.beast
test.basics > ripple.json
test.basics > ripple.protocol
test.basics > ripple.rpc
test.basics > test.jtx
test.basics > test.unit_test
test.beast > ripple.basics
test.beast > ripple.beast
test.conditions > ripple.basics
test.conditions > ripple.beast
test.conditions > ripple.conditions
test.consensus > ripple.app
test.consensus > ripple.basics
test.consensus > ripple.beast
test.consensus > ripple.consensus
test.consensus > ripple.ledger
test.consensus > test.csf
test.consensus > test.toplevel
test.consensus > test.unit_test
test.core > ripple.basics
test.core > ripple.beast
test.core > ripple.core
test.core > ripple.crypto
test.core > ripple.json
test.core > ripple.server
test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test
test.csf > ripple.basics
test.csf > ripple.beast
test.csf > ripple.consensus
test.csf > ripple.json
test.csf > ripple.protocol
test.json > ripple.beast
test.json > ripple.json
test.json > test.jtx
test.jtx > ripple.app
test.jtx > ripple.basics
test.jtx > ripple.beast
test.jtx > ripple.consensus
test.jtx > ripple.core
test.jtx > ripple.json
test.jtx > ripple.ledger
test.jtx > ripple.net
test.jtx > ripple.protocol
test.jtx > ripple.resource
test.jtx > ripple.rpc
test.jtx > ripple.server
test.ledger > ripple.app
test.ledger > ripple.basics
test.ledger > ripple.beast
test.ledger > ripple.core
test.ledger > ripple.ledger
test.ledger > ripple.protocol
test.ledger > test.jtx
test.ledger > test.toplevel
test.net > ripple.net
test.net > test.jtx
test.net > test.toplevel
test.net > test.unit_test
test.nodestore > ripple.app
test.nodestore > ripple.basics
test.nodestore > ripple.beast
test.nodestore > ripple.core
test.nodestore > ripple.nodestore
test.nodestore > ripple.protocol
test.nodestore > ripple.unity
test.nodestore > test.jtx
test.nodestore > test.toplevel
test.nodestore > test.unit_test
test.overlay > ripple.app
test.overlay > ripple.basics
test.overlay > ripple.beast
test.overlay > ripple.overlay
test.overlay > ripple.peerfinder
test.overlay > ripple.protocol
test.overlay > ripple.shamap
test.overlay > test.jtx
test.overlay > test.unit_test
test.peerfinder > ripple.basics
test.peerfinder > ripple.beast
test.peerfinder > ripple.core
test.peerfinder > ripple.peerfinder
test.peerfinder > ripple.protocol
test.peerfinder > test.beast
test.peerfinder > test.unit_test
test.protocol > ripple.basics
test.protocol > ripple.beast
test.protocol > ripple.crypto
test.protocol > ripple.json
test.protocol > ripple.protocol
test.protocol > test.toplevel
test.resource > ripple.basics
test.resource > ripple.beast
test.resource > ripple.resource
test.resource > test.unit_test
test.rpc > ripple.app
test.rpc > ripple.basics
test.rpc > ripple.beast
test.rpc > ripple.core
test.rpc > ripple.json
test.rpc > ripple.net
test.rpc > ripple.nodestore
test.rpc > ripple.overlay
test.rpc > ripple.protocol
test.rpc > ripple.resource
test.rpc > ripple.rpc
test.rpc > test.jtx
test.rpc > test.nodestore
test.rpc > test.toplevel
test.server > ripple.app
test.server > ripple.basics
test.server > ripple.beast
test.server > ripple.core
test.server > ripple.json
test.server > ripple.rpc
test.server > ripple.server
test.server > test.jtx
test.server > test.toplevel
test.server > test.unit_test
test.shamap > ripple.basics
test.shamap > ripple.beast
test.shamap > ripple.nodestore
test.shamap > ripple.protocol
test.shamap > ripple.shamap
test.shamap > test.unit_test
test.toplevel > ripple.json
test.toplevel > test.csf
test.unit_test > ripple.basics
test.unit_test > ripple.beast
@@ -9,23 +9,43 @@ endif()

# Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows.
file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake")
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")

project(rippled)
project(xrpl)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
  # GCC-specific fixes
  add_compile_options(-Wno-unknown-pragmas -Wno-subobject-linkage)
  # -Wno-subobject-linkage can be removed when we upgrade GCC version to at least 13.3
elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
  # Clang-specific fixes
  add_compile_options(-Wno-unknown-warning-option) # Ignore unknown warning options
elseif(MSVC)
  # MSVC-specific fixes
  add_compile_options(/wd4068) # Ignore unknown pragmas
endif()

# make GIT_COMMIT_HASH define available to all sources
find_package(Git)
if(Git_FOUND)
  execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git describe --always --abbrev=40
  execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
    OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
  if(gch)
    set(GIT_COMMIT_HASH "${gch}")
    message(STATUS gch: ${GIT_COMMIT_HASH})
    add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
  endif()

  execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD
    OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
  if(gb)
    set(GIT_BRANCH "${gb}")
    message(STATUS gb: ${GIT_BRANCH})
    add_definitions(-DGIT_BRANCH="${GIT_BRANCH}")
  endif()
endif() #git

if(thread_safety_analysis)

@@ -70,9 +90,15 @@ set_target_properties(OpenSSL::SSL PROPERTIES
  INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
)
set(SECP256K1_INSTALL TRUE)
set(SECP256K1_BUILD_BENCHMARK FALSE)
set(SECP256K1_BUILD_TESTS FALSE)
set(SECP256K1_BUILD_EXHAUSTIVE_TESTS FALSE)
set(SECP256K1_BUILD_CTIME_TESTS FALSE)
set(SECP256K1_BUILD_EXAMPLES FALSE)
add_subdirectory(external/secp256k1)
add_library(secp256k1::secp256k1 ALIAS secp256k1)
add_subdirectory(external/ed25519-donna)
add_subdirectory(external/antithesis-sdk)
find_package(gRPC REQUIRED)
find_package(lz4 REQUIRED)
# Target names with :: are not allowed in a generator expression.

@@ -81,7 +107,6 @@ find_package(lz4 REQUIRED)
find_package(LibArchive REQUIRED)
find_package(SOCI REQUIRED)
find_package(SQLite3 REQUIRED)
find_package(Snappy REQUIRED)

option(rocksdb "Enable RocksDB" ON)
if(rocksdb)

@@ -116,22 +141,16 @@ else()
endif()
target_link_libraries(ripple_libs INTERFACE ${nudb})

if(reporting)
  find_package(cassandra-cpp-driver REQUIRED)
  find_package(PostgreSQL REQUIRED)
  target_link_libraries(ripple_libs INTERFACE
    cassandra-cpp-driver::cassandra-cpp-driver
    PostgreSQL::PostgreSQL
  )
endif()

if(coverage)
  include(RippledCov)
endif()

###

set(PROJECT_EXPORT_SET RippleExports)
include(RippledCore)
include(RippledInstall)
include(RippledMultiConfig)
include(RippledValidatorKeys)

if(tests)
  include(CTest)
  add_subdirectory(src/tests/libxrpl)
endif()
CONTRIBUTING.md (958 lines; file diff suppressed because it is too large.)
@@ -1,4 +1,4 @@

ISC License

Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
Copyright (c) 2012-2020, the XRP Ledger developers.

@@ -14,4 +14,3 @@ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
README.md (50 lines changed)
@@ -1,51 +1,54 @@

[codecov](https://codecov.io/gh/XRPLF/rippled)

# The XRP Ledger

The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powered by a network of peer-to-peer nodes. The XRP Ledger uses a novel Byzantine Fault Tolerant consensus algorithm to settle and record transactions in a secure distributed database without a central operator.

## XRP

[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free crypto-asset native to the XRP Ledger, and is designed as a gas token for network services and to bridge different currencies. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.

## rippled

The server software that powers the XRP Ledger is called `rippled` and is available in this repository under the permissive [ISC open-source license](LICENSE.md). The `rippled` server software is written primarily in C++ and runs on a variety of platforms. The `rippled` server software can run in several modes depending on its [configuration](https://xrpl.org/rippled-server-modes.html).

If you are interested in running an **API Server** (including a **Full History Server**) or a **Reporting Mode** server, take a look at [Clio](https://github.com/XRPLF/clio). rippled Reporting Mode is expected to be replaced by Clio.
If you are interested in running an **API Server** (including a **Full History Server**), take a look at [Clio](https://github.com/XRPLF/clio). (rippled Reporting Mode has been replaced by Clio.)

### Build from Source

- [Read the build instructions in `BUILD.md`](BUILD.md)
- If you encounter any issues, please [open an issue](https://github.com/XRPLF/rippled/issues)

## Key Features of the XRP Ledger

- **[Censorship-Resistant Transaction Processing][]:** No single party decides which transactions succeed or fail, and no one can "roll back" a transaction after it completes. As long as those who choose to participate in the network keep it healthy, they can settle transactions in seconds.
- **[Fast, Efficient Consensus Algorithm][]:** The XRP Ledger's consensus algorithm settles transactions in 4 to 5 seconds, processing at a throughput of up to 1500 transactions per second. These properties put XRP at least an order of magnitude ahead of other top digital assets.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction costs.
- **[Responsible Software Governance][]:** A team of full-time, world-class developers at Ripple maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests, and builds constructive relationships with governments and financial institutions worldwide.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction fees.
- **[Responsible Software Governance][]:** A team of full-time developers at Ripple & other organizations maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests.
- **[Secure, Adaptable Cryptography][]:** The XRP Ledger relies on industry standard digital signature systems like ECDSA (the same scheme used by Bitcoin) but also supports modern, efficient algorithms like Ed25519. The extensible nature of the XRP Ledger's software makes it possible to add and disable algorithms as the state of the art in cryptography advances.
- **[Modern Features for Smart Contracts][]:** Features like Escrow, Checks, and Payment Channels support cutting-edge financial applications including the [Interledger Protocol](https://interledger.org/). This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
- **[Modern Features][]:** Features like Escrow, Checks, and Payment Channels support financial applications atop of the XRP Ledger. This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
- **[On-Ledger Decentralized Exchange][]:** In addition to all the features that make XRP useful on its own, the XRP Ledger also has a fully-functional accounting system for tracking and trading obligations denominated in any way users want, and an exchange built into the protocol. The XRP Ledger can settle long, cross-currency payment paths and exchanges of multiple currencies in atomic transactions, bridging gaps of trust with XRP.

[Censorship-Resistant Transaction Processing]: https://xrpl.org/xrp-ledger-overview.html#censorship-resistant-transaction-processing
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/xrp-ledger-overview.html#fast-efficient-consensus-algorithm
[Finite XRP Supply]: https://xrpl.org/xrp-ledger-overview.html#finite-xrp-supply
[Responsible Software Governance]: https://xrpl.org/xrp-ledger-overview.html#responsible-software-governance
[Secure, Adaptable Cryptography]: https://xrpl.org/xrp-ledger-overview.html#secure-adaptable-cryptography
[Modern Features for Smart Contracts]: https://xrpl.org/xrp-ledger-overview.html#modern-features-for-smart-contracts
[On-Ledger Decentralized Exchange]: https://xrpl.org/xrp-ledger-overview.html#on-ledger-decentralized-exchange

[Censorship-Resistant Transaction Processing]: https://xrpl.org/transaction-censorship-detection.html#transaction-censorship-detection
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/consensus-research.html#consensus-research
[Finite XRP Supply]: https://xrpl.org/what-is-xrp.html
[Responsible Software Governance]: https://xrpl.org/contribute-code.html#contribute-code-to-the-xrp-ledger
[Secure, Adaptable Cryptography]: https://xrpl.org/cryptographic-keys.html#cryptographic-keys
[Modern Features]: https://xrpl.org/use-specialized-payment-types.html
[On-Ledger Decentralized Exchange]: https://xrpl.org/decentralized-exchange.html#decentralized-exchange

## Source Code

Here are some good places to start learning the source code:

- Read the markdown files in the source tree: `src/ripple/**/*.md`.
- Read [the levelization document](./Builds/levelization) to get an idea of the internal dependency graph.
- Read [the levelization document](.github/scripts/levelization) to get an idea of the internal dependency graph.
- In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component.

### Repository Contents

| Folder      | Contents                                          |
| :---------- | :------------------------------------------------ |
| `./bin`     | Scripts and data files for Ripple integrators.    |
| `./Builds`  | Platform-specific guides for building `rippled`.  |
| `./docs`    | Source documentation files and doxygen config.    |

@@ -55,15 +58,14 @@ Here are some good places to start learning the source code:

Some of the directories under `src` are external repositories included using
git-subtree. See those directories' README files for more details.

## Additional Documentation

- [XRP Ledger Dev Portal](https://xrpl.org/)
- [Setup and Installation](https://xrpl.org/install-rippled.html)
- [Source Documentation (Doxygen)](https://xrplf.github.io/rippled/)

## See Also

- [Clio API Server for the XRP Ledger](https://github.com/XRPLF/clio)
- [Mailing List for Release Announcements](https://groups.google.com/g/ripple-server)
- [Learn more about the XRP Ledger (YouTube)](https://www.youtube.com/playlist?list=PLJQ55Tj1hIVZtJ_JdTvSum2qMTsedWkNi)
RELEASENOTES.md (4330 lines; file diff suppressed because it is too large.)
SECURITY.md (14 lines changed)
@@ -2,7 +2,6 @@

For more details on operating an XRP Ledger server securely, please visit https://xrpl.org/manage-the-rippled-server.html.

# Security Policy

## Supported Versions

@@ -77,13 +76,14 @@ The amount paid varies dramatically. Vulnerabilities that are harmless on their

To report a qualifying bug, please send a detailed report to:

| Email Address | bugs@ripple.com                                      |
| :-----------: | :--------------------------------------------------- |
| Short Key ID  | `0xC57929BE`                                          |
| Long Key ID   | `0xCD49A0AFC57929BE`                                  |
| Fingerprint   | `24E6 3B02 37E0 FA9C 5E96 8974 CD49 A0AF C579 29BE`   |

The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is:
The full PGP key for this address, which is also available on several key servers (e.g. on [keys.gnupg.net](https://keys.gnupg.net)), is:

```
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBFUwGHYBEAC0wpGpBPkd8W1UdQjg9+cEFzeIEJRaoZoeuJD8mofwI5Ejnjdt
bin/browser.js (470 lines)
@@ -1,470 +0,0 @@

#!/usr/bin/node
//
// ledger?l=L
// transaction?h=H
// ledger_entry?l=L&h=H
// account?l=L&a=A
// directory?l=L&dir_root=H&i=I
// directory?l=L&o=A&i=I // owner directory
// offer?l=L&offer=H
// offer?l=L&account=A&i=I
// ripple_state=l=L&a=A&b=A&c=C
// account_lines?l=L&a=A
//
// A=address
// C=currency 3 letter code
// H=hash
// I=index
// L=current | closed | validated | index | hash
//

var async = require("async");
var extend = require("extend");
var http = require("http");
var url = require("url");

var Remote = require("ripple-lib").Remote;

var program = process.argv[1];

var httpd_response = function (res, opts) {
  var self = this;

  res.statusCode = opts.statusCode;
  res.end(
    "<HTML>"
    + "<HEAD><TITLE>Title</TITLE></HEAD>"
    + "<BODY BACKGROUND=\"#FFFFFF\">"
    + "State:" + self.state
    + "<UL>"
    + "<LI><A HREF=\"/\">home</A>"
    + "<LI>" + html_link('r4EM4gBQfr1QgQLXSPF4r7h84qE9mb6iCC')
    // + "<LI><A HREF=\""+test+"\">rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh</A>"
    + "<LI><A HREF=\"/ledger\">ledger</A>"
    + "</UL>"
    + (opts.body || '')
    + '<HR><PRE>'
    + (opts.url || '')
    + '</PRE>'
    + "</BODY>"
    + "</HTML>"
  );
};

var html_link = function (generic) {
  return '<A HREF="' + build_uri({ type: 'account', account: generic}) + '">' + generic + '</A>';
};

// Build a link to a type.
var build_uri = function (params, opts) {
  var c;

  if (params.type === 'account') {
    c = {
      pathname: 'account',
      query: {
        l: params.ledger,
        a: params.account,
      },
    };
  } else if (params.type === 'ledger') {
    c = {
      pathname: 'ledger',
      query: {
        l: params.ledger,
      },
    };
  } else if (params.type === 'transaction') {
    c = {
      pathname: 'transaction',
      query: {
        h: params.hash,
      },
    };
  } else {
    c = {};
  }

  opts = opts || {};

  c.protocol = "http";
  c.hostname = opts.hostname || self.base.hostname;
  c.port = opts.port || self.base.port;

  return url.format(c);
};

var build_link = function (item, link) {
  console.log(link);
  return "<A HREF=" + link + ">" + item + "</A>";
};

var rewrite_field = function (type, obj, field, opts) {
  if (field in obj) {
    obj[field] = rewrite_type(type, obj[field], opts);
  }
};

var rewrite_type = function (type, obj, opts) {
  if ('amount' === type) {
    if ('string' === typeof obj) {
      // XRP.
      return '<B>' + obj + '</B>';
    } else {
      rewrite_field('address', obj, 'issuer', opts);

      return obj;
    }
    return build_link(
      obj,
      build_uri({
        type: 'account',
        account: obj
      }, opts)
    );
  }
  if ('address' === type) {
    return build_link(
      obj,
      build_uri({
        type: 'account',
        account: obj
      }, opts)
    );
  }
  else if ('ledger' === type) {
    return build_link(
      obj,
      build_uri({
        type: 'ledger',
        ledger: obj,
      }, opts)
    );
  }
  else if ('node' === type) {
    // A node
    if ('PreviousTxnID' in obj)
      obj.PreviousTxnID = rewrite_type('transaction', obj.PreviousTxnID, opts);

    if ('Offer' === obj.LedgerEntryType) {
      if ('NewFields' in obj) {
        if ('TakerGets' in obj.NewFields)
          obj.NewFields.TakerGets = rewrite_type('amount', obj.NewFields.TakerGets, opts);

        if ('TakerPays' in obj.NewFields)
          obj.NewFields.TakerPays = rewrite_type('amount', obj.NewFields.TakerPays, opts);
      }
    }

    obj.LedgerEntryType = '<B>' + obj.LedgerEntryType + '</B>';

    return obj;
  }
  else if ('transaction' === type) {
    // Reference to a transaction.
    return build_link(
      obj,
      build_uri({
        type: 'transaction',
        hash: obj
      }, opts)
    );
  }

  return 'ERROR: ' + type;
};

var rewrite_object = function (obj, opts) {
  var out = extend({}, obj);

  rewrite_field('address', out, 'Account', opts);

  rewrite_field('ledger', out, 'parent_hash', opts);
  rewrite_field('ledger', out, 'ledger_index', opts);
  rewrite_field('ledger', out, 'ledger_current_index', opts);
  rewrite_field('ledger', out, 'ledger_hash', opts);

  if ('ledger' in obj) {
    // It's a ledger header.
    out.ledger = rewrite_object(out.ledger, opts);

    if ('ledger_hash' in out.ledger)
      out.ledger.ledger_hash = '<B>' + out.ledger.ledger_hash + '</B>';

    delete out.ledger.hash;
    delete out.ledger.totalCoins;
  }

  if ('TransactionType' in obj) {
    // It's a transaction.
    out.TransactionType = '<B>' + obj.TransactionType + '</B>';

    rewrite_field('amount', out, 'TakerGets', opts);
    rewrite_field('amount', out, 'TakerPays', opts);
    rewrite_field('ledger', out, 'inLedger', opts);

    out.meta.AffectedNodes = out.meta.AffectedNodes.map(function (node) {
      var kind = 'CreatedNode' in node
        ? 'CreatedNode'
        : 'ModifiedNode' in node
        ? 'ModifiedNode'
        : 'DeletedNode' in node
        ? 'DeletedNode'
        : undefined;

      if (kind) {
        node[kind] = rewrite_type('node', node[kind], opts);
      }
      return node;
    });
  }
  else if ('node' in obj && 'LedgerEntryType' in obj.node) {
    // It's a ledger entry.

    if (obj.node.LedgerEntryType === 'AccountRoot') {
      rewrite_field('address', out.node, 'Account', opts);
      rewrite_field('transaction', out.node, 'PreviousTxnID', opts);
      rewrite_field('ledger', out.node, 'PreviousTxnLgrSeq', opts);
    }

    out.node.LedgerEntryType = '<B>' + out.node.LedgerEntryType + '</B>';
  }

  return out;
};

var augment_object = function (obj, opts, done) {
  if (obj.node.LedgerEntryType == 'AccountRoot') {
    var tx_hash = obj.node.PreviousTxnID;
    var tx_ledger = obj.node.PreviousTxnLgrSeq;

    obj.history = [];

    async.whilst(
      function () { return tx_hash; },
      function (callback) {
        // console.log("augment_object: request: %s %s", tx_hash, tx_ledger);
        opts.remote.request_tx(tx_hash)
          .on('success', function (m) {
            tx_hash = undefined;
            tx_ledger = undefined;

            //console.log("augment_object: ", JSON.stringify(m));
            m.meta.AffectedNodes.filter(function(n) {
              // console.log("augment_object: ", JSON.stringify(n));
              // if (n.ModifiedNode)
              //   console.log("augment_object: %s %s %s %s %s %s/%s", 'ModifiedNode' in n, n.ModifiedNode && (n.ModifiedNode.LedgerEntryType === 'AccountRoot'), n.ModifiedNode && n.ModifiedNode.FinalFields && (n.ModifiedNode.FinalFields.Account === obj.node.Account), Object.keys(n)[0], n.ModifiedNode && (n.ModifiedNode.LedgerEntryType), obj.node.Account, n.ModifiedNode && n.ModifiedNode.FinalFields && n.ModifiedNode.FinalFields.Account);
              // if ('ModifiedNode' in n && n.ModifiedNode.LedgerEntryType === 'AccountRoot')
              // {
              //   console.log("***: ", JSON.stringify(m));
              //   console.log("***: ", JSON.stringify(n));
              // }
              return 'ModifiedNode' in n
                && n.ModifiedNode.LedgerEntryType === 'AccountRoot'
                && n.ModifiedNode.FinalFields
                && n.ModifiedNode.FinalFields.Account === obj.node.Account;
            })
            .forEach(function (n) {
              tx_hash = n.ModifiedNode.PreviousTxnID;
              tx_ledger = n.ModifiedNode.PreviousTxnLgrSeq;

              obj.history.push({
                tx_hash: tx_hash,
                tx_ledger: tx_ledger
              });
              console.log("augment_object: next: %s %s", tx_hash, tx_ledger);
            });

            callback();
          })
          .on('error', function (m) {
            callback(m);
          })
          .request();
      },
      function (err) {
        if (err) {
          done();
        }
        else {
          async.forEach(obj.history, function (o, callback) {
            opts.remote.request_account_info(obj.node.Account)
              .ledger_index(o.tx_ledger)
              .on('success', function (m) {
                //console.log("augment_object: ", JSON.stringify(m));
                o.Balance = m.account_data.Balance;
                // o.account_data = m.account_data;
                callback();
              })
              .on('error', function (m) {
                o.error = m;
                callback();
              })
              .request();
          },
          function (err) {
            done(err);
          });
        }
      });
  }
  else {
    done();
  }
};

if (process.argv.length < 4 || process.argv.length > 7) {
  console.log("Usage: %s ws_ip ws_port [<ip> [<port> [<start>]]]", program);
}
else {
  var ws_ip = process.argv[2];
  var ws_port = process.argv[3];
  var ip = process.argv.length > 4 ? process.argv[4] : "127.0.0.1";
  var port = process.argv.length > 5 ? process.argv[5] : "8080";

  // console.log("START");
  var self = this;

  var remote = (new Remote({
      websocket_ip: ws_ip,
      websocket_port: ws_port,
      trace: false
    }))
    .on('state', function (m) {
      console.log("STATE: %s", m);

      self.state = m;
    })
    // .once('ledger_closed', callback)
    .connect()
    ;

  self.base = {
    hostname: ip,
    port: port,
    remote: remote,
  };

  // console.log("SERVE");
  var server = http.createServer(function (req, res) {
    var input = "";

    req.setEncoding();

    req.on('data', function (buffer) {
      // console.log("DATA: %s", buffer);
      input = input + buffer;
    });

    req.on('end', function () {
      // console.log("URL: %s", req.url);
      // console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));

      var _parsed = url.parse(req.url, true);
      var _url = JSON.stringify(_parsed, undefined, 2);

      // console.log("HEADERS: %s", JSON.stringify(_parsed, undefined, 2));
      if (_parsed.pathname === "/account") {
        var request = remote
          .request_ledger_entry('account_root')
          .ledger_index(-1)
          .account_root(_parsed.query.a)
          .on('success', function (m) {
            // console.log("account_root: %s", JSON.stringify(m, undefined, 2));

            augment_object(m, self.base, function() {
              httpd_response(res,
                {
                  statusCode: 200,
                  url: _url,
                  body: "<PRE>"
                    + JSON.stringify(rewrite_object(m, self.base), undefined, 2)
                    + "</PRE>"
                });
            });
          })
          .request();

      } else if (_parsed.pathname === "/ledger") {
        var request = remote
          .request_ledger(undefined, { expand: true, transactions: true })
          .on('success', function (m) {
            // console.log("Ledger: %s", JSON.stringify(m, undefined, 2));

            httpd_response(res,
              {
                statusCode: 200,
                url: _url,
                body: "<PRE>"
                  + JSON.stringify(rewrite_object(m, self.base), undefined, 2)
                  + "</PRE>"
              });
          })

        if (_parsed.query.l && _parsed.query.l.length === 64) {
          request.ledger_hash(_parsed.query.l);
        }
        else if (_parsed.query.l) {
          request.ledger_index(Number(_parsed.query.l));
        }
        else {
          request.ledger_index(-1);
        }

        request.request();

      } else if (_parsed.pathname === "/transaction") {
        var request = remote
          .request_tx(_parsed.query.h)
          // .request_transaction_entry(_parsed.query.h)
          // .ledger_select(_parsed.query.l)
          .on('success', function (m) {
            // console.log("transaction: %s", JSON.stringify(m, undefined, 2));

            httpd_response(res,
              {
                statusCode: 200,
                url: _url,
                body: "<PRE>"
                  + JSON.stringify(rewrite_object(m, self.base), undefined, 2)
                  + "</PRE>"
              });
          })
          .on('error', function (m) {
            httpd_response(res,
              {
                statusCode: 200,
                url: _url,
                body: "<PRE>"
                  + 'ERROR: ' + JSON.stringify(m, undefined, 2)
                  + "</PRE>"
              });
          })
          .request();

      } else {
        var test = build_uri({
          type: 'account',
          ledger: 'closed',
          account: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
        }, self.base);

        httpd_response(res,
          {
            statusCode: req.url === "/" ? 200 : 404,
            url: _url,
          });
      }
    });
  });

  server.listen(port, ip, undefined,
    function () {
      console.log("Listening at: http://%s:%s", ip, port);
    });
}

// vim:sw=2:sts=2:ts=8:et
@@ -1,24 +0,0 @@

In this directory are two scripts, `build.sh` and `test.sh`, used for building
and testing rippled.

(For now, they assume Bash and Linux. Once I get Windows containers for
testing, I'll try them there, but if Bash is not available, then they will
soon be joined by PowerShell scripts `build.ps` and `test.ps`.)

We don't want these scripts to require arcane invocations that can only be
pieced together from within a CI configuration. We want something that humans
can easily invoke, read, and understand, for when we eventually have to test
and debug them interactively. That means:

(1) They should work with no arguments.
(2) They should document their arguments.
(3) They should expand short arguments into long arguments.

While we want to provide options for common use cases, we don't need to offer
the kitchen sink. We can rightfully expect users with esoteric, complicated
needs to write their own scripts.

To make argument-handling easy for us, the implementers, we can just take all
arguments from environment variables. They have the nice advantage that every
command-line uses named arguments. For the benefit of us and our users, we
document those variables at the top of each script.
@@ -1,31 +0,0 @@

#!/usr/bin/env bash

set -o xtrace
set -o errexit

# The build system. Either 'Unix Makefiles' or 'Ninja'.
GENERATOR=${GENERATOR:-Unix Makefiles}
# The compiler. Either 'gcc' or 'clang'.
COMPILER=${COMPILER:-gcc}
# The build type. Either 'Debug' or 'Release'.
BUILD_TYPE=${BUILD_TYPE:-Debug}
# Additional arguments to CMake.
# We use the `-` substitution here instead of `:-` so that callers can erase
# the default by setting `$CMAKE_ARGS` to the empty string.
CMAKE_ARGS=${CMAKE_ARGS-'-Dwerr=ON'}

# https://gitlab.kitware.com/cmake/cmake/issues/18865
CMAKE_ARGS="-DBoost_NO_BOOST_CMAKE=ON ${CMAKE_ARGS}"

if [[ ${COMPILER} == 'gcc' ]]; then
  export CC='gcc'
  export CXX='g++'
elif [[ ${COMPILER} == 'clang' ]]; then
  export CC='clang'
  export CXX='clang++'
fi

mkdir build
cd build
cmake -G "${GENERATOR}" -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_ARGS} ..
cmake --build . -- -j $(nproc)
@@ -1,41 +0,0 @@

#!/usr/bin/env bash

set -o xtrace
set -o errexit

# Set to 'true' to run the known "manual" tests in rippled.
MANUAL_TESTS=${MANUAL_TESTS:-false}
# The maximum number of concurrent tests.
CONCURRENT_TESTS=${CONCURRENT_TESTS:-$(nproc)}
# The path to rippled.
RIPPLED=${RIPPLED:-build/rippled}
# Additional arguments to rippled.
RIPPLED_ARGS=${RIPPLED_ARGS:-}

function join_by { local IFS="$1"; shift; echo "$*"; }

declare -a manual_tests=(
  'beast.chrono.abstract_clock'
  'beast.unit_test.print'
  'ripple.NodeStore.Timing'
  'ripple.app.Flow_manual'
  'ripple.app.NoRippleCheckLimits'
  'ripple.app.PayStrandAllPairs'
  'ripple.consensus.ByzantineFailureSim'
  'ripple.consensus.DistributedValidators'
  'ripple.consensus.ScaleFreeSim'
  'ripple.tx.CrossingLimits'
  'ripple.tx.FindOversizeCross'
  'ripple.tx.Offer_manual'
  'ripple.tx.OversizeMeta'
  'ripple.tx.PlumpBook'
)

if [[ ${MANUAL_TESTS} == 'true' ]]; then
  RIPPLED_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
else
  RIPPLED_ARGS+=" --unittest --quiet --unittest-log"
fi
RIPPLED_ARGS+=" --unittest-jobs ${CONCURRENT_TESTS}"

${RIPPLED} ${RIPPLED_ARGS}
@@ -1,274 +0,0 @@

#!/usr/bin/env bash
set -ex

function version_ge() { test "$(echo "$@" | tr " " "\n" | sort -rV | head -n 1)" == "$1"; }

__dirname=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
echo "using CC: ${CC}"
"${CC}" --version
export CC

COMPNAME=$(basename $CC)
echo "using CXX: ${CXX:-notset}"
if [[ $CXX ]]; then
  "${CXX}" --version
  export CXX
fi
: ${BUILD_TYPE:=Debug}
echo "BUILD TYPE: ${BUILD_TYPE}"

: ${TARGET:=install}
echo "BUILD TARGET: ${TARGET}"

JOBS=${NUM_PROCESSORS:-2}
if [[ ${TRAVIS:-false} != "true" ]]; then
  JOBS=$((JOBS+1))
fi

if [[ ! -z "${CMAKE_EXE:-}" ]] ; then
  export PATH="$(dirname ${CMAKE_EXE}):$PATH"
fi

if [ -x /usr/bin/time ] ; then
  : ${TIME:="Duration: %E"}
  export TIME
  time=/usr/bin/time
else
  time=
fi

echo "Building rippled"
: ${CMAKE_EXTRA_ARGS:=""}
if [[ ${NINJA_BUILD:-} == true ]]; then
  CMAKE_EXTRA_ARGS+=" -G Ninja"
fi

coverage=false
if [[ "${TARGET}" == "coverage" ]] ; then
  echo "coverage option detected."
  coverage=true
fi

cmake --version
CMAKE_VER=$(cmake --version | cut -d " " -f 3 | head -1)

#
# allow explicit setting of the name of the build
# dir, otherwise default to the compiler.build_type
#
: "${BUILD_DIR:=${COMPNAME}.${BUILD_TYPE}}"
BUILDARGS="--target ${TARGET}"
BUILDTOOLARGS=""
if version_ge $CMAKE_VER "3.12.0" ; then
  BUILDARGS+=" --parallel"
fi

if [[ ${NINJA_BUILD:-} == false ]]; then
  if version_ge $CMAKE_VER "3.12.0" ; then
    BUILDARGS+=" ${JOBS}"
  else
    BUILDTOOLARGS+=" -j ${JOBS}"
  fi
fi

if [[ ${VERBOSE_BUILD:-} == true ]]; then
  CMAKE_EXTRA_ARGS+=" -DCMAKE_VERBOSE_MAKEFILE=ON"
  if version_ge $CMAKE_VER "3.14.0" ; then
    BUILDARGS+=" --verbose"
  else
    if [[ ${NINJA_BUILD:-} == false ]]; then
      BUILDTOOLARGS+=" verbose=1"
    else
      BUILDTOOLARGS+=" -v"
    fi
  fi
fi

if [[ ${USE_CCACHE:-} == true ]]; then
  echo "using ccache with basedir [${CCACHE_BASEDIR:-}]"
  CMAKE_EXTRA_ARGS+=" -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
fi
if [ -d "build/${BUILD_DIR}" ]; then
  rm -rf "build/${BUILD_DIR}"
fi

mkdir -p "build/${BUILD_DIR}"
pushd "build/${BUILD_DIR}"

# cleanup possible artifacts
rm -fv CMakeFiles/CMakeOutput.log CMakeFiles/CMakeError.log
# Clean up NIH directories which should be git repos, but aren't
for nih_path in ${NIH_CACHE_ROOT}/*/*/*/src ${NIH_CACHE_ROOT}/*/*/src
do
  for dir in lz4 snappy rocksdb
  do
    if [ -e ${nih_path}/${dir} -a \! -e ${nih_path}/${dir}/.git ]
    then
      ls -la ${nih_path}/${dir}*
      rm -rfv ${nih_path}/${dir}*
    fi
  done
done

# generate
${time} cmake ../.. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_EXTRA_ARGS}
# Display the cmake output, to help with debugging if something fails
for file in CMakeOutput.log CMakeError.log
do
  if [ -f CMakeFiles/${file} ]
  then
    ls -l CMakeFiles/${file}
    cat CMakeFiles/${file}
  fi
done
# build
export DESTDIR=$(pwd)/_INSTALLED_

${time} eval cmake --build . ${BUILDARGS} -- ${BUILDTOOLARGS}

if [[ ${TARGET} == "docs" ]]; then
  ## mimic the standard test output for docs build
  ## to make controlling processes like jenkins happy
  if [ -f docs/html/index.html ]; then
    echo "1 case, 1 test total, 0 failures"
  else
    echo "1 case, 1 test total, 1 failures"
  fi
  exit
fi
popd

if [[ "${TARGET}" == "validator-keys" ]] ; then
  export APP_PATH="$PWD/build/${BUILD_DIR}/validator-keys/validator-keys"
else
  export APP_PATH="$PWD/build/${BUILD_DIR}/rippled"
fi
echo "using APP_PATH: ${APP_PATH}"

# See what we've actually built
ldd ${APP_PATH}

: ${APP_ARGS:=}

if [[ "${TARGET}" == "validator-keys" ]] ; then
  APP_ARGS="--unittest"
else
  function join_by { local IFS="$1"; shift; echo "$*"; }

  # This is a list of manual tests
  # in rippled that we want to run
  # ORDER matters here...sorted in approximately
  # descending execution time (longest running tests at top)
  declare -a manual_tests=(
    'ripple.ripple_data.reduce_relay_simulate'
    'ripple.tx.Offer_manual'
    'ripple.tx.CrossingLimits'
    'ripple.tx.PlumpBook'
    'ripple.app.Flow_manual'
    'ripple.tx.OversizeMeta'
    'ripple.consensus.DistributedValidators'
    'ripple.app.NoRippleCheckLimits'
    'ripple.ripple_data.compression'
    'ripple.NodeStore.Timing'
    'ripple.consensus.ByzantineFailureSim'
    'beast.chrono.abstract_clock'
    'beast.unit_test.print'
  )
  if [[ ${TRAVIS:-false} != "true" ]]; then
    # these two tests cause travis CI to run out of memory.
    # TODO: investigate possible workarounds.
    manual_tests=(
      'ripple.consensus.ScaleFreeSim'
      'ripple.tx.FindOversizeCross'
      "${manual_tests[@]}"
    )
  fi

  if [[ ${MANUAL_TESTS:-} == true ]]; then
    APP_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
  else
    APP_ARGS+=" --unittest --quiet --unittest-log"
  fi
  if [[ ${coverage} == false && ${PARALLEL_TESTS:-} == true ]]; then
    APP_ARGS+=" --unittest-jobs ${JOBS}"
  fi

  if [[ ${IPV6_TESTS:-} == true ]]; then
    APP_ARGS+=" --unittest-ipv6"
  fi
fi

if [[ ${coverage} == true && $CC =~ ^gcc ]]; then
  # Push the results (lcov.info) to codecov
  codecov -X gcov # don't even try and look for .gcov files ;)
  find . -name "*.gcda" | xargs rm -f
fi

if [[ ${SKIP_TESTS:-} == true ]]; then
  echo "skipping tests."
  exit
fi

ulimit -a
corepat=$(cat /proc/sys/kernel/core_pattern)
if [[ ${corepat} =~ ^[:space:]*\| ]] ; then
  echo "WARNING: core pattern is piping - can't search for core files"
  look_core=false
else
  look_core=true
  coredir=$(dirname ${corepat})
fi
if [[ ${look_core} == true ]]; then
  before=$(ls -A1 ${coredir})
fi

set +e
echo "Running tests for ${APP_PATH}"
if [[ ${MANUAL_TESTS:-} == true && ${PARALLEL_TESTS:-} != true ]]; then
  for t in "${manual_tests[@]}" ; do
    ${APP_PATH} --unittest=${t}
    TEST_STAT=$?
    if [[ $TEST_STAT -ne 0 ]] ; then
      break
    fi
  done
else
  ${APP_PATH} ${APP_ARGS}
  TEST_STAT=$?
fi
set -e

if [[ ${look_core} == true ]]; then
  after=$(ls -A1 ${coredir})
  oIFS="${IFS}"
  IFS=$'\n\r'
  found_core=false
  for l in $(diff -w --suppress-common-lines <(echo "$before") <(echo "$after")) ; do
    if [[ "$l" =~ ^[[:space:]]*\>[[:space:]]*(.+)$ ]] ; then
      corefile="${BASH_REMATCH[1]}"
      echo "FOUND core dump file at '${coredir}/${corefile}'"
      gdb_output=$(/bin/mktemp /tmp/gdb_output_XXXXXXXXXX.txt)
      found_core=true
      gdb \
        -ex "set height 0" \
        -ex "set logging file ${gdb_output}" \
        -ex "set logging on" \
        -ex "print 'ripple::BuildInfo::versionString'" \
        -ex "thread apply all backtrace full" \
        -ex "info inferiors" \
        -ex quit \
        "$APP_PATH" \
        "${coredir}/${corefile}" &> /dev/null

      echo -e "CORE INFO: \n\n $(cat ${gdb_output}) \n\n)"
    fi
  done
  IFS="${oIFS}"
fi

if [[ ${found_core} == true ]]; then
  exit -1
else
  exit $TEST_STAT
fi
@@ -1,36 +0,0 @@

#!/usr/bin/env bash
# run our build script in a docker container
# using travis-ci hosts
set -eux

function join_by { local IFS="$1"; shift; echo "$*"; }

set +x
echo "VERBOSE_BUILD=true" > /tmp/co.env
matchers=(
  'TRAVIS.*' 'CI' 'CC' 'CXX'
  'BUILD_TYPE' 'TARGET' 'MAX_TIME'
  'CODECOV.+' 'CMAKE.*' '.+_TESTS'
  '.+_OPTIONS' 'NINJA.*' 'NUM_.+'
  'NIH_.+' 'BOOST.*' '.*CCACHE.*')

matchstring=$(join_by '|' "${matchers[@]}")
echo "MATCHSTRING IS:: $matchstring"
env | grep -E "^(${matchstring})=" >> /tmp/co.env
set -x
# need to eliminate TRAVIS_CMD...don't want to pass it to the container
cat /tmp/co.env | grep -v TRAVIS_CMD > /tmp/co.env.2
mv /tmp/co.env.2 /tmp/co.env
cat /tmp/co.env
mkdir -p -m 0777 ${TRAVIS_BUILD_DIR}/cores
echo "${TRAVIS_BUILD_DIR}/cores/%e.%p" | sudo tee /proc/sys/kernel/core_pattern
docker run \
  -t --env-file /tmp/co.env \
  -v ${TRAVIS_HOME}:${TRAVIS_HOME} \
  -w ${TRAVIS_BUILD_DIR} \
  --cap-add SYS_PTRACE \
  --ulimit "core=-1" \
  $DOCKER_IMAGE \
  /bin/bash -c 'if [[ $CC =~ ([[:alpha:]]+)-([[:digit:].]+) ]] ; then sudo update-alternatives --set ${BASH_REMATCH[1]} /usr/bin/$CC; fi; bin/ci/ubuntu/build-and-test.sh'
@@ -1,44 +0,0 @@

#!/usr/bin/env bash
# some cached files create churn, so save them here for
# later restoration before packing the cache
set -eux
clean_cache="travis_clean_cache"
if [[ ! ( "${TRAVIS_JOB_NAME}" =~ "windows" || \
      "${TRAVIS_JOB_NAME}" =~ "prereq-keep" ) ]] && \
   ( [[ "${TRAVIS_COMMIT_MESSAGE}" =~ "${clean_cache}" ]] || \
     ( [[ -v TRAVIS_PULL_REQUEST_SHA && \
          "${TRAVIS_PULL_REQUEST_SHA}" != "" ]] && \
       git log -1 "${TRAVIS_PULL_REQUEST_SHA}" | grep -cq "${clean_cache}" -
     )
   )
then
  find ${TRAVIS_HOME}/_cache -maxdepth 2 -type d
  rm -rf ${TRAVIS_HOME}/_cache
  mkdir -p ${TRAVIS_HOME}/_cache
fi

pushd ${TRAVIS_HOME}
if [ -f cache_ignore.tar ] ; then
  rm -f cache_ignore.tar
fi

if [ -d _cache/nih_c ] ; then
  find _cache/nih_c -name "build.ninja" | tar rf cache_ignore.tar --files-from -
  find _cache/nih_c -name ".ninja_deps" | tar rf cache_ignore.tar --files-from -
  find _cache/nih_c -name ".ninja_log" | tar rf cache_ignore.tar --files-from -
  find _cache/nih_c -name "*.log" | tar rf cache_ignore.tar --files-from -
  find _cache/nih_c -name "*.tlog" | tar rf cache_ignore.tar --files-from -
  # show .a files in the cache, for sanity checking
  find _cache/nih_c -name "*.a" -ls
fi

if [ -d _cache/ccache ] ; then
  find _cache/ccache -name "stats" | tar rf cache_ignore.tar --files-from -
fi

if [ -f cache_ignore.tar ] ; then
  tar -tf cache_ignore.tar
fi
popd
@@ -1,64 +0,0 @@

var ripple = require('ripple-lib');

var v = {
  seed: "snoPBrXtMeMyMHUVTgbuqAfg1SUTb",
  addr: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh"
};

var remote = ripple.Remote.from_config({
  "trusted" : true,
  "websocket_ip" : "127.0.0.1",
  "websocket_port" : 5006,
  "websocket_ssl" : false,
  "local_signing" : true
});

var tx_json = {
  "Account" : v.addr,
  "Amount" : "10000000",
  "Destination" : "rEu2ULPiEQm1BAL8pYzmXnNX1aFX9sCks",
  "Fee" : "10",
  "Flags" : 0,
  "Sequence" : 3,
  "TransactionType" : "Payment"

  //"SigningPubKey": '0396941B22791A448E5877A44CE98434DB217D6FB97D63F0DAD23BE49ED45173C9'
};

remote.on('connected', function () {
  var req = remote.request_sign(v.seed, tx_json);
  req.message.debug_signing = true;
  req.on('success', function (result) {
    console.log("SERVER RESULT");
    console.log(result);

    var sim = {};
    var tx = remote.transaction();
    tx.tx_json = tx_json;
    tx._secret = v.seed;
    tx.complete();
    var unsigned = tx.serialize().to_hex();
    tx.sign();

    sim.tx_blob = tx.serialize().to_hex();
    sim.tx_json = tx.tx_json;
    sim.tx_signing_hash = tx.signing_hash().to_hex();
    sim.tx_unsigned = unsigned;

    console.log("\nLOCAL RESULT");
    console.log(sim);

    remote.connect(false);
  });
  req.on('error', function (err) {
    if (err.error === "remoteError" && err.remote.error === "srcActNotFound") {
      console.log("Please fund account " + v.addr + " to run this test.");
    } else {
      console.log('error', err);
    }
    remote.connect(false);
  });
  req.request();

});
remote.connect();
@@ -1,18 +0,0 @@
#!/usr/bin/node
//
// Returns a Gravatar style hash as per: http://en.gravatar.com/site/implement/hash/
//

if (3 != process.argv.length) {
  process.stderr.write("Usage: " + process.argv[1] + " email_address\n\nReturns gravatar style hash.\n");
  process.exit(1);

} else {
  var md5 = require('crypto').createHash('md5');

  md5.update(process.argv[2].trim().toLowerCase());

  process.stdout.write(md5.digest('hex') + "\n");
}

// vim:sw=2:sts=2:ts=8:et
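
A quick usage sketch (the script's filename is an assumption; it is not shown in this hunk):

$ node bin/gravatar_hash.js alice@example.com   # prints the 32-character hex MD5 digest
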
@@ -1,31 +0,0 @@
#!/usr/bin/node
//
// This program allows IE 9 ripple-clients to make websocket connections to
// rippled using flash. As IE 9 does not have websocket support, this is
// required if you wish to support IE 9 ripple-clients.
//
// http://www.lightsphere.com/dev/articles/flash_socket_policy.html
//
// For better security, be sure to set the Port below to the port of your
// [websocket_public_port].
//

var net = require("net"),
    port = "*",
    domains = ["*:"+port]; // Domain:Port

net.createServer(
  function(socket) {
    socket.write("<?xml version='1.0' ?>\n");
    socket.write("<!DOCTYPE cross-domain-policy SYSTEM 'http://www.macromedia.com/xml/dtds/cross-domain-policy.dtd'>\n");
    socket.write("<cross-domain-policy>\n");
    domains.forEach(
      function(domain) {
        var parts = domain.split(':');
        socket.write("\t<allow-access-from domain='" + parts[0] + "' to-ports='" + parts[1] + "' />\n");
      }
    );
    socket.write("</cross-domain-policy>\n");
    socket.end();
  }
).listen(843);

@@ -1,150 +0,0 @@
#!/usr/bin/env bash

# This script generates information about your rippled installation
# and system. It can be used to help debug issues that you may face
# in your installation. While this script endeavors to not display any
# sensitive information, it is recommended that you read the output
# before sharing with any third parties.


rippled_exe=/opt/ripple/bin/rippled
conf_file=/etc/opt/ripple/rippled.cfg

while getopts ":e:c:" opt; do
  case $opt in
    e)
      rippled_exe=${OPTARG}
      ;;
    c)
      conf_file=${OPTARG}
      ;;
    \?)
      echo "Invalid option: -$OPTARG"
      exit -1
  esac
done

tmp_loc=$(mktemp -d --tmpdir ripple_info.XXXXX)
chmod 751 ${tmp_loc}
awk_prog=${tmp_loc}/cfg.awk
summary_out=${tmp_loc}/rippled_info.md
printf "# rippled report info\n\n> generated at %s\n" "$(date -R)" > ${summary_out}

function log_section {
  printf "\n## %s\n" "$*" >> ${summary_out}

  while read -r l; do
    echo " $l" >> ${summary_out}
  done </dev/stdin
}

function join_by {
  local IFS="$1"; shift; echo "$*";
}

if [[ -f ${conf_file} ]] ; then
  exclude=( ips ips_fixed node_seed validation_seed validator_token )
  cleaned_conf=${tmp_loc}/cleaned_rippled_cfg.txt
  cat << 'EOP' >> ${awk_prog}
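# (annotation added for readability; comment lines are valid in the awk
# program below) The program is a small state machine: skip=1 suppresses
# lines inside an excluded section, save=1 marks the [database_path]
# header, save=2 captures the following non-blank line as the path, and
# any key containing "password" is redacted before printing to OUT_FILE.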
BEGIN {FS="[[:space:]]*=[[:space:]]*"; skip=0; db_path=""; print > OUT_FILE; split(exl,exa,"|")}
/^#/ {next}
save==2 && /^[[:space:]]*$/ {next}
/^\[.+\]$/ {
  section=tolower(gensub(/^\[[[:space:]]*([a-zA-Z_]+)[[:space:]]*\]$/, "\\1", "g"))
  skip = 0
  for (i in exa) {
    if (section == exa[i])
      skip = 1
  }
  if (section == "database_path")
    save = 1
}
skip==1 {next}
save==2 {save=0; db_path=$0}
save==1 {save=2}
$1 ~ /password/ {$0=$1"=<redacted>"}
{print >> OUT_FILE}
END {print db_path}
EOP

  db=$(\
    sed -r -e 's/\<s[[:alnum:]]{28}\>/<redactedsecret>/g;s/^[[:space:]]*//;s/[[:space:]]*$//' ${conf_file} |\
    awk -v OUT_FILE=${cleaned_conf} -v exl="$(join_by '|' "${exclude[@]}")" -f ${awk_prog})
  rm ${awk_prog}
  cat ${cleaned_conf} | log_section "cleaned config file"
  rm ${cleaned_conf}
  echo "${db}" | log_section "database path"
  df ${db} | log_section "df: database"
fi

# Send output from this script to a log file
## this captures any messages
## or errors from the script itself

log_file=${tmp_loc}/get_info.log
exec 3>&1 1>>${log_file} 2>&1

## Send all stdout files to /tmp

if [[ -x ${rippled_exe} ]] ; then
  pgrep rippled && \
    ${rippled_exe} --conf ${conf_file} \
    -- server_info | log_section "server info"
fi

cat /proc/meminfo | log_section "meminfo"
cat /proc/swaps | log_section "swap space"
ulimit -a | log_section "ulimit"

if command -v lshw >/dev/null 2>&1 ; then
  lshw 2>/dev/null | log_section "hardware info"
else
  lscpu > ${tmp_loc}/hw_info.txt
  hwinfo >> ${tmp_loc}/hw_info.txt
  lspci >> ${tmp_loc}/hw_info.txt
  lsblk >> ${tmp_loc}/hw_info.txt
  cat ${tmp_loc}/hw_info.txt | log_section "hardware info"
  rm ${tmp_loc}/hw_info.txt
fi

if command -v iostat >/dev/null 2>&1 ; then
  iostat -t -d -x 2 6 | log_section "iostat"
fi

df -h | log_section "free disk space"
drives=($(df | awk '$1 ~ /^\/dev\// {print $1}' | xargs -n 1 basename))
block_devs=($(ls /sys/block/))
for d in "${drives[@]}"; do
  for dev in "${block_devs[@]}"; do
    #echo "D: [$d], DEV: [$dev]"
    if [[ $d =~ $dev ]]; then
      # this file (if it exists) has 0 for SSD and 1 for HDD
      if [[ "$(cat /sys/block/${dev}/queue/rotational 2>/dev/null)" == 0 ]] ; then
        echo "${d} : SSD" >> ${tmp_loc}/is_ssd.txt
      else
        echo "${d} : NO SSD" >> ${tmp_loc}/is_ssd.txt
      fi
    fi
  done
done

if [[ -f ${tmp_loc}/is_ssd.txt ]] ; then
  cat ${tmp_loc}/is_ssd.txt | log_section "SSD"
  rm ${tmp_loc}/is_ssd.txt
fi

cat ${log_file} | log_section "script log"

cat << MSG | tee /dev/fd/3
####################################################
rippled info has been gathered. Please copy the
contents of ${summary_out}
to a github gist at https://gist.github.com/

PLEASE REVIEW THIS FILE FOR ANY SENSITIVE DATA
BEFORE POSTING! We have tried our best to omit
any sensitive information from this file, but you
should verify before posting.
####################################################
MSG

85
bin/git/setup-upstreams.sh
Executable file
@@ -0,0 +1,85 @@
#!/bin/bash

if [[ $# -ne 1 || "$1" == "--help" || "$1" == "-h" ]]
then
  name=$( basename $0 )
  cat <<- USAGE
Usage: $name <username>

Where <username> is the Github username of the upstream repo. e.g. XRPLF
USAGE
  exit 0
fi

# Create upstream remotes based on origin
user="$1"
shift
# Get the origin URL. Expect it to be an SSH-style URL
origin=$( git remote get-url origin )
if [[ "${origin}" == "" ]]
then
  echo Invalid origin remote >&2
  exit 1
fi
# echo "Origin: ${origin}"
# Parse the origin
ifs_orig="${IFS}"
IFS=':' read remote originpath <<< "${origin}"
# echo "Remote: ${remote}, Originpath: ${originpath}"
IFS='@' read sshuser server <<< "${remote}"
# echo "SSHUser: ${sshuser}, Server: ${server}"
IFS='/' read originuser repo <<< "${originpath}"
# echo "Originuser: ${originuser}, Repo: ${repo}"
if [[ "${sshuser}" == "" || "${server}" == "" || "${originuser}" == ""
    || "${repo}" == "" ]]
then
  echo "Can't parse origin URL: ${origin}" >&2
  exit 1
fi
upstream="https://${server}/${user}/${repo}"
upstreampush="${remote}:${user}/${repo}"
upstreamgroup="upstream upstream-push"
current=$( git remote get-url upstream 2>/dev/null )
currentpush=$( git remote get-url upstream-push 2>/dev/null )
currentgroup=$( git config remotes.upstreams )
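# _run is used below but is not defined in this hunk; a minimal echoing
# wrapper of the following shape is assumed (hypothetical) so the script
# is self-contained:
_run() { echo "+ $*" >&2; "$@"; }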
if [[ "${current}" == "${upstream}" ]]
then
  echo "Upstream already set up correctly. Skip"
elif [[ -n "${current}" && "${current}" != "${upstream}" &&
    "${current}" != "${upstreampush}" ]]
then
  echo "Upstream already set up as: ${current}. Skip"
else
  if [[ "${current}" == "${upstreampush}" ]]
  then
    echo "Upstream set to dangerous push URL. Update."
    _run git remote rename upstream upstream-push || \
      _run git remote remove upstream
    currentpush=$( git remote get-url upstream-push 2>/dev/null )
  fi
  _run git remote add upstream "${upstream}"
fi

if [[ "${currentpush}" == "${upstreampush}" ]]
then
  echo "upstream-push already set up correctly. Skip"
elif [[ -n "${currentpush}" && "${currentpush}" != "${upstreampush}" ]]
then
  echo "upstream-push already set up as: ${currentpush}. Skip"
else
  _run git remote add upstream-push "${upstreampush}"
fi

if [[ "${currentgroup}" == "${upstreamgroup}" ]]
then
  echo "Upstreams group already set up correctly. Skip"
elif [[ -n "${currentgroup}" && "${currentgroup}" != "${upstreamgroup}" ]]
then
  echo "Upstreams group already set up as: ${currentgroup}. Skip"
else
  _run git config --add remotes.upstreams "${upstreamgroup}"
fi

_run git fetch --jobs=$(nproc) upstreams

exit 0

68
bin/git/squash-branches.sh
Executable file
@@ -0,0 +1,68 @@
#!/bin/bash

if [[ $# -lt 3 || "$1" == "--help" || "$1" == "-h" ]]
then
  name=$( basename $0 )
  cat <<- USAGE
Usage: $name workbranch base/branch user/branch [user/branch [...]]

* workbranch will be created locally from base/branch
* base/branch and user/branch may be specified as user:branch to allow
  easy copying from Github PRs
* Remotes for each user must already be set up
USAGE
  exit 0
fi

work="$1"
shift

branches=( $( echo "${@}" | sed "s/:/\//g" ) )
base="${branches[0]}"
unset branches[0]

set -e

users=()
for b in "${branches[@]}"
do
  users+=( $( echo $b | cut -d/ -f1 ) )
done

users=( $( printf '%s\n' "${users[@]}" | sort -u ) )

git fetch --multiple upstreams "${users[@]}"
git checkout -B "$work" --no-track "$base"

for b in "${branches[@]}"
do
  git merge --squash "${b}"
  git commit -S # Use the commit message provided on the PR
done

# Make sure the commits look right
git log --show-signature "$base..HEAD"

parts=( $( echo $base | sed "s/\// /" ) )
repo="${parts[0]}"
b="${parts[1]}"
push=$repo
if [[ "$push" == "upstream" ]]
then
  push="upstream-push"
fi
if [[ "$repo" == "upstream" ]]
then
  repo="upstreams"
fi
cat << PUSH

-------------------------------------------------------------------
This script will not push. Verify everything is correct, then push
to your repo, and create a PR if necessary. Once the PR is approved,
run:

git push $push HEAD:$b
git fetch $repo
-------------------------------------------------------------------
PUSH

58
bin/git/update-version.sh
Executable file
@@ -0,0 +1,58 @@
#!/bin/bash

if [[ $# -ne 3 || "$1" == "--help" || "$1" == "-h" ]]
then
  name=$( basename $0 )
  cat <<- USAGE
Usage: $name workbranch base/branch version

* workbranch will be created locally from base/branch. If it exists,
  it will be reused, so make sure you don't overwrite any work.
* base/branch may be specified as user:branch to allow easy copying
  from Github PRs.
USAGE
  exit 0
fi

work="$1"
shift

base=$( echo "$1" | sed "s/:/\//" )
shift

version=$1
shift

set -e

git fetch upstreams

git checkout -B "${work}" --no-track "${base}"

push=$( git rev-parse --abbrev-ref --symbolic-full-name '@{push}' \
    2>/dev/null ) || true
if [[ "${push}" != "" ]]
then
  echo "Warning: ${push} may already exist."
fi

build=$( find -name BuildInfo.cpp )
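# (added annotation) The pipeline below rewrites versionString in
# BuildInfo.cpp. diff exits 0 when the two files are identical, i.e. the
# version did not actually change, so "diff ... && exit 1" aborts a no-op
# bump; otherwise the edited copy replaces the original via mv.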
sed 's/\(^.*versionString =\).*$/\1 "'${version}'"/' ${build} > version.cpp && \
  diff "${build}" version.cpp && exit 1 || \
  mv -vi version.cpp ${build}

git diff

git add ${build}

git commit -S -m "Set version to ${version}"

git log --oneline --first-parent ${base}^..

cat << PUSH

-------------------------------------------------------------------
This script will not push. Verify everything is correct, then push
to your repo, and create a PR as described in CONTRIBUTING.md.
-------------------------------------------------------------------
PUSH

@@ -1,23 +0,0 @@
#!/usr/bin/node
//
// Returns the hex encoding of a lowercased string.
//

var stringToHex = function (s) {
  return Array.prototype.map.call(s, function (c) {
    var b = c.charCodeAt(0);

    return b < 16 ? "0" + b.toString(16) : b.toString(16);
  }).join("");
};

if (3 != process.argv.length) {
  process.stderr.write("Usage: " + process.argv[1] + " string\n\nReturns the hex encoding of the lowercased string.\n");
  process.exit(1);

} else {

  process.stdout.write(stringToHex(process.argv[2].toLowerCase()) + "\n");
}

// vim:sw=2:sts=2:ts=8:et

@@ -1,42 +0,0 @@
#!/usr/bin/node
//
// This is a tool to issue JSON-RPC requests from the command line.
//
// This can be used to test a JSON-RPC server.
//
// Requires: npm simple-jsonrpc
//

var jsonrpc = require('simple-jsonrpc');

var program = process.argv[1];

if (5 !== process.argv.length) {
  console.log("Usage: %s <URL> <method> <json>", program);
}
else {
  var url = process.argv[2];
  var method = process.argv[3];
  var json_raw = process.argv[4];
  var json;

  try {
    json = JSON.parse(json_raw);
  }
  catch (e) {
    console.log("JSON parse error: %s", e.message);
    throw e;
  }

  var client = jsonrpc.client(url);

  client.call(method, json,
    function (result) {
      console.log(JSON.stringify(result, undefined, 2));
    },
    function (error) {
      console.log(JSON.stringify(error, undefined, 2));
    });
}

// vim:sw=2:sts=2:ts=8:et
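
A usage sketch against a local JSON-RPC server (the script's filename, URL, and method are illustrative assumptions):

$ node bin/jsonrpc_request.js http://127.0.0.1:5005 server_info '{}'
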
@@ -1,68 +0,0 @@
#!/usr/bin/node
//
// This is a tool to listen for JSON-RPC requests at an IP and port.
//
// This will report the request to console and echo back the request as the response.
//

var http = require("http");

var program = process.argv[1];

if (4 !== process.argv.length) {
  console.log("Usage: %s <ip> <port>", program);
}
else {
  var ip = process.argv[2];
  var port = process.argv[3];

  var server = http.createServer(function (req, res) {
    console.log("CONNECT");
    var input = "";

    req.setEncoding();

    req.on('data', function (buffer) {
      // console.log("DATA: %s", buffer);
      input = input + buffer;
    });

    req.on('end', function () {
      // console.log("END");

      var json_req;

      console.log("URL: %s", req.url);
      console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));

      try {
        json_req = JSON.parse(input);

        console.log("REQ: %s", JSON.stringify(json_req, undefined, 2));
      }
      catch (e) {
        console.log("BAD JSON: %s", e.message);

        json_req = { error : e.message }
      }

      res.statusCode = 200;
      res.end(JSON.stringify({
        jsonrpc: "2.0",
        result: { request : json_req },
        id: req.id
      }));
    });

    req.on('close', function () {
      console.log("CLOSE");
    });
  });

  server.listen(port, ip, undefined,
    function () {
      console.log("Listening at: %s:%s", ip, port);
    });
}

// vim:sw=2:sts=2:ts=8:et
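
The echo server pairs naturally with the request tool above (filenames assumed, as before):

$ node bin/jsonrpc_server.js 127.0.0.1 8080 &
$ node bin/jsonrpc_request.js http://127.0.0.1:8080 echo '{"hello": "world"}'
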
252
bin/rlint.js
@@ -1,252 +0,0 @@
#!/usr/bin/node

var async = require('async');
var Remote = require('ripple-lib').Remote;
var Transaction = require('ripple-lib').Transaction;
var UInt160 = require('ripple-lib').UInt160;
var Amount = require('ripple-lib').Amount;

var book_key = function (book) {
  return book.taker_pays.currency
    + ":" + book.taker_pays.issuer
    + ":" + book.taker_gets.currency
    + ":" + book.taker_gets.issuer;
};

var book_key_cross = function (book) {
  return book.taker_gets.currency
    + ":" + book.taker_gets.issuer
    + ":" + book.taker_pays.currency
    + ":" + book.taker_pays.issuer;
};

var ledger_verify = function (ledger) {
  var dir_nodes = ledger.accountState.filter(function (entry) {
    return entry.LedgerEntryType === 'DirectoryNode' // Only directories
      && entry.index === entry.RootIndex // Only root nodes
      && 'TakerGetsCurrency' in entry; // Only offer directories
  });

  var books = {};

  dir_nodes.forEach(function (node) {
    var book = {
      taker_gets: {
        currency: UInt160.from_generic(node.TakerGetsCurrency).to_json(),
        issuer: UInt160.from_generic(node.TakerGetsIssuer).to_json()
      },
      taker_pays: {
        currency: UInt160.from_generic(node.TakerPaysCurrency).to_json(),
        issuer: UInt160.from_generic(node.TakerPaysIssuer).to_json()
      },
      quality: Amount.from_quality(node.RootIndex),
      index: node.RootIndex
    };

    books[book_key(book)] = book;

    // console.log(JSON.stringify(node, undefined, 2));
  });

  // console.log(JSON.stringify(dir_entry, undefined, 2));
  console.log("#%s books: %s", ledger.ledger_index, Object.keys(books).length);

  Object.keys(books).forEach(function (key) {
    var book = books[key];
    var key_cross = book_key_cross(book);
    var book_cross = books[key_cross];

    if (book && book_cross && !book_cross.done)
    {
      var book_cross_quality_inverted = Amount.from_json("1.0/1/1").divide(book_cross.quality);

      if (book_cross_quality_inverted.compareTo(book.quality) >= 0)
      {
        // Crossing books
        console.log("crossing: #%s :: %s :: %s :: %s :: %s :: %s :: %s", ledger.ledger_index, key, book.quality.to_text(), book_cross.quality.to_text(), book_cross_quality_inverted.to_text(),
          book.index, book_cross.index);
      }

      book_cross.done = true;
    }
  });

  var ripple_selfs = {};

  var accounts = {};
  var counts = {};

  ledger.accountState.forEach(function (entry) {
    if (entry.LedgerEntryType === 'Offer')
    {
      counts[entry.Account] = (counts[entry.Account] || 0) + 1;
    }
    else if (entry.LedgerEntryType === 'RippleState')
    {
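      // (added annotation) The bitmasks below pair the low- and high-side
      // trust line flags; the exact lsf* constant names for these bit
      // values are assumed from rippled's RippleState flag definitions.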
      if (entry.Flags & (0x10000 | 0x40000))
      {
        counts[entry.LowLimit.issuer] = (counts[entry.LowLimit.issuer] || 0) + 1;
      }

      if (entry.Flags & (0x20000 | 0x80000))
      {
        counts[entry.HighLimit.issuer] = (counts[entry.HighLimit.issuer] || 0) + 1;
      }

      if (entry.HighLimit.issuer === entry.LowLimit.issuer)
        ripple_selfs[entry.Account] = entry;
    }
    else if (entry.LedgerEntryType == 'AccountRoot')
    {
      accounts[entry.Account] = entry;
    }
  });

  var low = 0; // Accounts with too low a count.
  var high = 0;
  var missing_accounts = 0; // Objects with no referencing account.
  var missing_objects = 0; // Accounts specifying an object but having none.

  Object.keys(counts).forEach(function (account) {
    if (account in accounts)
    {
      if (counts[account] !== accounts[account].OwnerCount)
      {
        if (counts[account] < accounts[account].OwnerCount)
        {
          high += 1;
          console.log("%s: high count %s/%s", account, counts[account], accounts[account].OwnerCount);
        }
        else
        {
          low += 1;
          console.log("%s: low count %s/%s", account, counts[account], accounts[account].OwnerCount);
        }
      }
    }
    else
    {
      missing_accounts += 1;

      console.log("%s: missing : count %s", account, counts[account]);
    }
  });

  Object.keys(accounts).forEach(function (account) {
    if (!('OwnerCount' in accounts[account]))
    {
      console.log("%s: bad entry : %s", account, JSON.stringify(accounts[account], undefined, 2));
    }
    else if (!(account in counts) && accounts[account].OwnerCount)
    {
      missing_objects += 1;

      console.log("%s: no objects : %s/%s", account, 0, accounts[account].OwnerCount);
    }
  });

  if (low)
    console.log("counts too low = %s", low);

  if (high)
    console.log("counts too high = %s", high);

  if (missing_objects)
    console.log("missing_objects = %s", missing_objects);

  if (missing_accounts)
    console.log("missing_accounts = %s", missing_accounts);

  if (Object.keys(ripple_selfs).length)
    console.log("RippleState selfs = %s", Object.keys(ripple_selfs).length);

};

var ledger_request = function (remote, ledger_index, done) {
  remote.request_ledger(undefined, {
      accounts: true,
      expand: true,
    })
    .ledger_index(ledger_index)
    .on('success', function (m) {
      // console.log("ledger: ", ledger_index);
      // console.log("ledger: ", JSON.stringify(m, undefined, 2));
      done(m.ledger);
    })
    .on('error', function (m) {
      console.log("error");
      done();
    })
    .request();
};

var usage = function () {
  console.log("rlint.js _websocket_ip_ _websocket_port_ ");
};

var finish = function (remote) {
  remote.disconnect();

  // XXX Because remote.disconnect() doesn't work:
  process.exit();
};

console.log("args: ", process.argv.length);
console.log("args: ", process.argv);

if (process.argv.length < 4) {
  usage();
}
else {
  var remote = Remote.from_config({
      websocket_ip: process.argv[2],
      websocket_port: process.argv[3],
    })
    .once('ledger_closed', function (m) {
      console.log("ledger_closed: ", JSON.stringify(m, undefined, 2));

      if (process.argv.length === 5) {
        var ledger_index = process.argv[4];

        ledger_request(remote, ledger_index, function (l) {
          if (l) {
            ledger_verify(l);
          }

          finish(remote);
        });

      } else if (process.argv.length === 6) {
        var ledger_start = Number(process.argv[4]);
        var ledger_end = Number(process.argv[5]);
        var ledger_cursor = ledger_end;

        async.whilst(
          function () {
            return ledger_start <= ledger_cursor && ledger_cursor <= ledger_end;
          },
          function (callback) {
            // console.log(ledger_cursor);

            ledger_request(remote, ledger_cursor, function (l) {
              if (l) {
                ledger_verify(l);
              }

              --ledger_cursor;

              callback();
            });
          },
          function (error) {
            finish(remote);
          });

      } else {
        finish(remote);
      }
    })
    .connect();
}

// vim:sw=2:sts=2:ts=8:et

@@ -1,51 +0,0 @@
#!/usr/bin/env bash
set -exu

: ${TRAVIS_BUILD_DIR:=""}
: ${VCPKG_DIR:=".vcpkg"}
export VCPKG_ROOT=${VCPKG_DIR}
: ${VCPKG_DEFAULT_TRIPLET:="x64-windows-static"}

export VCPKG_DEFAULT_TRIPLET

EXE="vcpkg"
if [[ -n ${COMSPEC:-} ]]; then
  # COMSPEC is set on Windows, where the bootstrapped binary is vcpkg.exe
  EXE="${EXE}.exe"
fi

if [[ -d "${VCPKG_DIR}" && -x "${VCPKG_DIR}/${EXE}" && -d "${VCPKG_DIR}/installed" ]] ; then
  echo "Using cached vcpkg at ${VCPKG_DIR}"
  ${VCPKG_DIR}/${EXE} list
else
  if [[ -d "${VCPKG_DIR}" ]] ; then
    rm -rf "${VCPKG_DIR}"
  fi
  git clone --branch 2021.04.30 https://github.com/Microsoft/vcpkg.git ${VCPKG_DIR}
  pushd ${VCPKG_DIR}
  BSARGS=()
  if [[ "$(uname)" == "Darwin" ]] ; then
    BSARGS+=(--allowAppleClang)
  fi
  if [[ -z ${COMSPEC:-} ]]; then
    chmod +x ./bootstrap-vcpkg.sh
    time ./bootstrap-vcpkg.sh "${BSARGS[@]}"
  else
    time ./bootstrap-vcpkg.bat
  fi
  popd
fi

# TODO: bring boost in this way as well ?
# NOTE: can pin specific ports to a commit/version like this:
# git checkout <SOME COMMIT HASH> ports/boost
if [ $# -eq 0 ]; then
  echo "No extra packages specified..."
  PKGS=()
else
  PKGS=( "$@" )
fi
for LIB in "${PKGS[@]}"; do
  time ${VCPKG_DIR}/${EXE} --clean-after-build install ${LIB}
done

@@ -1,40 +0,0 @@

# NOTE: must be sourced from a shell so it can export vars

cat << BATCH > ./getenv.bat
CALL %*
SET
BATCH

while read line ; do
  IFS='"' read x path arg <<<"${line}"
  if [ -f "${path}" ] ; then
    echo "FOUND: $path"
    export VCINSTALLDIR=$(./getenv.bat "${path}" ${arg} | grep "^VCINSTALLDIR=" | sed -E "s/^VCINSTALLDIR=//g")
    if [ "${VCINSTALLDIR}" != "" ] ; then
      echo "USING ${VCINSTALLDIR}"
      export LIB=$(./getenv.bat "${path}" ${arg} | grep "^LIB=" | sed -E "s/^LIB=//g")
      export LIBPATH=$(./getenv.bat "${path}" ${arg} | grep "^LIBPATH=" | sed -E "s/^LIBPATH=//g")
      export INCLUDE=$(./getenv.bat "${path}" ${arg} | grep "^INCLUDE=" | sed -E "s/^INCLUDE=//g")
      ADDPATH=$(./getenv.bat "${path}" ${arg} | grep "^PATH=" | sed -E "s/^PATH=//g")
      export PATH="${ADDPATH}:${PATH}"
      break
    fi
  fi
done <<EOL
"C:/Program Files (x86)/Microsoft Visual Studio/2019/BuildTools/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio/2019/Community/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio/2017/BuildTools/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio 15.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 13.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 12.0/VC/vcvarsall.bat" amd64
EOL
# TODO: update the list above as needed to support newer versions of msvc tools

rm -f getenv.bat

if [ "${VCINSTALLDIR}" = "" ] ; then
  echo "No compatible visual studio found!"
fi

@@ -1,246 +0,0 @@
#!/usr/bin/env python
"""A script to test rippled in an infinite loop of start-sync-stop.

- Requires Python 3.7+.
- Can be stopped with SIGINT.
- Has no dependencies outside the standard library.
"""

import sys

assert sys.version_info.major == 3 and sys.version_info.minor >= 7

import argparse
import asyncio
import configparser
import contextlib
import json
import logging
import os
from pathlib import Path
import platform
import subprocess
import time
import urllib.error
import urllib.request

# Enable asynchronous subprocesses on Windows. The default changed in 3.8.
# https://docs.python.org/3.7/library/asyncio-platforms.html#subprocess-support-on-windows
if (platform.system() == 'Windows' and sys.version_info.major == 3
        and sys.version_info.minor < 8):
    asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())

DEFAULT_EXE = 'rippled'
DEFAULT_CONFIGURATION_FILE = 'rippled.cfg'
# Number of seconds to wait before forcefully terminating.
PATIENCE = 120
# Number of contiguous seconds in a sync state to be considered synced.
DEFAULT_SYNC_DURATION = 60
# Number of seconds between polls of state.
DEFAULT_POLL_INTERVAL = 5
SYNC_STATES = ('full', 'validating', 'proposing')


def read_config(config_file):
    # strict = False: Allow duplicate keys, e.g. [rpc_startup].
    # allow_no_value = True: Allow keys with no values. Generally, these
    # instances use the "key" as the value, and the section name is the key,
    # e.g. [debug_logfile].
    # delimiters = ('='): Allow ':' as a character in Windows paths. Some of
    # our "keys" are actually values, and we don't want to split them on ':'.
    config = configparser.ConfigParser(
        strict=False,
        allow_no_value=True,
        delimiters=('='),
    )
    config.read(config_file)
    return config


def to_list(value, separator=','):
    """Parse a list from a delimited string value."""
    return [s.strip() for s in value.split(separator) if s]


def find_log_file(config_file):
    """Try to figure out what log file the user has chosen. Raises all kinds
    of exceptions if there is any possibility of ambiguity."""
    config = read_config(config_file)
    values = list(config['debug_logfile'].keys())
    if len(values) < 1:
        raise ValueError(
            f'no [debug_logfile] in configuration file: {config_file}')
    if len(values) > 1:
        raise ValueError(
            f'too many [debug_logfile] in configuration file: {config_file}')
    return values[0]


def find_http_port(config_file):
    config = read_config(config_file)
    names = list(config['server'].keys())
    for name in names:
        server = config[name]
        if 'http' in to_list(server.get('protocol', '')):
            return int(server['port'])
    raise ValueError('no server in [server] for "http" protocol')


@contextlib.asynccontextmanager
async def rippled(exe=DEFAULT_EXE, config_file=DEFAULT_CONFIGURATION_FILE):
    """A context manager for a rippled process."""
    # Start the server.
    process = await asyncio.create_subprocess_exec(
        str(exe),
        '--conf',
        str(config_file),
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    logging.info(f'rippled started with pid {process.pid}')
    try:
        yield process
    finally:
        # Ask it to stop.
        logging.info(f'asking rippled (pid: {process.pid}) to stop')
        start = time.time()
        process.terminate()

        # Wait nicely.
        try:
            await asyncio.wait_for(process.wait(), PATIENCE)
        except asyncio.TimeoutError:
            # Ask the operating system to kill it.
            logging.warning(f'killing rippled ({process.pid})')
            try:
                process.kill()
            except ProcessLookupError:
                pass

        code = await process.wait()
        end = time.time()
        logging.info(
            f'rippled stopped after {end - start:.1f} seconds with code {code}'
        )


async def sync(
    port,
    *,
    duration=DEFAULT_SYNC_DURATION,
    interval=DEFAULT_POLL_INTERVAL,
):
    """Poll rippled on an interval until it has been synced for a duration."""
    start = time.perf_counter()
    while (time.perf_counter() - start) < duration:
        await asyncio.sleep(interval)

        request = urllib.request.Request(
            f'http://127.0.0.1:{port}',
            data=json.dumps({
                'method': 'server_state'
            }).encode(),
            headers={'Content-Type': 'application/json'},
        )
        with urllib.request.urlopen(request) as response:
            try:
                body = json.loads(response.read())
            except json.JSONDecodeError as cause:
                logging.warning(f'server_state returned non-JSON: {cause}')
                start = time.perf_counter()
                continue

        try:
            state = body['result']['state']['server_state']
        except KeyError as cause:
            logging.warning(f'server_state response missing key: {cause}')
            start = time.perf_counter()
            continue
        logging.info(f'server_state: {state}')
        if state not in SYNC_STATES:
            # Require a contiguous sync state.
            start = time.perf_counter()


async def loop(test,
               *,
               exe=DEFAULT_EXE,
               config_file=DEFAULT_CONFIGURATION_FILE):
    """
    Start-test-stop rippled in an infinite loop.

    Moves log to a different file after each iteration.
    """
    log_file = find_log_file(config_file)
    id = 0
    while True:
        logging.info(f'iteration: {id}')
        async with rippled(exe, config_file) as process:
            start = time.perf_counter()
            exited = asyncio.create_task(process.wait())
            tested = asyncio.create_task(test())
            # Try to sync as long as the process is running.
            done, pending = await asyncio.wait(
                {exited, tested},
                return_when=asyncio.FIRST_COMPLETED,
            )
            if done == {exited}:
                code = exited.result()
                logging.warning(
                    f'server halted for unknown reason with code {code}')
            else:
                assert done == {tested}
                assert tested.exception() is None
            end = time.perf_counter()
            logging.info(f'synced after {end - start:.0f} seconds')
        os.replace(log_file, f'debug.{id}.log')
        id += 1


logging.basicConfig(
    format='%(asctime)s %(levelname)-8s %(message)s',
    level=logging.INFO,
    datefmt='%Y-%m-%d %H:%M:%S',
)

parser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
    'rippled',
    type=Path,
    nargs='?',
    default=DEFAULT_EXE,
    help='Path to rippled.',
)
parser.add_argument(
    '--conf',
    type=Path,
    default=DEFAULT_CONFIGURATION_FILE,
    help='Path to configuration file.',
)
parser.add_argument(
    '--duration',
    type=int,
    default=DEFAULT_SYNC_DURATION,
    help='Number of contiguous seconds required in a synchronized state.',
)
parser.add_argument(
    '--interval',
    type=int,
    default=DEFAULT_POLL_INTERVAL,
    help='Number of seconds to wait between polls of state.',
)
args = parser.parse_args()

port = find_http_port(args.conf)


def test():
    return sync(port, duration=args.duration, interval=args.interval)


try:
    asyncio.run(loop(test, exe=args.rippled, config_file=args.conf))
except KeyboardInterrupt:
    # Squelch the message. This is a normal mode of exit.
    pass
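
A usage sketch (the flags follow the argparse definitions above; the installed script path is an assumption):

$ python3 bin/start-sync-stop.py /opt/ripple/bin/rippled --conf /etc/opt/ripple/rippled.cfg --duration 60 --interval 5
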
133
bin/stop-test.js
@@ -1,133 +0,0 @@
/* -------------------------------- REQUIRES -------------------------------- */

var child = require("child_process");
var assert = require("assert");

/* --------------------------------- CONFIG --------------------------------- */

if (process.argv[2] == null) {
  [
    'Usage: ',
    '',
    '  `node bin/stop-test.js i,j [rippled_path] [rippled_conf]`',
    '',
    '  Launch rippled and stop it after n seconds for all n in [i, j)',
    '  For all even values of n launch rippled with `--fg`',
    '  For values of n where n % 3 == 0 launch rippled with `--net`\n',
    'Examples: ',
    '',
    '  $ node bin/stop-test.js 5,10',
    ('  $ node bin/stop-test.js 1,4 ' +
      'build/clang.debug/rippled $HOME/.confs/rippled.cfg')
  ]
  .forEach(function(l){console.log(l)});

  process.exit();
} else {
  var testRange = process.argv[2].split(',').map(Number);
  var rippledPath = process.argv[3] || 'build/rippled'
  var rippledConf = process.argv[4] || 'rippled.cfg'
}

var options = {
  env: process.env,
  stdio: 'ignore' // we could dump the child io when it fails abnormally
};

// default args
var conf_args = ['--conf='+rippledConf];
var start_args = conf_args.concat([/*'--net'*/])
var stop_args = conf_args.concat(['stop']);

/* --------------------------------- HELPERS -------------------------------- */

function start(args) {
  return child.spawn(rippledPath, args, options);
}
function stop(rippled) { child.execFile(rippledPath, stop_args, options)}
function secs_l8r(ms, f) {setTimeout(f, ms * 1000); }

function show_results_and_exit(results) {
  console.log(JSON.stringify(results, undefined, 2));
  process.exit();
}

var timeTakes = function (range) {
  function sumRange(n) {return (n+1) * n /2}
  var ret = sumRange(range[1]);
  if (range[0] > 1) {
    ret = ret - sumRange(range[0] - 1)
  }
  var stopping = (range[1] - range[0]) * 0.5;
  return ret + stopping;
}

/* ---------------------------------- TEST ---------------------------------- */

console.log("Test will take ~%s seconds", timeTakes(testRange));

(function oneTest(n /* seconds */, results) {
  if (n >= testRange[1]) {
    // show_results_and_exit(results);
    console.log(JSON.stringify(results, undefined, 2));
    oneTest(testRange[0], []);
    return;
  }

  var args = start_args;
  if (n % 2 == 0) {args = args.concat(['--fg'])}
  if (n % 3 == 0) {args = args.concat(['--net'])}

  var result = {args: args, alive_for: n};
  results.push(result);

  console.log("\nLaunching `%s` with `%s` for %d seconds",
    rippledPath, JSON.stringify(args), n);

  var rippled = start(args);
  console.log("Rippled pid: %d", rippled.pid);

  // defaults
  var b4StopSent = false;
  var stopSent = false;
  var stop_took = null;

  rippled.once('exit', function(){
    if (!stopSent && !b4StopSent) {
      console.warn('\nRippled exited itself before stop was issued');
      process.exit();
    };

    // The io handles close AFTER exit, may have implications for
    // `stdio:'inherit'` option to `child.spawn`.
    rippled.once('close', function() {
      result.stop_took = (+new Date() - stop_took) / 1000; // seconds
      console.log("Stopping after %d seconds took %s seconds",
        n, result.stop_took);
      oneTest(n+1, results);
    });
  });

  secs_l8r(n, function(){
    console.log("Stopping rippled after %d seconds", n);

    // possible race here ?
    // seems highly unlikely, but I was having issues at one point
    b4StopSent=true;
    stop_took = (+new Date());
    // when does `exit` actually get sent?
    stop();
    stopSent=true;

    // Sometimes we want to attach with a debugger.
    if (process.env.ABORT_TESTS_ON_STALL != null) {
      // We wait 30 seconds, and if it hasn't stopped, we abort the process
      secs_l8r(30, function() {
        if (result.stop_took == null) {
          console.log("rippled has stalled");
          process.exit();
        };
      });
    }
  })
}(testRange[0], []));

@@ -1,119 +0,0 @@
/**
 * bin/update_bintypes.js
 *
 * This unholy abomination of a script generates the JavaScript file
 * src/js/bintypes.js from various parts of the C++ source code.
 *
 * This should *NOT* be part of any automatic build process unless the C++
 * source data are brought into a more easily parseable format. Until then,
 * simply run this script manually and fix as needed.
 */

// XXX: Process LedgerFormats.(h|cpp) as well.

var filenameProto = __dirname + '/../src/cpp/ripple/SerializeProto.h',
    filenameTxFormatsH = __dirname + '/../src/cpp/ripple/TransactionFormats.h',
    filenameTxFormats = __dirname + '/../src/cpp/ripple/TransactionFormats.cpp';

var fs = require('fs');

var output = [];

// Stage 1: Get the field types and codes from SerializeProto.h
var types = {},
    fields = {};
String(fs.readFileSync(filenameProto)).split('\n').forEach(function (line) {
  line = line.replace(/^\s+|\s+$/g, '').replace(/\s+/g, '');
  if (!line.length || line.slice(0, 2) === '//' || line.slice(-1) !== ')') return;

  var tmp = line.slice(0, -1).split('('),
      type = tmp[0],
      opts = tmp[1].split(',');

  if (type === 'TYPE') types[opts[1]] = [opts[0], +opts[2]];
  else if (type === 'FIELD') fields[opts[0]] = [types[opts[1]][0], +opts[2]];
});

output.push('var ST = require("./serializedtypes");');
output.push('');
output.push('var REQUIRED = exports.REQUIRED = 0,');
output.push('    OPTIONAL = exports.OPTIONAL = 1,');
output.push('    DEFAULT  = exports.DEFAULT  = 2;');
output.push('');

function pad(s, n) { while (s.length < n) s += ' '; return s; }
function padl(s, n) { while (s.length < n) s = ' '+s; return s; }

Object.keys(types).forEach(function (type) {
  output.push(pad('ST.'+types[type][0]+'.id', 25) + ' = '+types[type][1]+';');
});
output.push('');

// Stage 2: Get the transaction type IDs from TransactionFormats.h
var ttConsts = {};
String(fs.readFileSync(filenameTxFormatsH)).split('\n').forEach(function (line) {
  var regex = /tt([A-Z_]+)\s+=\s+([0-9-]+)/;
  var match = line.match(regex);
  if (match) ttConsts[match[1]] = +match[2];
});

// Stage 3: Get the transaction formats from TransactionFormats.cpp
var base = [],
    sections = [],
    current = base;
String(fs.readFileSync(filenameTxFormats)).split('\n').forEach(function (line) {
  line = line.replace(/^\s+|\s+$/g, '').replace(/\s+/g, '');

  var d_regex = /DECLARE_TF\(([A-Za-z]+),tt([A-Z_]+)/;
  var d_match = line.match(d_regex);

  var s_regex = /SOElement\(sf([a-z]+),SOE_(REQUIRED|OPTIONAL|DEFAULT)/i;
  var s_match = line.match(s_regex);

  if (d_match) sections.push(current = [d_match[1], ttConsts[d_match[2]]]);
  else if (s_match) current.push([s_match[1], s_match[2]]);
});

function removeFinalComma(arr) {
  arr[arr.length-1] = arr[arr.length-1].slice(0, -1);
}

output.push('var base = [');
base.forEach(function (field) {
  var spec = fields[field[0]];
  output.push('  [ '+
    pad("'"+field[0]+"'", 21)+', '+
    pad(field[1], 8)+', '+
    padl(""+spec[1], 2)+', '+
    'ST.'+pad(spec[0], 3)+
    ' ],');
});
removeFinalComma(output);
output.push('];');
output.push('');

output.push('exports.tx = {');
sections.forEach(function (section) {
  var name = section.shift(),
      ttid = section.shift();

  output.push('  '+name+': ['+ttid+'].concat(base, [');
  section.forEach(function (field) {
    var spec = fields[field[0]];
    output.push('    [ '+
      pad("'"+field[0]+"'", 21)+', '+
      pad(field[1], 8)+', '+
      padl(""+spec[1], 2)+', '+
      'ST.'+pad(spec[0], 3)+
      ' ],');
  });
  removeFinalComma(output);
  output.push('  ]),');
});
removeFinalComma(output);
output.push('};');
output.push('');

console.log(output.join('\n'));

@@ -1,10 +0,0 @@
#!/bin/sh

# Execute this script with a running Postgres server on the current host.
# It should work with the most generic installation of Postgres,
# and is necessary for rippled to store data in Postgres.

# usage: sudo -u postgres ./initdb.sh
psql -c "CREATE USER rippled"
psql -c "CREATE DATABASE rippled WITH OWNER = rippled"
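
A hedged follow-up check, not part of the original script, to confirm the role and database exist:

$ sudo -u postgres psql -c "\du rippled"
$ sudo -u postgres psql -l | grep rippled
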
@@ -13,7 +13,7 @@
#
#   4. HTTPS Client
#
#   5. <vacated>
#
#   6. Database
#
@@ -396,8 +396,8 @@
#       true - enables compression
#       false - disables compression [default].
#
#   The rippled server can save bandwidth by compressing its peer-to-peer communications,
#   at a cost of greater CPU usage. If you enable link compression,
#   the server automatically compresses communications with peer servers
#   that also have link compression enabled.
#   https://xrpl.org/enable-link-compression.html
@@ -410,13 +410,17 @@
#   starter list is included in the code and used if no other hostnames are
#   available.
#
#   One address or domain name per line is allowed. A port may be specified
#   after adding a space to the address. If a port is not specified, the default
#   port of 2459 will be used. Many servers still use the legacy port of 51235.
#   To connect to such servers, you must specify the port number. The ordering
#   of entries does not generally matter.
#
#   The default list of entries is:
#    - r.ripple.com 51235
#    - sahyadri.isrdc.in 51235
#    - hubs.xrpkuwait.com 51235
#    - hub.xrpl-commons.org 51235
#
#   Examples:
#
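#       (illustrative entries following the format described above; the
#       file's original examples fall outside this hunk)
#       192.168.0.1
#       192.168.0.1 2459
#       r.ripple.com 51235
#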
@@ -883,119 +887,6 @@
#
#-------------------------------------------------------------------------------
#
# 5. Reporting Mode
#
#------------
#
#   rippled has an optional operating mode called Reporting Mode. In Reporting
#   Mode, rippled does not connect to the peer to peer network. Instead, rippled
#   will continuously extract data from one or more rippled servers that are
#   connected to the peer to peer network (referred to as an ETL source).
#   Reporting mode servers will forward RPC requests that require access to the
#   peer to peer network (submit, fee, etc) to an ETL source.
#
#   [reporting]   Settings for Reporting Mode. If and only if this section is
#                 present, rippled will start in reporting mode. This section
#                 contains a list of ETL source names, and key-value pairs. The
#                 ETL source names each correspond to a configuration file
#                 section; the names must match exactly. The key-value pairs are
#                 optional.
#
#
#   [<name>]
#
#   A series of key/value pairs that specify an ETL source.
#
#   source_ip = <IP-address>
#
#       Required. IP address of the ETL source. Can also be a DNS record.
#
#   source_ws_port = <number>
#
#       Required. Port on which ETL source is accepting unencrypted websocket
#       connections.
#
#   source_grpc_port = <number>
#
#       Required for ETL. Port on which ETL source is accepting gRPC requests.
#       If this option is omitted, this ETL source cannot actually be used for
#       ETL; the Reporting Mode server can still forward RPCs to this ETL
#       source, but cannot extract data from this ETL source.
#
#
#   Key-value pairs (all optional):
#
#   read_only      Valid values: 0, 1. Default is 0. If set to 1, the server
#                  will start in strict read-only mode, and will not perform
#                  ETL. The server will still handle RPC requests, and will
#                  still forward RPC requests that require access to the p2p
#                  network.
#
#   start_sequence
#                  Sequence of first ledger to extract if the database is empty.
#                  ETL extracts ledgers in order. If this setting is absent and
#                  the database is empty, ETL will start with the next ledger
#                  validated by the network. If this setting is present and the
#                  database is not empty, an exception is thrown.
#
#   num_markers    Degree of parallelism used during the initial ledger
#                  download. Only used if the database is empty. Valid values
#                  are 1-256. A higher degree of parallelism results in a
#                  faster download, but puts more load on the ETL source.
#                  Default is 2.
#
#   Example:
#
#   [reporting]
#   etl_source1
#   etl_source2
#   read_only=0
#   start_sequence=32570
#   num_markers=8
#
#   [etl_source1]
#   source_ip=1.2.3.4
#   source_ws_port=6005
#   source_grpc_port=50051
#
#   [etl_source2]
#   source_ip=5.6.7.8
#   source_ws_port=6005
#   source_grpc_port=50051
#
#   Minimal Example:
#
#   [reporting]
#   etl_source1
#
#   [etl_source1]
#   source_ip=1.2.3.4
#   source_ws_port=6005
#   source_grpc_port=50051
#
#
#   Notes:
#
#   Reporting Mode requires Postgres (instead of SQLite). The Postgres
#   connection info is specified under the [ledger_tx_tables] config section;
#   see the Database section for further documentation.
#
#   Each ETL source specified must have gRPC enabled (by adding a [port_grpc]
#   section to the config). It is recommended to add a secure_gateway entry to
#   the gRPC section, in order to bypass the server's rate limiting.
#   This section needs to be added to the config of the ETL source, not
#   the config of the reporting node. In the example below, the
#   reporting server is running at 127.0.0.1. Multiple IPs can be
#   specified in secure_gateway via a comma separated list.
#
#   [port_grpc]
#   ip = 0.0.0.0
#   port = 50051
#   secure_gateway = 127.0.0.1
#
#
#-------------------------------------------------------------------------------
#
# 6. Database
#
#------------
@@ -1003,13 +894,7 @@
#   rippled creates 4 SQLite databases to hold bookkeeping information
#   about transactions, local credentials, and various other things.
#   It also creates the NodeDB, which holds all the objects that
#   make up the current and historical ledgers.
#
#   The size of the NodeDB grows in proportion to the amount of new data and the
#   amount of historical data (a configurable setting) so the performance of the
@@ -1051,33 +936,10 @@
#       keeping full history is not advised, and using online delete is
#       recommended.
#
#       type = Cassandra
#
#       Apache Cassandra is an open-source, distributed key-value store - see
#       https://cassandra.apache.org/ for more details.
#
#       Cassandra is an alternative backend to be used only with Reporting Mode.
#       See the Reporting Mode section for more details about Reporting Mode.
#
#   Required keys for NuDB and RocksDB:
#
#       path                Location to store the database
#
#   Required keys for Cassandra:
#
#       contact_points      IP of a node in the Cassandra cluster
#
#       port                CQL Native Transport Port
#
#       secure_connect_bundle
#                           Absolute path to a secure connect bundle. When using
#                           a secure connect bundle, contact_points and port are
#                           not required.
#
#       keyspace            Name of Cassandra keyspace to use
#
#       table_name          Name of table in above keyspace to use
#
#   Optional keys
#
#       cache_size          Size of cache for database records. Default is 16384.
@@ -1094,7 +956,7 @@
#                           default value for the unspecified parameter.
#
#                           Note: the cache will not be created if online_delete
#                           is specified.
#
#       fast_load           Boolean. If set, load the last persisted ledger
#                           from disk upon process start before syncing to
@@ -1107,16 +969,53 @@
#       earliest_seq        The default is 32570 to match the XRP ledger
#                           network's earliest allowed sequence. Alternate
#                           networks may set this value. Minimum value of 1.
#                           If a [shard_db] section is defined, and this
#                           value is present in either [node_db] or [shard_db],
#                           it must be defined with the same value in both
#                           sections.
#
#       online_delete       Minimum value of 256. Enable automatic purging
#                           of older ledger information. Maintain at least this
#                           number of ledger records online. Must be greater
#                           than or equal to ledger_history.
#
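#       An illustrative [node_db] sketch using online_delete (the values
#       shown here are hypothetical, not recommended defaults):
#
#       [node_db]
#       type=NuDB
#       path=/var/lib/rippled/db/nudb
#       online_delete=2000
#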
#   Optional keys for NuDB only:
#
#       nudb_block_size     EXPERIMENTAL: Block size in bytes for NuDB storage.
#                           Must be a power of 2 between 4096 and 32768. Default is 4096.
#
#                           This parameter controls the fundamental storage unit
#                           size for NuDB's internal data structures. The choice
#                           of block size can significantly impact performance
#                           depending on your storage hardware and filesystem:
#
#                           - 4096 bytes: Optimal for most standard SSDs and
#                             traditional filesystems (ext4, NTFS, HFS+).
#                             Provides good balance of performance and storage
#                             efficiency. Recommended for most deployments.
#                             Minimizes memory footprint and provides consistent
#                             low-latency access patterns across diverse hardware.
#
#                           - 8192-16384 bytes: May improve performance on
#                             high-end NVMe SSDs and copy-on-write filesystems
#                             like ZFS or Btrfs that benefit from larger block
#                             alignment. Can reduce metadata overhead for large
#                             databases. Offers better sequential throughput and
#                             reduced I/O operations at the cost of higher memory
#                             usage per operation.
#
#                           - 32768 bytes (32K): Maximum supported block size
#                             for high-performance scenarios with very fast
#                             storage. May increase memory usage and reduce
#                             efficiency for smaller databases. Best suited for
#                             enterprise environments with abundant RAM.
#
#                           Performance testing is recommended before deploying
#                           any non-default block size in production environments.
#
#                           Note: This setting cannot be changed after database
#                           creation without rebuilding the entire database.
#                           Choose carefully based on your hardware and expected
#                           database size.
#
#                           Example: nudb_block_size=4096
#
#   These keys modify the behavior of online_delete, and thus are only
#   relevant if online_delete is defined and non-zero:
#
@@ -1153,29 +1052,10 @@
#                       that rippled is still in sync with the network,
#                       and that the validated ledger is less than
#                       'age_threshold_seconds' old. If not, then continue
#                       sleeping for this number of seconds and
#                       checking until healthy.
#                       Default is 5.
#
# Optional keys for Cassandra:
#
#   username            Username to use if Cassandra cluster requires
#                       authentication
#
#   password            Password to use if Cassandra cluster requires
#                       authentication
#
#   max_requests_outstanding
#                       Limits the maximum number of concurrent database
#                       writes. Default is 10 million. For slower clusters,
#                       large numbers of concurrent writes can overload the
#                       cluster. Setting this option can help eliminate
#                       write timeouts and other write errors due to the
#                       cluster being overloaded.
#
#   io_threads
#                       Set the number of IO threads used by the
#                       Cassandra driver. Defaults to 4.
#
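# A sketch of a Cassandra-backed [node_db] stanza combining the required and
# optional keys documented above. All values are illustrative placeholders
# (9042 is simply Cassandra's default CQL native transport port), not
# recommendations:
#
#   [node_db]
#   type=Cassandra
#   contact_points=10.0.0.5
#   port=9042
#   keyspace=rippled
#   table_name=objects
#   username=rippled
#   password=changeme
#   max_requests_outstanding=1000000
#   io_threads=4
#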
# Notes:
# The 'node_db' entry configures the primary, persistent storage.
#
@@ -1192,32 +1072,6 @@
# your rippled.cfg file.
# Partial pathnames are relative to the location of the rippled executable.
#
# [shard_db]                Settings for the Shard Database (optional)
#
#   Format (without spaces):
#       One or more lines of case-insensitive key / value pairs:
#       <key> '=' <value>
#       ...
#
#   Example:
#       path=db/shards/nudb
#
#   Required keys:
#       path            Location to store the database
#
#   Optional keys:
#       max_historical_shards
#                       The maximum number of historical shards
#                       to store.
#
# [historical_shard_paths]  Additional storage paths for the Shard Database (optional)
#
#   Format (without spaces):
#       One or more lines, each expressing a full path for storing historical shards:
#       /mnt/disk1
#       /mnt/disk2
#       ...
#
# [sqlite]                  Tuning settings for the SQLite databases (optional)
#
#   Format (without spaces):
@@ -1297,40 +1151,18 @@
#                       This setting may not be combined with the
#                       "safety_level" setting.
#
# [ledger_tx_tables] (optional)
#
#   page_size           Valid values: integer (MUST be power of 2 between
#                       512 and 65536). The default is 4096 bytes. This
#                       setting determines the size of a page in the
#                       transaction.db file.
#                       See https://www.sqlite.org/pragma.html#pragma_page_size
#                       for more details about the available options.
#
#   conninfo            Info for connecting to Postgres. Format is
#                       postgres://[username]:[password]@[ip]/[database].
#                       The database and user must already exist. If this
#                       section is missing and rippled is running in
#                       Reporting Mode, rippled will connect as the
#                       user running rippled to a database with the
#                       same name. On Linux and Mac OS X, the connection
#                       will take place using the server's UNIX domain
#                       socket. On Windows, through the localhost IP
#                       address. Default is empty.
#
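#                       For example, with hypothetical credentials, shown
#                       only to illustrate the format:
#                       conninfo=postgres://rippled:changeme@127.0.0.1/rippled
#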
#   use_tx_tables       Valid values: 1, 0
#                       The default is 1 (true). Determines whether to use
#                       the SQLite transaction database. If set to 0,
#                       rippled will not write to the transaction database,
#                       and will reject tx, account_tx and tx_history RPCs.
#                       In Reporting Mode, this setting is ignored.
#
#   max_connections     Valid values: any positive integer up to 64 bit
#                       storage length. This configures the maximum
#                       number of concurrent connections to postgres.
#                       Default is the maximum possible value to
#                       fit in a 64 bit integer.
#
#   timeout             Number of seconds after which idle postgres
#                       connections are disconnected. If set to 0,
#                       connections never time out. Default is 600.
#
#   remember_ip         Valid values: 1, 0
#                       Default is 1 (true). Whether to cache host and
#                       port connection settings.
#
#   journal_size_limit  Valid values: integer
#                       The default is 1582080. This setting limits the size
#                       of the journal for the transaction.db file. When the
#                       limit is reached, older entries will be deleted.
#                       See https://www.sqlite.org/pragma.html#pragma_journal_size_limit
#                       for more details about the available options.
#
#-------------------------------------------------------------------------------
@@ -1596,6 +1428,12 @@
#   Admin level API commands over Secure Websockets, when originating
#   from the same machine (via the loopback adapter at 127.0.0.1).
#
#   "grpc"
#
#   ETL commands for Clio. We recommend setting secure_gateway
#   in this section to a comma-separated list of the addresses
#   of your Clio servers, in order to bypass rippled's rate limiting.
#
#   This port is commented out but can be enabled by removing
#   the '#' from each corresponding line, including the entry under [server].
#
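# A sketch of the commented-out gRPC stanza described above; the port and
# addresses are illustrative and should be adjusted to your deployment:
#
#   [port_grpc]
#   port = 50051
#   ip = 127.0.0.1
#   secure_gateway = 127.0.0.1
#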
@@ -1629,6 +1467,9 @@ admin = 127.0.0.1
protocol = http

[port_peer]
# Many servers still use the legacy port of 51235, so for backward-compatibility
# we maintain that port number here. However, for new servers we recommend
# changing this to the default port of 2459.
port = 51235
ip = 0.0.0.0
# alternatively, to accept connections on IPv4 + IPv6, use:
@@ -1671,37 +1512,14 @@ secure_gateway = 127.0.0.1
[node_db]
type=NuDB
path=/var/lib/rippled/db/nudb
nudb_block_size=4096
online_delete=512
advisory_delete=0

# This is the persistent datastore for shards. It is important for the health
# of the ripple network that rippled operators shard as much as practical.
# NuDB requires SSD storage. Helpful information can be found at
# https://xrpl.org/history-sharding.html
#[shard_db]
#path=/var/lib/rippled/db/shards/nudb
#max_historical_shards=50
#
# This optional section can be configured with a list
# of paths to use for storing historical shards. Each
# path must correspond to a unique filesystem.
#[historical_shard_paths]
#/path/1
#/path/2

[database_path]
/var/lib/rippled/db

# To use Postgres, uncomment this section and fill in the appropriate connection
# info. Postgres can only be used in Reporting Mode.
# To disable writing to the transaction database, uncomment this section, and
# set use_tx_tables=0
# [ledger_tx_tables]
# conninfo = postgres://[username]:[password]@[ip]/[database]
# use_tx_tables=1

# This needs to be an absolute directory reference, not a relative one.
# Modify this value as required.
[debug_logfile]
@@ -1729,15 +1547,3 @@ validators.txt
# set ssl_verify to 0.
[ssl_verify]
1

# To run in Reporting Mode, uncomment this section and fill in the appropriate
# connection info for one or more ETL sources.
# [reporting]
# etl_source
#
#
# [etl_source]
# source_grpc_port=50051
# source_ws_port=6005
# source_ip=127.0.0.1

File diff suppressed because it is too large
@@ -26,7 +26,7 @@
#
# Examples:
#  https://vl.ripple.com
#  https://vl.xrplf.org
#  https://unl.xrplf.org
#  http://127.0.0.1:8000
#  file:///etc/opt/ripple/vl.txt
#
@@ -54,13 +54,13 @@

[validator_list_sites]
https://vl.ripple.com
https://vl.xrplf.org
https://unl.xrplf.org

[validator_list_keys]
# vl.ripple.com
ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
# vl.xrplf.org
ED45D1840EE724BE327ABE9146503D5848EFD5F38B6D5FEDE71E80ACCE5E6E738B
# unl.xrplf.org
ED42AEC58B701EEBB77356FFFEC26F83C1F0407263530F068C7C73D392C7E06FD1

# To use the test network (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
# use the following configuration instead:
@@ -70,3 +70,21 @@ ED45D1840EE724BE327ABE9146503D5848EFD5F38B6D5FEDE71E80ACCE5E6E738B
#
# [validator_list_keys]
# ED264807102805220DA0F312E71FC2C69E1552C9C5790F6C25E3729DEB573D5860


# [validator_list_threshold]
#
# Minimum number of validator lists on which a validator must be listed in
# order to be used.
#
# This can be set explicitly to any positive integer number not greater than
# the size of [validator_list_keys]. If it is not set, or set to 0, the
# value will be calculated at startup from the size of [validator_list_keys],
# where the calculation is:
#
# threshold = size(validator_list_keys) < 3
#     ? 1
#     : floor(size(validator_list_keys) / 2) + 1
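#
# For example, with the three [validator_list_keys] entries configured above,
# the computed default is floor(3 / 2) + 1 = 2, i.e. a validator must appear
# on at least two of the three lists; with a single key it would be 1.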

[validator_list_threshold]
0

@@ -98,6 +98,17 @@
# 2024-04-03, Bronek Kozicki
# - add support for output formats: jacoco, clover, lcov
#
# 2025-05-12, Jingchen Wu
# - add -fprofile-update=atomic to ensure atomic profile generation
#
# 2025-08-28, Bronek Kozicki
# - fix "At least one COMMAND must be given" CMake warning from policy CMP0175
#
# 2025-09-03, Jingchen Wu
# - remove the unused functions append_coverage_compiler_flags and append_coverage_compiler_flags_to_target
# - add a new function add_code_coverage_to_target
# - remove some unused code
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
@@ -106,10 +117,8 @@
#    using a CMake option() to enable it just optionally):
#      include(CodeCoverage)
#
# 3. Append necessary compiler flags for all supported source files:
#      append_coverage_compiler_flags()
#    Or for specific target:
#      append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
# 3. Append necessary compiler flags and linker flags for all supported source files:
#      add_code_coverage_to_target(<target> <PRIVATE|PUBLIC|INTERFACE>)
#
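#    For instance, this repository attaches coverage to its `opts` interface
#    library (see the coverage setup later in this diff):
#      add_code_coverage_to_target(opts INTERFACE)
#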
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
#
@@ -198,55 +207,69 @@ endforeach()

set(COVERAGE_COMPILER_FLAGS "-g --coverage"
    CACHE INTERNAL "")

set(COVERAGE_CXX_COMPILER_FLAGS "")
set(COVERAGE_C_COMPILER_FLAGS "")
set(COVERAGE_CXX_LINKER_FLAGS "")
set(COVERAGE_C_LINKER_FLAGS "")

if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
    include(CheckCXXCompilerFlag)
    include(CheckCCompilerFlag)
    include(CheckLinkerFlag)

    set(COVERAGE_CXX_COMPILER_FLAGS ${COVERAGE_COMPILER_FLAGS})
    set(COVERAGE_C_COMPILER_FLAGS ${COVERAGE_COMPILER_FLAGS})
    set(COVERAGE_CXX_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})
    set(COVERAGE_C_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})

    check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
    if(HAVE_cxx_fprofile_abs_path)
        set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
        set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path")
    endif()
    include(CheckCCompilerFlag)

    check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
    if(HAVE_c_fprofile_abs_path)
        set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
        set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path")
    endif()
endif()

set(CMAKE_Fortran_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the Fortran compiler during coverage builds."
    FORCE )
set(CMAKE_CXX_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the C++ compiler during coverage builds."
    FORCE )
set(CMAKE_C_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the C compiler during coverage builds."
    FORCE )
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
    ""
    CACHE STRING "Flags used for linking binaries during coverage builds."
    FORCE )
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
    ""
    CACHE STRING "Flags used by the shared libraries linker during coverage builds."
    FORCE )
mark_as_advanced(
    CMAKE_Fortran_FLAGS_COVERAGE
    CMAKE_CXX_FLAGS_COVERAGE
    CMAKE_C_FLAGS_COVERAGE
    CMAKE_EXE_LINKER_FLAGS_COVERAGE
    CMAKE_SHARED_LINKER_FLAGS_COVERAGE )
    check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path)
    if(HAVE_cxx_linker_fprofile_abs_path)
        set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path")
    endif()

    check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path)
    if(HAVE_c_linker_fprofile_abs_path)
        set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path")
    endif()

    check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update)
    if(HAVE_cxx_fprofile_update)
        set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic")
    endif()

    check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update)
    if(HAVE_c_fprofile_update)
        set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic")
    endif()

    check_linker_flag(CXX -fprofile-update=atomic HAVE_cxx_linker_fprofile_update)
    if(HAVE_cxx_linker_fprofile_update)
        set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic")
    endif()

    check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update)
    if(HAVE_c_linker_fprofile_update)
        set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic")
    endif()

endif()

get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
    message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)

if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
    link_libraries(gcov)
endif()

# Defines a target for running and collecting code coverage information:
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
@@ -431,23 +454,24 @@ function(setup_target_for_coverage_gcovr)

    # Show info where to find the report
    add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
        COMMAND ;
        COMMAND echo
        COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
    )
endfunction() # setup_target_for_coverage_gcovr

function(append_coverage_compiler_flags)
    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
    set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
    message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
endfunction() # append_coverage_compiler_flags
function(add_code_coverage_to_target name scope)
    separate_arguments(COVERAGE_CXX_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_COMPILER_FLAGS}")
    separate_arguments(COVERAGE_C_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_C_COMPILER_FLAGS}")
    separate_arguments(COVERAGE_CXX_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_LINKER_FLAGS}")
    separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")

# Setup coverage for specific library
function(append_coverage_compiler_flags_to_target name)
    separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
    target_compile_options(${name} PRIVATE ${_flag_list})
    if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
        target_link_libraries(${name} PRIVATE gcov)
    endif()
endfunction()
    # Add compiler options to the target
    target_compile_options(${name} ${scope}
        $<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
        $<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)

    target_link_libraries (${name} ${scope}
        $<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS} gcov>
        $<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS} gcov>
    )
endfunction() # add_code_coverage_to_target
@@ -16,13 +16,16 @@ set(CMAKE_CXX_EXTENSIONS OFF)
target_compile_definitions (common
    INTERFACE
        $<$<CONFIG:Debug>:DEBUG _DEBUG>
        $<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>)
        # ^^^^ NOTE: CMAKE release builds already have NDEBUG
        # defined, so no need to add it explicitly except for
        # this special case of (profile ON) and (assert OFF)
        # -- presumably this is because we don't want profile
        # builds asserting unless asserts were specifically
        # requested
        #[===[
        NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it
        explicitly except for the special case of (profile ON) and (assert OFF).
        Presumably this is because we don't want profile builds asserting unless
        asserts were specifically requested.
        ]===]
        $<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
        # TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
        OPENSSL_SUPPRESS_DEPRECATED
    )

if (MSVC)
    # remove existing exception flag since we set it to -EHa
@@ -69,9 +72,9 @@ if (MSVC)
      -GS
      -Zc:forScope
    >
    # static runtime
    $<$<CONFIG:Debug>:-MTd>
    $<$<NOT:$<CONFIG:Debug>>:-MT>
    # dynamic runtime
    $<$<CONFIG:Debug>:-MDd>
    $<$<NOT:$<CONFIG:Debug>>:-MD>
    $<$<BOOL:${werr}>:-WX>
  )
  target_compile_definitions (common
@@ -90,28 +93,16 @@ if (MSVC)
      -errorreport:none
      -machine:X64)
else ()
  # HACK : because these need to come first, before any warning demotion
  string (APPEND CMAKE_CXX_FLAGS " -Wall -Wdeprecated")
  if (wextra)
    string (APPEND CMAKE_CXX_FLAGS " -Wextra -Wno-unused-parameter")
  endif ()
  # not MSVC
  target_compile_options (common
    INTERFACE
      -Wall
      -Wdeprecated
      $<$<BOOL:${is_clang}>:-Wno-deprecated-declarations>
      $<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
      $<$<BOOL:${werr}>:-Werror>
      $<$<COMPILE_LANGUAGE:CXX>:
        -frtti
        -Wnon-virtual-dtor
      >
      -Wno-sign-compare
      -Wno-char-subscripts
      -Wno-format
      -Wno-unused-local-typedefs
      -fstack-protector
      $<$<BOOL:${is_gcc}>:
        -Wno-unused-but-set-variable
        -Wno-deprecated
      >
      -Wno-sign-compare
      -Wno-unused-but-set-variable
      $<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
      # tweak gcc optimization for debug
      $<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>
@@ -120,7 +111,7 @@ else ()
  target_link_libraries (common
    INTERFACE
      -rdynamic
      $<$<BOOL:${is_linux}>:-Wl,-z,relro,-z,now>
      $<$<BOOL:${is_linux}>:-Wl,-z,relro,-z,now,--build-id>
      # link to static libc/c++ iff:
      #   * static option set and
      #   * NOT APPLE (AppleClang does not support static libc/c++) and
@@ -131,6 +122,17 @@ else ()
    >)
endif ()

# Antithesis instrumentation will only be built and deployed using machines running Linux.
if (voidstar)
  if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
    message(FATAL_ERROR "Antithesis instrumentation requires Debug build type, aborting...")
  elseif (NOT is_linux)
    message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...")
  elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0))
    message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...")
  endif ()
endif ()

if (use_mold)
  # use mold linker if available
  execute_process (
211
cmake/RippledCore.cmake
Normal file
@@ -0,0 +1,211 @@
#[===================================================================[
   Exported targets.
#]===================================================================]

include(target_protobuf_sources)

# Protocol buffers cannot participate in a unity build, because all the
# generated sources define a bunch of `static const` variables with the
# same names, so we just build them as a separate library.
add_library(xrpl.libpb)
set_target_properties(xrpl.libpb PROPERTIES UNITY_BUILD OFF)
target_protobuf_sources(xrpl.libpb xrpl/proto
    LANGUAGE cpp
    IMPORT_DIRS include/xrpl/proto
    PROTOS include/xrpl/proto/ripple.proto
)

file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
target_protobuf_sources(xrpl.libpb xrpl/proto
    LANGUAGE cpp
    IMPORT_DIRS include/xrpl/proto
    PROTOS "${protos}"
)
target_protobuf_sources(xrpl.libpb xrpl/proto
    LANGUAGE grpc
    IMPORT_DIRS include/xrpl/proto
    PROTOS "${protos}"
    PLUGIN protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
    GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc
)

target_compile_options(xrpl.libpb
    PUBLIC
        $<$<BOOL:${MSVC}>:-wd4996>
        $<$<BOOL:${XCODE}>:
            --system-header-prefix="google/protobuf"
            -Wno-deprecated-dynamic-exception-spec
        >
    PRIVATE
        $<$<BOOL:${MSVC}>:-wd4065>
        $<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
)

target_link_libraries(xrpl.libpb
    PUBLIC
        protobuf::libprotobuf
        gRPC::grpc++
)

# TODO: Clean up the number of library targets later.
add_library(xrpl.imports.main INTERFACE)

target_link_libraries(xrpl.imports.main
    INTERFACE
        LibArchive::LibArchive
        OpenSSL::Crypto
        Ripple::boost
        Ripple::opts
        Ripple::syslibs
        absl::random_random
        date::date
        ed25519::ed25519
        secp256k1::secp256k1
        xrpl.libpb
        xxHash::xxhash
        $<$<BOOL:${voidstar}>:antithesis-sdk-cpp>
)

include(add_module)
include(target_link_modules)

# Level 01
add_module(xrpl beast)
target_link_libraries(xrpl.libxrpl.beast PUBLIC
    xrpl.imports.main
    xrpl.libpb
)

# Level 02
add_module(xrpl basics)
target_link_libraries(xrpl.libxrpl.basics PUBLIC xrpl.libxrpl.beast)

# Level 03
add_module(xrpl json)
target_link_libraries(xrpl.libxrpl.json PUBLIC xrpl.libxrpl.basics)

add_module(xrpl crypto)
target_link_libraries(xrpl.libxrpl.crypto PUBLIC xrpl.libxrpl.basics)

# Level 04
add_module(xrpl protocol)
target_link_libraries(xrpl.libxrpl.protocol PUBLIC
    xrpl.libxrpl.crypto
    xrpl.libxrpl.json
)

# Level 05
add_module(xrpl resource)
target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)

# Level 06
add_module(xrpl net)
target_link_libraries(xrpl.libxrpl.net PUBLIC
    xrpl.libxrpl.basics
    xrpl.libxrpl.json
    xrpl.libxrpl.protocol
    xrpl.libxrpl.resource
)

add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)

add_module(xrpl ledger)
target_link_libraries(xrpl.libxrpl.ledger PUBLIC
    xrpl.libxrpl.basics
    xrpl.libxrpl.json
    xrpl.libxrpl.protocol
)

add_library(xrpl.libxrpl)
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)

add_library(xrpl::libxrpl ALIAS xrpl.libxrpl)

file(GLOB_RECURSE sources CONFIGURE_DEPENDS
    "${CMAKE_CURRENT_SOURCE_DIR}/src/libxrpl/*.cpp"
)
target_sources(xrpl.libxrpl PRIVATE ${sources})

target_link_modules(xrpl PUBLIC
    basics
    beast
    crypto
    json
    protocol
    resource
    server
    net
    ledger
)

# All headers in libxrpl are in modules.
# Uncomment this stanza if you have not yet moved new headers into a module.
# target_include_directories(xrpl.libxrpl
#     PRIVATE
#         $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
#     PUBLIC
#         $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
#         $<INSTALL_INTERFACE:include>)

if(xrpld)
    add_executable(rippled)
    if(tests)
        target_compile_definitions(rippled PUBLIC ENABLE_TESTS)
        target_compile_definitions(rippled PRIVATE
            UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}
        )
    endif()
    target_include_directories(rippled
        PRIVATE
            $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
    )

    file(GLOB_RECURSE sources CONFIGURE_DEPENDS
        "${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp"
    )
    target_sources(rippled PRIVATE ${sources})

    if(tests)
        file(GLOB_RECURSE sources CONFIGURE_DEPENDS
            "${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp"
        )
        target_sources(rippled PRIVATE ${sources})
    endif()

    target_link_libraries(rippled
        Ripple::boost
        Ripple::opts
        Ripple::libs
        xrpl.libxrpl
    )
    exclude_if_included(rippled)
    # define a macro for tests that might need to
    # be excluded or run differently in CI environment
    if(is_ci)
        target_compile_definitions(rippled PRIVATE RIPPLED_RUNNING_IN_CI)
    endif ()

    if(voidstar)
        target_compile_options(rippled
            PRIVATE
                -fsanitize-coverage=trace-pc-guard
        )
        # rippled requires access to antithesis-sdk-cpp implementation file
        # antithesis_instrumentation.h, which is not exported as INTERFACE
        target_include_directories(rippled
            PRIVATE
                ${CMAKE_SOURCE_DIR}/external/antithesis-sdk
        )
    endif()

    # any files that don't play well with unity should be added here
    if(tests)
        set_source_files_properties(
            # these two seem to produce conflicts in beast teardown template methods
            src/test/rpc/ValidatorRPC_test.cpp
            src/test/ledger/Invariants_test.cpp
            PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE)
    endif()
endif()
@@ -33,6 +33,8 @@ setup_target_for_coverage_gcovr(
    FORMAT ${coverage_format}
    EXECUTABLE rippled
    EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
    EXCLUDE "src/test" "${CMAKE_BINARY_DIR}/proto_gen" "${CMAKE_BINARY_DIR}/proto_gen_grpc"
    EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
    DEPENDENCIES rippled
)

add_code_coverage_to_target(opts INTERFACE)
@@ -21,16 +21,17 @@ set(doxyfile "${CMAKE_CURRENT_SOURCE_DIR}/docs/Doxyfile")

file(GLOB_RECURSE doxygen_input
    docs/*.md
    src/ripple/*.h
    src/ripple/*.cpp
    src/ripple/*.md
    src/test/*.h
    src/test/*.md
    Builds/*/README.md)
    include/*.h
    include/*.cpp
    include/*.md
    src/*.h
    src/*.cpp
    src/*.md
    Builds/*.md
    *.md)
list(APPEND doxygen_input
    README.md
    RELEASENOTES.md
    src/README.md)
    external/README.md
)
set(dependencies "${doxygen_input}" "${doxyfile}")

function(verbose_find_path variable name)
@@ -52,9 +53,9 @@ set(download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake")
file(WRITE
    "${download_script}"
    "file(DOWNLOAD \
        http://upload.cppreference.com/mwiki/images/b/b2/html_book_20190607.zip \
        https://github.com/PeterFeicht/cppreference-doc/releases/download/v20250209/html-book-20250209.zip \
        ${CMAKE_BINARY_DIR}/docs/cppreference.zip \
        EXPECTED_HASH MD5=82b3a612d7d35a83e3cb1195a63689ab \
        EXPECTED_HASH MD5=bda585f72fbca4b817b29a3d5746567b \
    )\n \
    execute_process( \
        COMMAND \"${CMAKE_COMMAND}\" -E tar -xf cppreference.zip \
@@ -2,20 +2,45 @@
   install stuff
#]===================================================================]

include(create_symbolic_link)

install (
    TARGETS
        common
        opts
        ripple_syslibs
        ripple_boost
        xrpl_core
        xrpl.imports.main
        xrpl.libpb
        xrpl.libxrpl.basics
        xrpl.libxrpl.beast
        xrpl.libxrpl.crypto
        xrpl.libxrpl.json
        xrpl.libxrpl.protocol
        xrpl.libxrpl.resource
        xrpl.libxrpl.ledger
        xrpl.libxrpl.server
        xrpl.libxrpl.net
        xrpl.libxrpl
        antithesis-sdk-cpp
    EXPORT RippleExports
    LIBRARY DESTINATION lib
    ARCHIVE DESTINATION lib
    RUNTIME DESTINATION bin
    INCLUDES DESTINATION include)

install(
    DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl"
    DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
)

install(CODE "
    set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
    include(create_symbolic_link)
    create_symbolic_link(xrpl \
        \$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
")

install (EXPORT RippleExports
    FILE RippleTargets.cmake
    NAMESPACE Ripple::
@@ -26,14 +51,9 @@ write_basic_package_version_file (
    VERSION ${rippled_version}
    COMPATIBILITY SameMajorVersion)

if (is_root_project)
if (is_root_project AND TARGET rippled)
    install (TARGETS rippled RUNTIME DESTINATION bin)
    set_target_properties(rippled PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
    install (
        FILES
            ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/RippleConfig.cmake
            ${CMAKE_CURRENT_BINARY_DIR}/RippleConfigVersion.cmake
        DESTINATION lib/cmake/ripple)
    # sample configs should not overwrite existing files
    # install if-not-exists workaround as suggested by
    # https://cmake.org/Bug/view.php?id=12646
@@ -48,4 +68,16 @@ if (is_root_project)
        copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/rippled-example.cfg\" etc rippled.cfg)
        copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt)
    ")
    install(CODE "
        set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
        include(create_symbolic_link)
        create_symbolic_link(rippled${suffix} \
            \$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
    ")
endif ()

install (
    FILES
        ${CMAKE_CURRENT_SOURCE_DIR}/cmake/RippleConfig.cmake
        ${CMAKE_CURRENT_BINARY_DIR}/RippleConfigVersion.cmake
    DESTINATION lib/cmake/ripple)
@@ -7,6 +7,9 @@ add_library (Ripple::opts ALIAS opts)
target_compile_definitions (opts
  INTERFACE
    BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
    BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT
    BOOST_CONTAINER_FWD_BAD_DEQUE
    HAS_UNCAUGHT_EXCEPTIONS=1
    $<$<BOOL:${boost_show_deprecated}>:
      BOOST_ASIO_NO_DEPRECATED
      BOOST_FILESYSTEM_NO_DEPRECATED
@@ -18,20 +21,18 @@ target_compile_definitions (opts
    >
    $<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
    $<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
    $<$<BOOL:${single_io_service_thread}>:RIPPLE_SINGLE_IO_SERVICE_THREAD=1>)
    $<$<BOOL:${single_io_service_thread}>:RIPPLE_SINGLE_IO_SERVICE_THREAD=1>
    $<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
target_compile_options (opts
  INTERFACE
    $<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
    $<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
    $<$<BOOL:${perf}>:-fno-omit-frame-pointer>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
    $<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
    $<$<BOOL:${profile}>:-pg>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

target_link_libraries (opts
  INTERFACE
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
    $<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
    $<$<BOOL:${profile}>:-pg>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

@@ -2,16 +2,6 @@
   convenience variables and sanity checks
#]===================================================================]

include(ProcessorCount)

if (NOT ep_procs)
  ProcessorCount(ep_procs)
  if (ep_procs GREATER 1)
    # never use more than half of cores for EP builds
    math (EXPR ep_procs "${ep_procs} / 2")
    message (STATUS "Using ${ep_procs} cores for ExternalProject builds.")
  endif ()
endif ()
get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)

set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)
@@ -8,15 +8,25 @@ ProcessorCount(PROCESSOR_COUNT)

option(assert "Enables asserts, even in release builds" OFF)

option(reporting "Build rippled with reporting mode enabled" OFF)
option(xrpld "Build xrpld" ON)

option(tests "Build tests" ON)
if(tests)
    # This setting allows making a separate workflow to test fees other than the default of 10
    if(NOT UNIT_TEST_REFERENCE_FEE)
        set(UNIT_TEST_REFERENCE_FEE "10" CACHE STRING "")
    endif()
endif()
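
# For example, a sketch of configuring a test build with a non-default
# reference fee (the value 50 is hypothetical):
#
#   cmake -Dtests=ON -DUNIT_TEST_REFERENCE_FEE=50 ..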

option(unity "Creates a build using UNITY support in cmake. This is the default" ON)
option(unity "Creates a build using UNITY support in cmake." OFF)
if(unity)
    if(NOT is_ci)
        set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
    endif()
    set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
endif()
if(is_clang AND is_linux)
    option(voidstar "Enable Antithesis instrumentation." OFF)
endif()
if(is_gcc OR is_clang)
    option(coverage "Generates coverage info." OFF)
@@ -108,7 +118,7 @@ option(beast_no_unit_test_inline
    "Prevents unit test definitions from being inserted into global table"
    OFF)
option(single_io_service_thread
    "Restricts the number of threads calling io_service::run to one. \
    "Restricts the number of threads calling io_context::run to one. \
    This can be useful when debugging."
    OFF)
option(boost_show_deprecated
20
cmake/RippledValidatorKeys.cmake
Normal file
@@ -0,0 +1,20 @@
option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)

if (validator_keys)
  git_branch (current_branch)
  # default to tracking VK master branch unless we are on release
  if (NOT (current_branch STREQUAL "release"))
    set (current_branch "master")
  endif ()
  message (STATUS "Tracking ValidatorKeys branch: ${current_branch}")

  FetchContent_Declare (
    validator_keys
    GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
    GIT_TAG "${current_branch}"
  )
  FetchContent_MakeAvailable(validator_keys)
  set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
  install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})

endif ()
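
# To opt in from the command line, use the standard CMake cache option syntax:
#
#   cmake -Dvalidator_keys=ON ..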
15
cmake/RippledVersion.cmake
Normal file
@@ -0,0 +1,15 @@
#[===================================================================[
   read version from source
#]===================================================================]

file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO)
foreach(line_ ${BUILD_INFO})
    if(line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
        set(rippled_version ${CMAKE_MATCH_1})
    endif()
endforeach()
if(rippled_version)
    message(STATUS "rippled version: ${rippled_version}")
else()
    message(FATAL_ERROR "unable to determine rippled version")
endif()
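
# The regex above targets the definition in BuildInfo.cpp, which looks roughly
# like this (the version value is hypothetical):
#
#   char const* const versionString = "2.5.0"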
37
cmake/add_module.cmake
Normal file
@@ -0,0 +1,37 @@
include(isolate_headers)

# Create an OBJECT library target named
#
#   ${PROJECT_NAME}.lib${parent}.${name}
#
# with sources in src/lib${parent}/${name}
# and headers in include/${parent}/${name}
# that cannot include headers from other directories in include/
# unless they come through linked libraries.
#
#   add_module(parent a)
#   add_module(parent b)
#   target_link_libraries(project.libparent.b PUBLIC project.libparent.a)
function(add_module parent name)
    set(target ${PROJECT_NAME}.lib${parent}.${name})
    add_library(${target} OBJECT)
    file(GLOB_RECURSE sources CONFIGURE_DEPENDS
        "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp"
    )
    target_sources(${target} PRIVATE ${sources})
    target_include_directories(${target} PUBLIC
        "$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>"
    )
    isolate_headers(
        ${target}
        "${CMAKE_CURRENT_SOURCE_DIR}/include"
        "${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}"
        PUBLIC
    )
    isolate_headers(
        ${target}
        "${CMAKE_CURRENT_SOURCE_DIR}/src"
        "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}"
        PRIVATE
    )
endfunction()
20
cmake/create_symbolic_link.cmake
Normal file
@@ -0,0 +1,20 @@
# file(CREATE_SYMLINK) only works on Windows with administrator privileges.
# https://stackoverflow.com/a/61244115/618906
function(create_symbolic_link target link)
    if(WIN32)
        if(NOT IS_SYMLINK "${link}")
            if(NOT IS_ABSOLUTE "${target}")
                # Relative links do not work on Windows.
                set(target "${link}/../${target}")
            endif()
            file(TO_NATIVE_PATH "${target}" target)
            file(TO_NATIVE_PATH "${link}" link)
            execute_process(COMMAND cmd.exe /c mklink /J "${link}" "${target}")
        endif()
    else()
        file(CREATE_LINK "${target}" "${link}" SYMBOLIC)
    endif()
    if(NOT IS_SYMLINK "${link}")
        message(ERROR "failed to create symlink: <${link}>")
    endif()
endfunction()
@@ -2,7 +2,6 @@ find_package(Boost 1.82 REQUIRED
  COMPONENTS
    chrono
    container
    context
    coroutine
    date_time
    filesystem
@@ -15,22 +14,17 @@ find_package(Boost 1.82 REQUIRED

add_library(ripple_boost INTERFACE)
add_library(Ripple::boost ALIAS ripple_boost)
if(XCODE)
  target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
  target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
else()
  target_include_directories(ripple_boost SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
endif()

target_link_libraries(ripple_boost
  INTERFACE
    Boost::boost
    Boost::headers
    Boost::chrono
    Boost::container
    Boost::coroutine
    Boost::date_time
    Boost::filesystem
    Boost::json
    Boost::process
    Boost::program_options
    Boost::regex
    Boost::system
48
cmake/isolate_headers.cmake
Normal file
@@ -0,0 +1,48 @@
include(create_symbolic_link)

# Consider include directory B nested under prefix A:
#
#   /path/to/A/then/to/B/...
#
# Call C the relative path from A to B.
# C is what we want to write in `#include` directives:
#
#   #include <then/to/B/...>
#
# Examples, all from the `jobqueue` module:
#
# - Library public headers:
#   B = /include/xrpl/jobqueue
#   A = /include/
#   C = xrpl/jobqueue
#
# - Library private headers:
#   B = /src/libxrpl/jobqueue
#   A = /src/
#   C = libxrpl/jobqueue
#
# - Test private headers:
#   B = /tests/jobqueue
#   A = /
#   C = tests/jobqueue
#
# To isolate headers from each other,
# we want to create a symlink Y that points to B,
# within a subdirectory X of the `CMAKE_BINARY_DIR`,
# that has the same relative path C between X and Y,
# and then add X as an include directory of the target,
# sometimes `PUBLIC` and sometimes `PRIVATE`.
# The Cs are all guaranteed to be unique.
# We can guarantee a unique X per target by using
# `${CMAKE_CURRENT_BINARY_DIR}/modules/${target}`.
#
# isolate_headers(target A B scope)
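#
# As a concrete walk-through with one of this repository's real modules,
# isolating the public headers of `basics` for target `xrpl.libxrpl.basics`
# yields:
#   C = xrpl/basics
#   X = ${CMAKE_CURRENT_BINARY_DIR}/modules/xrpl.libxrpl.basics
#   Y = ${X}/xrpl/basics   (a symlink back to include/xrpl/basics)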
function(isolate_headers target A B scope)
    file(RELATIVE_PATH C "${A}" "${B}")
    set(X "${CMAKE_CURRENT_BINARY_DIR}/modules/${target}")
    set(Y "${X}/${C}")
    cmake_path(GET Y PARENT_PATH parent)
    file(MAKE_DIRECTORY "${parent}")
    create_symbolic_link("${B}" "${Y}")
    target_include_directories(${target} ${scope} "$<BUILD_INTERFACE:${X}>")
endfunction()
24
cmake/target_link_modules.cmake
Normal file
@@ -0,0 +1,24 @@
# Link a library to its modules (see: `add_module`)
# and remove the module sources from the library's sources.
#
#   add_module(parent a)
#   add_module(parent b)
#   target_link_libraries(project.libparent.b PUBLIC project.libparent.a)
#   add_library(project.libparent)
#   target_link_modules(parent PUBLIC a b)
function(target_link_modules parent scope)
    set(library ${PROJECT_NAME}.lib${parent})
    foreach(name ${ARGN})
        set(module ${library}.${name})
        get_target_property(sources ${library} SOURCES)
        list(LENGTH sources before)
        get_target_property(dupes ${module} SOURCES)
        list(LENGTH dupes expected)
        list(REMOVE_ITEM sources ${dupes})
        list(LENGTH sources after)
        math(EXPR actual "${before} - ${after}")
        message(STATUS "${module} with ${expected} sources took ${actual} sources from ${library}")
        set_target_properties(${library} PROPERTIES SOURCES "${sources}")
        target_link_libraries(${library} ${scope} ${module})
    endforeach()
endfunction()
Some files were not shown because too many files have changed in this diff