Mirror of https://github.com/XRPLF/rippled.git, synced 2026-01-08 16:56:56 +00:00
Compare commits
511 Commits
master...ripple/sma
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 8015088340 |  |
|  | … (509 more commits) … |  |
|  | b6a95f9970 |  |
@@ -1,4 +1,20 @@
|
||||
---
|
||||
BreakBeforeBraces: Custom
|
||||
BraceWrapping:
|
||||
AfterClass: true
|
||||
AfterControlStatement: true
|
||||
AfterEnum: false
|
||||
AfterFunction: true
|
||||
AfterNamespace: false
|
||||
AfterObjCDeclaration: true
|
||||
AfterStruct: true
|
||||
AfterUnion: true
|
||||
BeforeCatch: true
|
||||
BeforeElse: true
|
||||
IndentBraces: false
|
||||
KeepEmptyLinesAtTheStartOfBlocks: false
|
||||
MaxEmptyLinesToKeep: 1
|
||||
---
|
||||
Language: Cpp
|
||||
AccessModifierOffset: -4
|
||||
AlignAfterOpenBracket: AlwaysBreak
|
||||
@@ -18,20 +34,7 @@ AlwaysBreakBeforeMultilineStrings: true
|
||||
AlwaysBreakTemplateDeclarations: true
|
||||
BinPackArguments: false
|
||||
BinPackParameters: false
|
||||
BraceWrapping:
|
||||
AfterClass: true
|
||||
AfterControlStatement: true
|
||||
AfterEnum: false
|
||||
AfterFunction: true
|
||||
AfterNamespace: false
|
||||
AfterObjCDeclaration: true
|
||||
AfterStruct: true
|
||||
AfterUnion: true
|
||||
BeforeCatch: true
|
||||
BeforeElse: true
|
||||
IndentBraces: false
|
||||
BreakBeforeBinaryOperators: false
|
||||
BreakBeforeBraces: Custom
|
||||
BreakBeforeTernaryOperators: true
|
||||
BreakConstructorInitializersBeforeComma: true
|
||||
ColumnLimit: 80
|
||||
@@ -66,8 +69,6 @@ IndentFunctionDeclarationAfterType: false
|
||||
IndentRequiresClause: true
|
||||
IndentWidth: 4
|
||||
IndentWrappedFunctionNames: false
|
||||
KeepEmptyLinesAtTheStartOfBlocks: false
|
||||
MaxEmptyLinesToKeep: 1
|
||||
NamespaceIndentation: None
|
||||
ObjCSpaceAfterProperty: false
|
||||
ObjCSpaceBeforeProtocolList: false
|
||||
@@ -96,7 +97,7 @@ TabWidth: 8
|
||||
UseTab: Never
|
||||
QualifierAlignment: Right
|
||||
---
|
||||
Language: JavaScript
|
||||
---
|
||||
Language: Json
|
||||
Language: Proto
|
||||
BasedOnStyle: Google
|
||||
ColumnLimit: 0
|
||||
IndentWidth: 2
|
||||
|
||||
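To see how the updated `.clang-format` settings behave locally, the style can be applied with a plain clang-format run. This is a minimal sketch, not project tooling; the file globs and the sample path are illustrative, and a clang-format recent enough to understand options such as `QualifierAlignment` is assumed.

```shell
# Reformat the tracked C++ sources in place using the repository's .clang-format.
git ls-files '*.cpp' '*.h' '*.ipp' | xargs clang-format -i

# Or verify a single file without modifying it (path is a placeholder):
clang-format --dry-run --Werror path/to/file.cpp
```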
@@ -33,5 +33,6 @@ slack_app: false
|
||||
|
||||
ignore:
|
||||
- "src/test/"
|
||||
- "src/tests/"
|
||||
- "include/xrpl/beast/test/"
|
||||
- "include/xrpl/beast/unit_test/"
|
||||
|
||||
`.config/cspell.config.yaml` (new file, 275 lines)
@@ -0,0 +1,275 @@
|
||||
ignorePaths:
|
||||
- build/**
|
||||
- src/libxrpl/crypto
|
||||
- src/test/** # Will be removed in the future
|
||||
- CMakeUserPresets.json
|
||||
- Doxyfile
|
||||
- docs/**/*.puml
|
||||
- cmake/**
|
||||
- LICENSE.md
|
||||
language: en
|
||||
allowCompoundWords: true
|
||||
ignoreRandomStrings: true
|
||||
minWordLength: 5
|
||||
dictionaries:
|
||||
- cpp
|
||||
- en_US
|
||||
- en_GB
|
||||
ignoreRegExpList:
|
||||
- /[rs][1-9A-HJ-NP-Za-km-z]{25,34}/g # addresses and seeds
|
||||
- /(XRPL|BEAST)_[A-Z_0-9]+_H_INCLUDED+/g # include guards
|
||||
- /(XRPL|BEAST)_[A-Z_0-9]+_H+/g # include guards
|
||||
- /::[a-z:_]+/g # things from other namespaces
|
||||
- /lib[a-z]+/g # libraries
|
||||
- /[0-9]{4}-[0-9]{2}-[0-9]{2}[,:][A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright dates
|
||||
- /[0-9]{4}[,:]?\s*[A-Za-zÀ-ÖØ-öø-ÿ.\s]+/g # copyright years
|
||||
- /\[[A-Za-z0-9-]+\]\(https:\/\/github.com\/[A-Za-z0-9-]+\)/g # Github usernames
|
||||
- /-[DWw][a-zA-Z0-9_-]+=/g # compile flags
|
||||
- /[\['"`]-[DWw][a-zA-Z0-9_-]+['"`\]]/g # compile flags
|
||||
suggestWords:
|
||||
- xprl->xrpl
|
||||
- unsynched->unsynced
|
||||
- synched->synced
|
||||
- synch->sync
|
||||
words:
|
||||
- abempty
|
||||
- AMMID
|
||||
- amt
|
||||
- amts
|
||||
- asnode
|
||||
- asynchrony
|
||||
- attestation
|
||||
- authorises
|
||||
- autobridge
|
||||
- autobridged
|
||||
- autobridging
|
||||
- bimap
|
||||
- bindir
|
||||
- bookdir
|
||||
- Bougalis
|
||||
- Britto
|
||||
- Btrfs
|
||||
- canonicality
|
||||
- checkme
|
||||
- chrono
|
||||
- citardauq
|
||||
- clawback
|
||||
- clawbacks
|
||||
- coeffs
|
||||
- coldwallet
|
||||
- compr
|
||||
- conanfile
|
||||
- conanrun
|
||||
- connectability
|
||||
- coro
|
||||
- coros
|
||||
- cowid
|
||||
- cryptocondition
|
||||
- cryptoconditional
|
||||
- cryptoconditions
|
||||
- csprng
|
||||
- ctid
|
||||
- currenttxhash
|
||||
- daria
|
||||
- dcmake
|
||||
- dearmor
|
||||
- deleteme
|
||||
- demultiplexer
|
||||
- deserializaton
|
||||
- desync
|
||||
- desynced
|
||||
- determ
|
||||
- distro
|
||||
- doxyfile
|
||||
- dxrpl
|
||||
- endmacro
|
||||
- exceptioned
|
||||
- Falco
|
||||
- finalizers
|
||||
- firewalled
|
||||
- fmtdur
|
||||
- funclets
|
||||
- gcov
|
||||
- gcovr
|
||||
- ghead
|
||||
- Gnutella
|
||||
- gpgcheck
|
||||
- gpgkey
|
||||
- hotwallet
|
||||
- ifndef
|
||||
- inequation
|
||||
- insuf
|
||||
- insuff
|
||||
- iou
|
||||
- ious
|
||||
- isrdc
|
||||
- jemalloc
|
||||
- jlog
|
||||
- keylet
|
||||
- keylets
|
||||
- keyvadb
|
||||
- ledgerentry
|
||||
- ledgerhash
|
||||
- ledgerindex
|
||||
- leftw
|
||||
- legleux
|
||||
- levelization
|
||||
- levelized
|
||||
- libpb
|
||||
- libxrpl
|
||||
- llection
|
||||
- LOCALGOOD
|
||||
- logwstream
|
||||
- lseq
|
||||
- lsmf
|
||||
- ltype
|
||||
- MEMORYSTATUSEX
|
||||
- Merkle
|
||||
- Metafuncton
|
||||
- misprediction
|
||||
- mptbalance
|
||||
- mptflags
|
||||
- mptid
|
||||
- mptissuance
|
||||
- mptissuanceid
|
||||
- mptoken
|
||||
- mptokenid
|
||||
- mptokenissuance
|
||||
- mptokens
|
||||
- mpts
|
||||
- multisig
|
||||
- multisign
|
||||
- multisigned
|
||||
- Nakamoto
|
||||
- nftid
|
||||
- nftoffer
|
||||
- nftoken
|
||||
- nftokenid
|
||||
- nftokenpages
|
||||
- nftokens
|
||||
- nftpage
|
||||
- nikb
|
||||
- nonxrp
|
||||
- noripple
|
||||
- nudb
|
||||
- nullptr
|
||||
- nunl
|
||||
- Nyffenegger
|
||||
- ostr
|
||||
- partitioner
|
||||
- paychan
|
||||
- paychans
|
||||
- permdex
|
||||
- perminute
|
||||
- permissioned
|
||||
- pointee
|
||||
- preauth
|
||||
- preauthorization
|
||||
- preauthorize
|
||||
- preauthorizes
|
||||
- preclaim
|
||||
- protobuf
|
||||
- protos
|
||||
- ptrs
|
||||
- pyenv
|
||||
- qalloc
|
||||
- queuable
|
||||
- Raphson
|
||||
- replayer
|
||||
- rerere
|
||||
- retriable
|
||||
- RIPD
|
||||
- ripdtop
|
||||
- rippleci
|
||||
- rippled
|
||||
- ripplerpc
|
||||
- rippletest
|
||||
- RLUSD
|
||||
- rngfill
|
||||
- rocksdb
|
||||
- Rohrs
|
||||
- roundings
|
||||
- sahyadri
|
||||
- Satoshi
|
||||
- secp
|
||||
- sendq
|
||||
- seqit
|
||||
- sf
|
||||
- shamap
|
||||
- shamapitem
|
||||
- sidechain
|
||||
- SIGGOOD
|
||||
- sle
|
||||
- sles
|
||||
- soci
|
||||
- socidb
|
||||
- sslws
|
||||
- statsd
|
||||
- STATSDCOLLECTOR
|
||||
- stissue
|
||||
- stnum
|
||||
- stobj
|
||||
- stobject
|
||||
- stpath
|
||||
- stpathset
|
||||
- sttx
|
||||
- stvar
|
||||
- stvector
|
||||
- stxchainattestations
|
||||
- superpeer
|
||||
- superpeers
|
||||
- takergets
|
||||
- takerpays
|
||||
- ters
|
||||
- TMEndpointv2
|
||||
- trixie
|
||||
- tx
|
||||
- txid
|
||||
- txids
|
||||
- txjson
|
||||
- txn
|
||||
- txns
|
||||
- txs
|
||||
- umant
|
||||
- unacquired
|
||||
- unambiguity
|
||||
- unauthorizes
|
||||
- unauthorizing
|
||||
- unergonomic
|
||||
- unfetched
|
||||
- unflatten
|
||||
- unfund
|
||||
- unimpair
|
||||
- unroutable
|
||||
- unscalable
|
||||
- unserviced
|
||||
- unshareable
|
||||
- unshares
|
||||
- unsquelch
|
||||
- unsquelched
|
||||
- unsquelching
|
||||
- unvalidated
|
||||
- unveto
|
||||
- unvetoed
|
||||
- upvotes
|
||||
- USDB
|
||||
- variadics
|
||||
- venv
|
||||
- vfalco
|
||||
- vinnie
|
||||
- wasmi
|
||||
- wextra
|
||||
- wptr
|
||||
- writeme
|
||||
- wsrch
|
||||
- wthread
|
||||
- xbridge
|
||||
- xchain
|
||||
- ximinez
|
||||
- XMACRO
|
||||
- xrpkuwait
|
||||
- xrpl
|
||||
- xrpld
|
||||
- xrplf
|
||||
- xxhash
|
||||
- xxhasher
|
||||
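The new cspell configuration above drives spell checking for the repository. A minimal local run might look like the following, assuming the `cspell` CLI is available through npm; the globs are illustrative rather than taken from project tooling.

```shell
# Spell-check Markdown and C++ sources against the configuration above.
npx cspell lint --config .config/cspell.config.yaml \
    "**/*.md" "src/**/*.{h,cpp}" "include/**/*.h"
```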
@@ -12,3 +12,5 @@ fe9a5365b8a52d4acc42eb27369247e6f238a4f9
|
||||
9a93577314e6a8d4b4a8368cc9d2b15a5d8303e8
|
||||
552377c76f55b403a1c876df873a23d780fcc81c
|
||||
97f0747e103f13e26e45b731731059b32f7679ac
|
||||
b13370ac0d207217354f1fc1c29aef87769fb8a1
|
||||
896b8c3b54a22b0497cb0d1ce95e1095f9a227ce
|
||||
|
||||
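This hunk appends two full-length commit hashes to what appears to be a blame-ignore list (for example a `.git-blame-ignore-revs` file; the exact filename is not shown in this hunk, so treat it as an assumption). If that is the case, git consumes such a file like this:

```shell
# Tell git blame to skip the bulk formatting commits listed above.
git config blame.ignoreRevsFile .git-blame-ignore-revs

# Subsequent blame runs then attribute lines past those revisions (path is a placeholder).
git blame path/to/file.cpp
```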
`.gitattributes` (vendored, 5 lines changed)
@@ -1,9 +1,6 @@
|
||||
# Set default behaviour, in case users don't have core.autocrlf set.
|
||||
#* text=auto
|
||||
|
||||
# These annoying files
|
||||
rippled.1 binary
|
||||
LICENSE binary
|
||||
# cspell: disable
|
||||
|
||||
# Visual Studio
|
||||
*.sln text eol=crlf
|
||||
|
||||
`.github/CODEOWNERS` (vendored, 6 lines changed)
@@ -1,8 +1,2 @@
|
||||
# Allow anyone to review any change by default.
|
||||
*
|
||||
|
||||
# Require the rpc-reviewers team to review changes to the rpc code.
|
||||
include/xrpl/protocol/ @xrplf/rpc-reviewers
|
||||
src/libxrpl/protocol/ @xrplf/rpc-reviewers
|
||||
src/xrpld/rpc/ @xrplf/rpc-reviewers
|
||||
src/xrpld/app/misc/ @xrplf/rpc-reviewers
|
||||
|
||||
`.github/ISSUE_TEMPLATE/bug_report.md` (vendored, 6 lines changed)
@@ -1,7 +1,7 @@
|
||||
---
|
||||
name: Bug Report
|
||||
about: Create a report to help us improve rippled
|
||||
title: "[Title with short description] (Version: [rippled version])"
|
||||
about: Create a report to help us improve xrpld
|
||||
title: "[Title with short description] (Version: [xrpld version])"
|
||||
labels: ""
|
||||
assignees: ""
|
||||
---
|
||||
@@ -27,7 +27,7 @@ assignees: ""
|
||||
## Environment
|
||||
|
||||
<!--Please describe your environment setup (such as Ubuntu 18.04 with Boost 1.70).-->
|
||||
<!-- If you are using a formal release, please use the version returned by './rippled --version' as the version number-->
|
||||
<!-- If you are using a formal release, please use the version returned by './xrpld --version' as the version number-->
|
||||
<!-- If you are working off of develop, please add the git hash via 'git rev-parse HEAD'-->
|
||||
|
||||
## Supporting Files
|
||||
|
||||
`.github/actions/build-deps/action.yml` (vendored, new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
name: Build Conan dependencies
|
||||
description: "Install Conan dependencies, optionally forcing a rebuild of all dependencies."
|
||||
|
||||
# Note that actions do not support 'type' and all inputs are strings, see
|
||||
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
|
||||
inputs:
|
||||
build_type:
|
||||
description: 'The build type to use ("Debug", "Release").'
|
||||
required: true
|
||||
build_nproc:
|
||||
description: "The number of processors to use for building."
|
||||
required: true
|
||||
force_build:
|
||||
description: 'Force building of all dependencies ("true", "false").'
|
||||
required: false
|
||||
default: "false"
|
||||
log_verbosity:
|
||||
description: "The logging verbosity."
|
||||
required: false
|
||||
default: "verbose"
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Install Conan dependencies
|
||||
shell: bash
|
||||
env:
|
||||
BUILD_NPROC: ${{ inputs.build_nproc }}
|
||||
BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
|
||||
BUILD_TYPE: ${{ inputs.build_type }}
|
||||
LOG_VERBOSITY: ${{ inputs.log_verbosity }}
|
||||
run: |
|
||||
echo 'Installing dependencies.'
|
||||
conan install \
|
||||
--build="${BUILD_OPTION}" \
|
||||
--options:host='&:tests=True' \
|
||||
--options:host='&:xrpld=True' \
|
||||
--settings:all build_type="${BUILD_TYPE}" \
|
||||
--conf:all tools.build:jobs=${BUILD_NPROC} \
|
||||
--conf:all tools.build:verbosity="${LOG_VERBOSITY}" \
|
||||
--conf:all tools.compilation:verbosity="${LOG_VERBOSITY}" \
|
||||
.
|
||||
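For reference, the composite action above boils down to a single `conan install` invocation. It can be reproduced by hand from a checkout; the concrete values below (Debug build type, `missing` build policy, `$(nproc)` jobs, verbose logging) are illustrative stand-ins for the action's inputs.

```shell
# Roughly what .github/actions/build-deps runs, with its inputs expanded by hand.
conan install \
  --build="missing" \
  --options:host='&:tests=True' \
  --options:host='&:xrpld=True' \
  --settings:all build_type=Debug \
  --conf:all tools.build:jobs="$(nproc)" \
  --conf:all tools.build:verbosity=verbose \
  --conf:all tools.compilation:verbosity=verbose \
  .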
`.github/actions/build/action.yml` (vendored, 34 lines removed)
@@ -1,34 +0,0 @@
|
||||
name: build
|
||||
inputs:
|
||||
generator:
|
||||
default: null
|
||||
configuration:
|
||||
required: true
|
||||
cmake-args:
|
||||
default: null
|
||||
cmake-target:
|
||||
default: all
|
||||
# An implicit input is the environment variable `build_dir`.
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: configure
|
||||
shell: bash
|
||||
run: |
|
||||
cd ${build_dir}
|
||||
cmake \
|
||||
${{ inputs.generator && format('-G "{0}"', inputs.generator) || '' }} \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
|
||||
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
|
||||
-Dtests=TRUE \
|
||||
-Dxrpld=TRUE \
|
||||
${{ inputs.cmake-args }} \
|
||||
..
|
||||
- name: build
|
||||
shell: bash
|
||||
run: |
|
||||
cmake \
|
||||
--build ${build_dir} \
|
||||
--config ${{ inputs.configuration }} \
|
||||
--parallel ${NUM_PROCESSORS:-$(nproc)} \
|
||||
--target ${{ inputs.cmake-target }}
|
||||
`.github/actions/dependencies/action.yml` (vendored, 38 lines removed)
@@ -1,38 +0,0 @@
|
||||
name: dependencies
|
||||
inputs:
|
||||
configuration:
|
||||
required: true
|
||||
# Implicit inputs are the environment variables `build_dir`, CONAN_REMOTE_URL,
|
||||
# CONAN_REMOTE_USERNAME, and CONAN_REMOTE_PASSWORD. The latter two are only
|
||||
# used to upload newly built dependencies to the Conan remote.
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: add Conan remote
|
||||
if: ${{ env.CONAN_REMOTE_URL != '' }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Adding Conan remote 'xrplf' at ${{ env.CONAN_REMOTE_URL }}."
|
||||
conan remote add --index 0 --force xrplf ${{ env.CONAN_REMOTE_URL }}
|
||||
echo "Listing Conan remotes."
|
||||
conan remote list
|
||||
- name: install dependencies
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p ${{ env.build_dir }}
|
||||
cd ${{ env.build_dir }}
|
||||
conan install \
|
||||
--output-folder . \
|
||||
--build missing \
|
||||
--options:host "&:tests=True" \
|
||||
--options:host "&:xrpld=True" \
|
||||
--settings:all build_type=${{ inputs.configuration }} \
|
||||
..
|
||||
- name: upload dependencies
|
||||
if: ${{ env.CONAN_REMOTE_URL != '' && env.CONAN_REMOTE_USERNAME != '' && env.CONAN_REMOTE_PASSWORD != '' && github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Logging into Conan remote 'xrplf' at ${{ env.CONAN_REMOTE_URL }}."
|
||||
conan remote login xrplf "${{ env.CONAN_REMOTE_USERNAME }}" --password "${{ env.CONAN_REMOTE_PASSWORD }}"
|
||||
echo "Uploading dependencies."
|
||||
conan upload '*' --confirm --check --remote xrplf
|
||||
`.github/actions/print-env/action.yml` (vendored, new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
name: Print build environment
|
||||
description: "Print environment and some tooling versions"
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Check configuration (Windows)
|
||||
if: ${{ runner.os == 'Windows' }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Checking environment variables.'
|
||||
set
|
||||
|
||||
- name: Check configuration (Linux and macOS)
|
||||
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Checking path.'
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
|
||||
echo 'Checking environment variables.'
|
||||
env | sort
|
||||
|
||||
echo 'Checking compiler version.'
|
||||
${{ runner.os == 'Linux' && '${CC}' || 'clang' }} --version
|
||||
|
||||
echo 'Checking Ninja version.'
|
||||
ninja --version
|
||||
|
||||
echo 'Checking nproc version.'
|
||||
nproc --version
|
||||
|
||||
- name: Check configuration (all)
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Checking Ccache version.'
|
||||
ccache --version
|
||||
|
||||
echo 'Checking CMake version.'
|
||||
cmake --version
|
||||
|
||||
echo 'Checking Conan version.'
|
||||
conan --version
|
||||
`.github/actions/setup-conan/action.yml` (vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
name: Setup Conan
|
||||
description: "Set up Conan configuration, profile, and remote."
|
||||
|
||||
inputs:
|
||||
conan_remote_name:
|
||||
description: "The name of the Conan remote to use."
|
||||
required: false
|
||||
default: xrplf
|
||||
conan_remote_url:
|
||||
description: "The URL of the Conan endpoint to use."
|
||||
required: false
|
||||
default: https://conan.ripplex.io
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
|
||||
steps:
|
||||
- name: Set up Conan configuration
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Installing configuration.'
|
||||
cat conan/global.conf ${{ runner.os == 'Linux' && '>>' || '>' }} $(conan config home)/global.conf
|
||||
|
||||
echo 'Conan configuration:'
|
||||
conan config show '*'
|
||||
|
||||
- name: Set up Conan profile
|
||||
shell: bash
|
||||
run: |
|
||||
echo 'Installing profile.'
|
||||
conan config install conan/profiles/default -tf $(conan config home)/profiles/
|
||||
|
||||
echo 'Conan profile:'
|
||||
conan profile show
|
||||
|
||||
- name: Set up Conan remote
|
||||
shell: bash
|
||||
env:
|
||||
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||
CONAN_REMOTE_URL: ${{ inputs.conan_remote_url }}
|
||||
run: |
|
||||
echo "Adding Conan remote '${CONAN_REMOTE_NAME}' at '${CONAN_REMOTE_URL}'."
|
||||
conan remote add --index 0 --force "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_URL}"
|
||||
|
||||
echo 'Listing Conan remotes.'
|
||||
conan remote list
|
||||
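Outside of CI, the same Conan setup can be reproduced from a checkout with three commands. This is a sketch of the steps above; the remote name and URL are simply the action's defaults.

```shell
# Append the repository's global.conf (the action overwrites instead of
# appending on non-Linux runners), install the default profile, and
# register the xrplf remote.
cat conan/global.conf >> "$(conan config home)/global.conf"
conan config install conan/profiles/default -tf "$(conan config home)/profiles/"
conan remote add --index 0 --force xrplf https://conan.ripplex.io
```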
@@ -3,21 +3,26 @@
|
||||
Levelization is the term used to describe efforts to prevent rippled from
|
||||
having or creating cyclic dependencies.
|
||||
|
||||
rippled code is organized into directories under `src/rippled` (and
|
||||
rippled code is organized into directories under `src/xrpld`, `src/libxrpl` (and
|
||||
`src/test`) representing modules. The modules are intended to be
|
||||
organized into "tiers" or "levels" such that a module from one level can
|
||||
only include code from lower levels. Additionally, a module
|
||||
in one level should never include code in an `impl` folder of any level
|
||||
in one level should never include code in an `impl` or `detail` folder of any level
|
||||
other than its own.
|
||||
|
||||
The codebase is split into two main areas:
|
||||
|
||||
- **libxrpl** (`src/libxrpl`, `include/xrpl`): Reusable library modules with public interfaces
|
||||
- **xrpld** (`src/xrpld`): Application-specific implementation code
|
||||
|
||||
Unfortunately, over time, enforcement of levelization has been
|
||||
inconsistent, so the current state of the code doesn't necessarily
|
||||
reflect these rules. Whenever possible, developers should refactor any
|
||||
levelization violations they find (by moving files or individual
|
||||
classes). At the very least, don't make things worse.
|
||||
|
||||
The table below summarizes the _desired_ division of modules, based on the
|
||||
state of the rippled code when it was created. The levels are numbered from
|
||||
The table below summarizes the _desired_ division of modules, based on the current
|
||||
state of the rippled code. The levels are numbered from
|
||||
the bottom up with the lower level, lower numbered, more independent
|
||||
modules listed first, and the higher level, higher numbered modules with
|
||||
more dependencies listed later.
|
||||
@@ -25,18 +30,33 @@ more dependencies listed later.
|
||||
**tl;dr:** The modules listed first are more independent than the modules
|
||||
listed later.
|
||||
|
||||
## libxrpl Modules (Reusable Libraries)
|
||||
|
||||
| Level / Tier | Module(s) |
|
||||
| ------------ | ----------------------------------- |
|
||||
| 01 | xrpl/beast |
|
||||
| 02 | xrpl/basics |
|
||||
| 03 | xrpl/json xrpl/crypto |
|
||||
| 04 | xrpl/protocol |
|
||||
| 05 | xrpl/core xrpl/resource xrpl/server |
|
||||
| 06 | xrpl/ledger xrpl/nodestore xrpl/net |
|
||||
| 07 | xrpl/shamap |
|
||||
|
||||
## xrpld Modules (Application Implementation)
|
||||
|
||||
| Level / Tier | Module(s) |
|
||||
| ------------ | -------------------------------- |
|
||||
| 05 | xrpld/conditions xrpld/consensus |
|
||||
| 06 | xrpld/core xrpld/peerfinder |
|
||||
| 07 | xrpld/shamap xrpld/overlay |
|
||||
| 08 | xrpld/app |
|
||||
| 09 | xrpld/rpc |
|
||||
| 10 | xrpld/perflog |
|
||||
|
||||
## Test Modules
|
||||
|
||||
| Level / Tier | Module(s) |
|
||||
| ------------ | -------------------------------------------------------------------------------------------------------- |
|
||||
| 01 | ripple/beast ripple/unity |
|
||||
| 02 | ripple/basics |
|
||||
| 03 | ripple/json ripple/crypto |
|
||||
| 04 | ripple/protocol |
|
||||
| 05 | ripple/core ripple/conditions ripple/consensus ripple/resource ripple/server |
|
||||
| 06 | ripple/peerfinder ripple/ledger ripple/nodestore ripple/net |
|
||||
| 07 | ripple/shamap ripple/overlay |
|
||||
| 08 | ripple/app |
|
||||
| 09 | ripple/rpc |
|
||||
| 10 | ripple/perflog |
|
||||
| 11 | test/jtx test/beast test/csf |
|
||||
| 12 | test/unit_test |
|
||||
| 13 | test/crypto test/conditions test/json test/resource test/shamap test/peerfinder test/basics test/overlay |
|
||||
@@ -45,12 +65,12 @@ listed later.
|
||||
| 16 | test/rpc test/app |
|
||||
|
||||
(Note that `test` levelization is _much_ less important and _much_ less
|
||||
strictly enforced than `ripple` levelization, other than the requirement
|
||||
that `test` code should _never_ be included in `ripple` code.)
|
||||
strictly enforced than `xrpl`/`xrpld` levelization, other than the requirement
|
||||
that `test` code should _never_ be included in `xrpl` or `xrpld` code.)
|
||||
|
||||
## Validation
|
||||
|
||||
The [levelization.sh](levelization.sh) script takes no parameters,
|
||||
The [levelization](generate.sh) script takes no parameters,
|
||||
reads no environment variables, and can be run from any directory,
|
||||
as long as it is in the expected location in the rippled repo.
|
||||
It can be run at any time from within a checked out repo, and will
|
||||
@@ -61,10 +81,10 @@ It generates many files of [results](results):
|
||||
|
||||
- `rawincludes.txt`: The raw dump of the `#includes`
|
||||
- `paths.txt`: A second dump grouping the source module
|
||||
to the destination module, deduped, and with frequency counts.
|
||||
to the destination module, de-duped, and with frequency counts.
|
||||
- `includes/`: A directory where each file represents a module and
|
||||
contains a list of modules and counts that the module _includes_.
|
||||
- `includedby/`: Similar to `includes/`, but the other way around. Each
|
||||
- `included_by/`: Similar to `includes/`, but the other way around. Each
|
||||
file represents a module and contains a list of modules and counts
|
||||
that _include_ the module.
|
||||
- [`loops.txt`](results/loops.txt): A list of direct loops detected
|
||||
@@ -72,15 +92,15 @@ It generates many files of [results](results):
|
||||
desired as described above. In a perfect repo, this file will be
|
||||
empty.
|
||||
This file is committed to the repo, and is used by the [levelization
|
||||
Github workflow](../../.github/workflows/levelization.yml) to validate
|
||||
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
|
||||
that nothing changed.
|
||||
- [`ordering.txt`](results/ordering.txt): A list showing relationships
|
||||
between modules where there are no loops as they actually exist, as
|
||||
opposed to how they are desired as described above.
|
||||
This file is committed to the repo, and is used by the [levelization
|
||||
Github workflow](../../.github/workflows/levelization.yml) to validate
|
||||
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
|
||||
that nothing changed.
|
||||
- [`levelization.yml`](../../.github/workflows/levelization.yml)
|
||||
- [`levelization.yml`](../../workflows/reusable-check-levelization.yml)
|
||||
Github Actions workflow to test that levelization loops haven't
|
||||
changed. Unfortunately, if changes are detected, it can't tell if
|
||||
they are improvements or not, so if you have resolved any issues or
|
||||
@@ -111,4 +131,4 @@ get those details locally.
|
||||
1. Run `levelization.sh`
|
||||
2. Grep the modules in `paths.txt`.
|
||||
- For example, if a cycle is found `A ~= B`, simply `grep -w
|
||||
A Builds/levelization/results/paths.txt | grep -w B`
|
||||
A .github/scripts/levelization/results/paths.txt | grep -w B`
|
||||
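Putting those two steps together, a quick local check might look like the following, assuming the script and its results live under `.github/scripts/levelization` as the path above suggests, and using one of the known cycles (`xrpld.app` / `xrpld.rpc`) as the example pair.

```shell
# Regenerate the levelization results, then inspect one suspected cycle.
cd .github/scripts/levelization
./generate.sh
grep -w xrpld.app results/paths.txt | grep -w xrpld.rpc
```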
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Usage: levelization.sh
|
||||
# Usage: generate.sh
|
||||
# This script takes no parameters, reads no environment variables,
|
||||
# and can be run from any directory, as long as it is in the expected
|
||||
# location in the repo.
|
||||
@@ -19,7 +19,7 @@ export LANG=C
|
||||
rm -rfv results
|
||||
mkdir results
|
||||
includes="$( pwd )/results/rawincludes.txt"
|
||||
pushd ../..
|
||||
pushd ../../..
|
||||
echo Raw includes:
|
||||
grep -r '^[ ]*#include.*/.*\.h' include src | \
|
||||
grep -v boost | tee ${includes}
|
||||
@@ -29,7 +29,7 @@ pushd results
|
||||
oldifs=${IFS}
|
||||
IFS=:
|
||||
mkdir includes
|
||||
mkdir includedby
|
||||
mkdir included_by
|
||||
echo Build levelization paths
|
||||
exec 3< ${includes} # open rawincludes.txt for input
|
||||
while read -r -u 3 file include
|
||||
@@ -59,7 +59,7 @@ do
|
||||
echo $level $includelevel | tee -a paths.txt
|
||||
fi
|
||||
done
|
||||
echo Sort and dedup paths
|
||||
echo Sort and deduplicate paths
|
||||
sort -ds paths.txt | uniq -c | tee sortedpaths.txt
|
||||
mv sortedpaths.txt paths.txt
|
||||
exec 3>&- #close fd 3
|
||||
@@ -71,7 +71,7 @@ exec 4<paths.txt # open paths.txt for input
|
||||
while read -r -u 4 count level include
|
||||
do
|
||||
echo ${include} ${count} | tee -a includes/${level}
|
||||
echo ${level} ${count} | tee -a includedby/${include}
|
||||
echo ${level} ${count} | tee -a included_by/${include}
|
||||
done
|
||||
exec 4>&- #close fd 4
|
||||
|
||||
@@ -7,12 +7,6 @@ Loop: test.jtx test.unit_test
|
||||
Loop: xrpld.app xrpld.core
|
||||
xrpld.app > xrpld.core
|
||||
|
||||
Loop: xrpld.app xrpld.ledger
|
||||
xrpld.app > xrpld.ledger
|
||||
|
||||
Loop: xrpld.app xrpld.net
|
||||
xrpld.app > xrpld.net
|
||||
|
||||
Loop: xrpld.app xrpld.overlay
|
||||
xrpld.overlay > xrpld.app
|
||||
|
||||
@@ -23,20 +17,8 @@ Loop: xrpld.app xrpld.rpc
|
||||
xrpld.rpc > xrpld.app
|
||||
|
||||
Loop: xrpld.app xrpld.shamap
|
||||
xrpld.app > xrpld.shamap
|
||||
|
||||
Loop: xrpld.core xrpld.net
|
||||
xrpld.net > xrpld.core
|
||||
|
||||
Loop: xrpld.core xrpld.perflog
|
||||
xrpld.perflog == xrpld.core
|
||||
|
||||
Loop: xrpld.net xrpld.rpc
|
||||
xrpld.rpc ~= xrpld.net
|
||||
xrpld.shamap ~= xrpld.app
|
||||
|
||||
Loop: xrpld.overlay xrpld.rpc
|
||||
xrpld.rpc ~= xrpld.overlay
|
||||
|
||||
Loop: xrpld.perflog xrpld.rpc
|
||||
xrpld.rpc ~= xrpld.perflog
|
||||
|
||||
@@ -1,7 +1,19 @@
|
||||
libxrpl.basics > xrpl.basics
|
||||
libxrpl.core > xrpl.basics
|
||||
libxrpl.core > xrpl.core
|
||||
libxrpl.crypto > xrpl.basics
|
||||
libxrpl.json > xrpl.basics
|
||||
libxrpl.json > xrpl.json
|
||||
libxrpl.ledger > xrpl.basics
|
||||
libxrpl.ledger > xrpl.json
|
||||
libxrpl.ledger > xrpl.ledger
|
||||
libxrpl.ledger > xrpl.protocol
|
||||
libxrpl.net > xrpl.basics
|
||||
libxrpl.net > xrpl.net
|
||||
libxrpl.nodestore > xrpl.basics
|
||||
libxrpl.nodestore > xrpl.json
|
||||
libxrpl.nodestore > xrpl.nodestore
|
||||
libxrpl.nodestore > xrpl.protocol
|
||||
libxrpl.protocol > xrpl.basics
|
||||
libxrpl.protocol > xrpl.json
|
||||
libxrpl.protocol > xrpl.protocol
|
||||
@@ -12,24 +24,28 @@ libxrpl.server > xrpl.basics
|
||||
libxrpl.server > xrpl.json
|
||||
libxrpl.server > xrpl.protocol
|
||||
libxrpl.server > xrpl.server
|
||||
libxrpl.shamap > xrpl.basics
|
||||
libxrpl.shamap > xrpl.protocol
|
||||
libxrpl.shamap > xrpl.shamap
|
||||
test.app > test.jtx
|
||||
test.app > test.rpc
|
||||
test.app > test.toplevel
|
||||
test.app > test.unit_test
|
||||
test.app > xrpl.basics
|
||||
test.app > xrpl.core
|
||||
test.app > xrpld.app
|
||||
test.app > xrpld.core
|
||||
test.app > xrpld.ledger
|
||||
test.app > xrpld.nodestore
|
||||
test.app > xrpld.overlay
|
||||
test.app > xrpld.rpc
|
||||
test.app > xrpl.json
|
||||
test.app > xrpl.ledger
|
||||
test.app > xrpl.nodestore
|
||||
test.app > xrpl.protocol
|
||||
test.app > xrpl.resource
|
||||
test.basics > test.jtx
|
||||
test.basics > test.unit_test
|
||||
test.basics > xrpl.basics
|
||||
test.basics > xrpld.perflog
|
||||
test.basics > xrpl.core
|
||||
test.basics > xrpld.rpc
|
||||
test.basics > xrpl.json
|
||||
test.basics > xrpl.protocol
|
||||
@@ -42,14 +58,14 @@ test.consensus > test.unit_test
|
||||
test.consensus > xrpl.basics
|
||||
test.consensus > xrpld.app
|
||||
test.consensus > xrpld.consensus
|
||||
test.consensus > xrpld.ledger
|
||||
test.consensus > xrpl.json
|
||||
test.consensus > xrpl.ledger
|
||||
test.core > test.jtx
|
||||
test.core > test.toplevel
|
||||
test.core > test.unit_test
|
||||
test.core > xrpl.basics
|
||||
test.core > xrpl.core
|
||||
test.core > xrpld.core
|
||||
test.core > xrpld.perflog
|
||||
test.core > xrpl.json
|
||||
test.core > xrpl.server
|
||||
test.csf > xrpl.basics
|
||||
@@ -61,10 +77,10 @@ test.json > xrpl.json
|
||||
test.jtx > xrpl.basics
|
||||
test.jtx > xrpld.app
|
||||
test.jtx > xrpld.core
|
||||
test.jtx > xrpld.ledger
|
||||
test.jtx > xrpld.net
|
||||
test.jtx > xrpld.rpc
|
||||
test.jtx > xrpl.json
|
||||
test.jtx > xrpl.ledger
|
||||
test.jtx > xrpl.net
|
||||
test.jtx > xrpl.protocol
|
||||
test.jtx > xrpl.resource
|
||||
test.jtx > xrpl.server
|
||||
@@ -73,15 +89,14 @@ test.ledger > test.toplevel
|
||||
test.ledger > xrpl.basics
|
||||
test.ledger > xrpld.app
|
||||
test.ledger > xrpld.core
|
||||
test.ledger > xrpld.ledger
|
||||
test.ledger > xrpl.ledger
|
||||
test.ledger > xrpl.protocol
|
||||
test.nodestore > test.jtx
|
||||
test.nodestore > test.toplevel
|
||||
test.nodestore > test.unit_test
|
||||
test.nodestore > xrpl.basics
|
||||
test.nodestore > xrpld.core
|
||||
test.nodestore > xrpld.nodestore
|
||||
test.nodestore > xrpld.unity
|
||||
test.nodestore > xrpl.nodestore
|
||||
test.overlay > test.jtx
|
||||
test.overlay > test.toplevel
|
||||
test.overlay > test.unit_test
|
||||
@@ -89,8 +104,8 @@ test.overlay > xrpl.basics
|
||||
test.overlay > xrpld.app
|
||||
test.overlay > xrpld.overlay
|
||||
test.overlay > xrpld.peerfinder
|
||||
test.overlay > xrpld.shamap
|
||||
test.overlay > xrpl.protocol
|
||||
test.overlay > xrpl.shamap
|
||||
test.peerfinder > test.beast
|
||||
test.peerfinder > test.unit_test
|
||||
test.peerfinder > xrpl.basics
|
||||
@@ -107,9 +122,9 @@ test.resource > xrpl.resource
|
||||
test.rpc > test.jtx
|
||||
test.rpc > test.toplevel
|
||||
test.rpc > xrpl.basics
|
||||
test.rpc > xrpl.core
|
||||
test.rpc > xrpld.app
|
||||
test.rpc > xrpld.core
|
||||
test.rpc > xrpld.net
|
||||
test.rpc > xrpld.overlay
|
||||
test.rpc > xrpld.rpc
|
||||
test.rpc > xrpl.json
|
||||
@@ -126,14 +141,23 @@ test.server > xrpl.json
|
||||
test.server > xrpl.server
|
||||
test.shamap > test.unit_test
|
||||
test.shamap > xrpl.basics
|
||||
test.shamap > xrpld.nodestore
|
||||
test.shamap > xrpld.shamap
|
||||
test.shamap > xrpl.nodestore
|
||||
test.shamap > xrpl.protocol
|
||||
test.shamap > xrpl.shamap
|
||||
test.toplevel > test.csf
|
||||
test.toplevel > xrpl.json
|
||||
test.unit_test > xrpl.basics
|
||||
tests.libxrpl > xrpl.basics
|
||||
tests.libxrpl > xrpl.json
|
||||
tests.libxrpl > xrpl.net
|
||||
xrpl.core > xrpl.basics
|
||||
xrpl.core > xrpl.json
|
||||
xrpl.json > xrpl.basics
|
||||
xrpl.ledger > xrpl.basics
|
||||
xrpl.ledger > xrpl.protocol
|
||||
xrpl.net > xrpl.basics
|
||||
xrpl.nodestore > xrpl.basics
|
||||
xrpl.nodestore > xrpl.protocol
|
||||
xrpl.protocol > xrpl.basics
|
||||
xrpl.protocol > xrpl.json
|
||||
xrpl.resource > xrpl.basics
|
||||
@@ -142,39 +166,35 @@ xrpl.resource > xrpl.protocol
|
||||
xrpl.server > xrpl.basics
|
||||
xrpl.server > xrpl.json
|
||||
xrpl.server > xrpl.protocol
|
||||
xrpl.shamap > xrpl.basics
|
||||
xrpl.shamap > xrpl.nodestore
|
||||
xrpl.shamap > xrpl.protocol
|
||||
xrpld.app > test.unit_test
|
||||
xrpld.app > xrpl.basics
|
||||
xrpld.app > xrpl.core
|
||||
xrpld.app > xrpld.conditions
|
||||
xrpld.app > xrpld.consensus
|
||||
xrpld.app > xrpld.nodestore
|
||||
xrpld.app > xrpld.perflog
|
||||
xrpld.app > xrpl.json
|
||||
xrpld.app > xrpl.ledger
|
||||
xrpld.app > xrpl.net
|
||||
xrpld.app > xrpl.nodestore
|
||||
xrpld.app > xrpl.protocol
|
||||
xrpld.app > xrpl.resource
|
||||
xrpld.app > xrpl.shamap
|
||||
xrpld.conditions > xrpl.basics
|
||||
xrpld.conditions > xrpl.protocol
|
||||
xrpld.consensus > xrpl.basics
|
||||
xrpld.consensus > xrpl.json
|
||||
xrpld.consensus > xrpl.protocol
|
||||
xrpld.core > xrpl.basics
|
||||
xrpld.core > xrpl.core
|
||||
xrpld.core > xrpl.json
|
||||
xrpld.core > xrpl.net
|
||||
xrpld.core > xrpl.protocol
|
||||
xrpld.ledger > xrpl.basics
|
||||
xrpld.ledger > xrpl.json
|
||||
xrpld.ledger > xrpl.protocol
|
||||
xrpld.net > xrpl.basics
|
||||
xrpld.net > xrpl.json
|
||||
xrpld.net > xrpl.protocol
|
||||
xrpld.net > xrpl.resource
|
||||
xrpld.nodestore > xrpl.basics
|
||||
xrpld.nodestore > xrpld.core
|
||||
xrpld.nodestore > xrpld.unity
|
||||
xrpld.nodestore > xrpl.json
|
||||
xrpld.nodestore > xrpl.protocol
|
||||
xrpld.overlay > xrpl.basics
|
||||
xrpld.overlay > xrpl.core
|
||||
xrpld.overlay > xrpld.core
|
||||
xrpld.overlay > xrpld.peerfinder
|
||||
xrpld.overlay > xrpld.perflog
|
||||
xrpld.overlay > xrpl.json
|
||||
xrpld.overlay > xrpl.protocol
|
||||
xrpld.overlay > xrpl.resource
|
||||
@@ -183,15 +203,17 @@ xrpld.peerfinder > xrpl.basics
|
||||
xrpld.peerfinder > xrpld.core
|
||||
xrpld.peerfinder > xrpl.protocol
|
||||
xrpld.perflog > xrpl.basics
|
||||
xrpld.perflog > xrpl.core
|
||||
xrpld.perflog > xrpld.rpc
|
||||
xrpld.perflog > xrpl.json
|
||||
xrpld.rpc > xrpl.basics
|
||||
xrpld.rpc > xrpl.core
|
||||
xrpld.rpc > xrpld.core
|
||||
xrpld.rpc > xrpld.ledger
|
||||
xrpld.rpc > xrpld.nodestore
|
||||
xrpld.rpc > xrpl.json
|
||||
xrpld.rpc > xrpl.ledger
|
||||
xrpld.rpc > xrpl.net
|
||||
xrpld.rpc > xrpl.nodestore
|
||||
xrpld.rpc > xrpl.protocol
|
||||
xrpld.rpc > xrpl.resource
|
||||
xrpld.rpc > xrpl.server
|
||||
xrpld.shamap > xrpl.basics
|
||||
xrpld.shamap > xrpld.nodestore
|
||||
xrpld.shamap > xrpl.protocol
|
||||
xrpld.shamap > xrpl.shamap
|
||||
`.github/scripts/rename/README.md` (vendored, new file, 47 lines)
@@ -0,0 +1,47 @@
|
||||
## Renaming ripple(d) to xrpl(d)
|
||||
|
||||
In the initial phases of development of the XRPL, the open source codebase was
|
||||
called "rippled" and it remains with that name even today. Today, over 1000
|
||||
nodes run the application, and code contributions have been submitted by
|
||||
developers located around the world. The XRPL community is larger than ever.
|
||||
In light of the decentralized and diversified nature of XRPL, we will rename any
|
||||
references to `ripple` and `rippled` to `xrpl` and `xrpld`, when appropriate.
|
||||
|
||||
See [here](https://xls.xrpl.org/xls/XLS-0095-rename-rippled-to-xrpld.html) for
|
||||
more information.
|
||||
|
||||
### Scripts
|
||||
|
||||
To facilitate this transition, there will be multiple scripts that developers
|
||||
can run on their own PRs and forks to minimize conflicts. Each script should be
|
||||
run from the repository root.
|
||||
|
||||
1. `.github/scripts/rename/definitions.sh`: This script will rename all
|
||||
definitions, such as include guards, from `RIPPLE_XXX` and `RIPPLED_XXX` to
|
||||
`XRPL_XXX`.
|
||||
2. `.github/scripts/rename/copyright.sh`: This script will remove superfluous
|
||||
copyright notices.
|
||||
3. `.github/scripts/rename/cmake.sh`: This script will rename all CMake files
|
||||
from `RippleXXX.cmake` or `RippledXXX.cmake` to `XrplXXX.cmake`, and any
|
||||
references to `ripple` and `rippled` (with or without capital letters) to
|
||||
`xrpl` and `xrpld`, respectively. The name of the binary will remain as-is,
|
||||
and will only be renamed to `xrpld` by a later script.
|
||||
4. `.github/scripts/rename/binary.sh`: This script will rename the binary from
|
||||
`rippled` to `xrpld`, and reverses the symlink so that `rippled` points to
|
||||
the `xrpld` binary.
|
||||
5. `.github/scripts/rename/namespace.sh`: This script will rename the C++
|
||||
namespaces from `ripple` to `xrpl`.
|
||||
6. `.github/scripts/rename/config.sh`: This script will rename the config from
|
||||
`rippled.cfg` to `xrpld.cfg`, and update the code accordingly. The old
|
||||
filename will still be accepted.
|
||||
|
||||
You can run all these scripts from the repository root as follows:
|
||||
|
||||
```shell
|
||||
./.github/scripts/rename/definitions.sh .
|
||||
./.github/scripts/rename/copyright.sh .
|
||||
./.github/scripts/rename/cmake.sh .
|
||||
./.github/scripts/rename/binary.sh .
|
||||
./.github/scripts/rename/namespace.sh .
|
||||
./.github/scripts/rename/config.sh .
|
||||
```
|
||||
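After running the scripts, a rough way to confirm what was and was not renamed is a case-insensitive grep over the CMake layer. This check is a sketch, not part of the rename tooling.

```shell
# List remaining "ripple" spellings in the CMake files. A few hits are
# expected: cmake.sh deliberately keeps the binary name and the
# validator-keys-tool URL on the old spelling.
grep -rni ripple cmake/ CMakeLists.txt
```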
`.github/scripts/rename/binary.sh` (vendored, new executable file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Exit the script as soon as an error occurs.
|
||||
set -e
|
||||
|
||||
# On macOS, ensure that GNU sed is installed and available as `gsed`.
|
||||
SED_COMMAND=sed
|
||||
if [[ "${OSTYPE}" == 'darwin'* ]]; then
|
||||
if ! command -v gsed &> /dev/null; then
|
||||
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
|
||||
exit 1
|
||||
fi
|
||||
SED_COMMAND=gsed
|
||||
fi
|
||||
|
||||
# This script changes the binary name from `rippled` to `xrpld`, and reverses
|
||||
# the symlink that currently points from `xrpld` to `rippled` so that it points
|
||||
# from `rippled` to `xrpld` instead.
|
||||
# Usage: .github/scripts/rename/binary.sh <repository directory>
|
||||
|
||||
if [ "$#" -ne 1 ]; then
|
||||
echo "Usage: $0 <repository directory>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
DIRECTORY=$1
|
||||
echo "Processing directory: ${DIRECTORY}"
|
||||
if [ ! -d "${DIRECTORY}" ]; then
|
||||
echo "Error: Directory '${DIRECTORY}' does not exist."
|
||||
exit 1
|
||||
fi
|
||||
pushd ${DIRECTORY}
|
||||
|
||||
# Remove the binary name override added by the cmake.sh script.
|
||||
${SED_COMMAND} -z -i -E 's@\s+# For the time being.+"rippled"\)@@' cmake/XrplCore.cmake
|
||||
|
||||
# Reverse the symlink.
|
||||
${SED_COMMAND} -i -E 's@create_symbolic_link\(rippled@create_symbolic_link(xrpld@' cmake/XrplInstall.cmake
|
||||
${SED_COMMAND} -i -E 's@/xrpld\$\{suffix\}@/rippled${suffix}@' cmake/XrplInstall.cmake
|
||||
|
||||
# Rename references to the binary.
|
||||
${SED_COMMAND} -i -E 's@rippled@xrpld@g' BUILD.md
|
||||
${SED_COMMAND} -i -E 's@rippled@xrpld@g' CONTRIBUTING.md
|
||||
${SED_COMMAND} -i -E 's@rippled@xrpld@g' .github/ISSUE_TEMPLATE/bug_report.md
|
||||
|
||||
# Restore and/or fix certain renames. The pre-commit hook will update the
|
||||
# formatting upon saving/committing.
|
||||
${SED_COMMAND} -i -E 's@ripple/xrpld@XRPLF/rippled@g' BUILD.md
|
||||
${SED_COMMAND} -i -E 's@XRPLF/xrpld@XRPLF/rippled@g' BUILD.md
|
||||
${SED_COMMAND} -i -E 's@xrpld \(`xrpld`\)@xrpld@g' BUILD.md
|
||||
${SED_COMMAND} -i -E 's@XRPLF/xrpld@XRPLF/rippled@g' CONTRIBUTING.md
|
||||
|
||||
popd
|
||||
echo "Processing complete."
|
||||
`.github/scripts/rename/cmake.sh` (vendored, new executable file, 92 lines)
@@ -0,0 +1,92 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Exit the script as soon as an error occurs.
|
||||
set -e
|
||||
|
||||
# On macOS, ensure that GNU sed and head are installed and available as `gsed`
|
||||
# and `ghead`, respectively.
|
||||
SED_COMMAND=sed
|
||||
HEAD_COMMAND=head
|
||||
if [[ "${OSTYPE}" == 'darwin'* ]]; then
|
||||
if ! command -v gsed &> /dev/null; then
|
||||
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
|
||||
exit 1
|
||||
fi
|
||||
SED_COMMAND=gsed
|
||||
if ! command -v ghead &> /dev/null; then
|
||||
echo "Error: ghead is not installed. Please install it using 'brew install coreutils'."
|
||||
exit 1
|
||||
fi
|
||||
HEAD_COMMAND=ghead
|
||||
fi
|
||||
|
||||
# This script renames CMake files from `RippleXXX.cmake` or `RippledXXX.cmake`
|
||||
# to `XrplXXX.cmake`, and any references to `ripple` and `rippled` (with or
|
||||
# without capital letters) to `xrpl` and `xrpld`, respectively. The name of the
|
||||
# binary will remain as-is, and will only be renamed to `xrpld` in a different
|
||||
# script, but the proto file will be renamed.
|
||||
# Usage: .github/scripts/rename/cmake.sh <repository directory>
|
||||
|
||||
if [ "$#" -ne 1 ]; then
|
||||
echo "Usage: $0 <repository directory>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
DIRECTORY=$1
|
||||
echo "Processing directory: ${DIRECTORY}"
|
||||
if [ ! -d "${DIRECTORY}" ]; then
|
||||
echo "Error: Directory '${DIRECTORY}' does not exist."
|
||||
exit 1
|
||||
fi
|
||||
pushd ${DIRECTORY}
|
||||
|
||||
# Rename the files.
|
||||
find cmake -type f -name 'Rippled*.cmake' -exec bash -c 'mv "${1}" "${1/Rippled/Xrpl}"' - {} \;
|
||||
find cmake -type f -name 'Ripple*.cmake' -exec bash -c 'mv "${1}" "${1/Ripple/Xrpl}"' - {} \;
|
||||
if [ -e cmake/xrpl_add_test.cmake ]; then
|
||||
mv cmake/xrpl_add_test.cmake cmake/XrplAddTest.cmake
|
||||
fi
|
||||
if [ -e include/xrpl/proto/ripple.proto ]; then
|
||||
mv include/xrpl/proto/ripple.proto include/xrpl/proto/xrpl.proto
|
||||
fi
|
||||
|
||||
# Rename inside the files.
|
||||
find cmake -type f -name '*.cmake' | while read -r FILE; do
|
||||
echo "Processing file: ${FILE}"
|
||||
${SED_COMMAND} -i 's/Rippled/Xrpld/g' "${FILE}"
|
||||
${SED_COMMAND} -i 's/Ripple/Xrpl/g' "${FILE}"
|
||||
${SED_COMMAND} -i 's/rippled/xrpld/g' "${FILE}"
|
||||
${SED_COMMAND} -i 's/ripple/xrpl/g' "${FILE}"
|
||||
done
|
||||
${SED_COMMAND} -i -E 's/Rippled?/Xrpl/g' CMakeLists.txt
|
||||
${SED_COMMAND} -i 's/ripple/xrpl/g' CMakeLists.txt
|
||||
${SED_COMMAND} -i 's/include(xrpl_add_test)/include(XrplAddTest)/' src/tests/libxrpl/CMakeLists.txt
|
||||
${SED_COMMAND} -i 's/ripple.pb.h/xrpl.pb.h/' include/xrpl/protocol/messages.h
|
||||
${SED_COMMAND} -i 's/ripple.pb.h/xrpl.pb.h/' BUILD.md
|
||||
${SED_COMMAND} -i 's/ripple.pb.h/xrpl.pb.h/' BUILD.md
|
||||
|
||||
# Restore the name of the validator keys repository.
|
||||
${SED_COMMAND} -i 's@xrpl/validator-keys-tool@ripple/validator-keys-tool@' cmake/XrplValidatorKeys.cmake
|
||||
|
||||
# Ensure the name of the binary and config remain 'rippled' for now.
|
||||
${SED_COMMAND} -i -E 's/xrpld(-example)?\.cfg/rippled\1.cfg/g' cmake/XrplInstall.cmake
|
||||
if grep -q '"xrpld"' cmake/XrplCore.cmake; then
|
||||
# The script has been rerun, so just restore the name of the binary.
|
||||
${SED_COMMAND} -i 's/"xrpld"/"rippled"/' cmake/XrplCore.cmake
|
||||
elif ! grep -q '"rippled"' cmake/XrplCore.cmake; then
|
||||
${HEAD_COMMAND} -n -1 cmake/XrplCore.cmake > cmake.tmp
|
||||
echo ' # For the time being, we will keep the name of the binary as it was.' >> cmake.tmp
|
||||
echo ' set_target_properties(xrpld PROPERTIES OUTPUT_NAME "rippled")' >> cmake.tmp
|
||||
tail -1 cmake/XrplCore.cmake >> cmake.tmp
|
||||
mv cmake.tmp cmake/XrplCore.cmake
|
||||
fi
|
||||
|
||||
# Restore the symlink from 'xrpld' to 'rippled'.
|
||||
${SED_COMMAND} -i -E 's@create_symbolic_link\(xrpld@create_symbolic_link(rippled@' cmake/XrplInstall.cmake
|
||||
|
||||
# Remove the symlink that previously pointed from 'ripple' to 'xrpl' but now is
|
||||
# no longer needed.
|
||||
${SED_COMMAND} -z -i -E 's@install\(CODE.+CMAKE_INSTALL_INCLUDEDIR}/xrpl\)\n"\)\n+@@' cmake/XrplInstall.cmake
|
||||
|
||||
popd
|
||||
echo "Renaming complete."
|
||||
`.github/scripts/rename/config.sh` (vendored, new executable file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Exit the script as soon as an error occurs.
|
||||
set -e
|
||||
|
||||
# On macOS, ensure that GNU sed is installed and available as `gsed`.
|
||||
SED_COMMAND=sed
|
||||
if [[ "${OSTYPE}" == 'darwin'* ]]; then
|
||||
if ! command -v gsed &> /dev/null; then
|
||||
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
|
||||
exit 1
|
||||
fi
|
||||
SED_COMMAND=gsed
|
||||
fi
|
||||
|
||||
# This script renames the config from `rippled.cfg` to `xrpld.cfg`, and updates
|
||||
# the code accordingly. The old filename will still be accepted.
|
||||
# Usage: .github/scripts/rename/config.sh <repository directory>
|
||||
|
||||
if [ "$#" -ne 1 ]; then
|
||||
echo "Usage: $0 <repository directory>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
DIRECTORY=$1
|
||||
echo "Processing directory: ${DIRECTORY}"
|
||||
if [ ! -d "${DIRECTORY}" ]; then
|
||||
echo "Error: Directory '${DIRECTORY}' does not exist."
|
||||
exit 1
|
||||
fi
|
||||
pushd ${DIRECTORY}
|
||||
|
||||
# Add the xrpld.cfg to the .gitignore.
|
||||
if ! grep -q 'xrpld.cfg' .gitignore; then
|
||||
${SED_COMMAND} -i '/rippled.cfg/a\
|
||||
/xrpld.cfg' .gitignore
|
||||
fi
|
||||
|
||||
# Rename the files.
|
||||
if [ -e rippled.cfg ]; then
|
||||
mv rippled.cfg xrpld.cfg
|
||||
fi
|
||||
if [ -e cfg/rippled-example.cfg ]; then
|
||||
mv cfg/rippled-example.cfg cfg/xrpld-example.cfg
|
||||
fi
|
||||
|
||||
# Rename inside the files.
|
||||
DIRECTORIES=("cfg" "cmake" "include" "src")
|
||||
for DIRECTORY in "${DIRECTORIES[@]}"; do
|
||||
echo "Processing directory: ${DIRECTORY}"
|
||||
|
||||
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.cmake" -o -name "*.txt" -o -name "*.cfg" -o -name "*.md" \) | while read -r FILE; do
|
||||
echo "Processing file: ${FILE}"
|
||||
${SED_COMMAND} -i -E 's/rippled(-example)?[ .]cfg/xrpld\1.cfg/g' "${FILE}"
|
||||
done
|
||||
done
|
||||
${SED_COMMAND} -i 's/rippled/xrpld/g' cfg/xrpld-example.cfg
|
||||
${SED_COMMAND} -i 's/rippled/xrpld/g' src/test/core/Config_test.cpp
|
||||
${SED_COMMAND} -i 's/ripplevalidators/xrplvalidators/g' src/test/core/Config_test.cpp # cspell: disable-line
|
||||
${SED_COMMAND} -i 's/rippleConfig/xrpldConfig/g' src/test/core/Config_test.cpp
|
||||
${SED_COMMAND} -i 's@ripple/@xrpld/@g' src/test/core/Config_test.cpp
|
||||
${SED_COMMAND} -i 's/Rippled/File/g' src/test/core/Config_test.cpp
|
||||
|
||||
|
||||
# Restore the old config file name in the code that maintains support for now.
|
||||
${SED_COMMAND} -i 's/configLegacyName = "xrpld.cfg"/configLegacyName = "rippled.cfg"/g' src/xrpld/core/detail/Config.cpp
|
||||
|
||||
# Restore a URL.
|
||||
${SED_COMMAND} -i 's/connect-your-xrpld-to-the-xrp-test-net.html/connect-your-rippled-to-the-xrp-test-net.html/g' cfg/xrpld-example.cfg
|
||||
|
||||
popd
|
||||
echo "Renaming complete."
|
||||
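The heart of config.sh is the substitution s/rippled(-example)?[ .]cfg/xrpld\1.cfg/g. A minimal sketch of the same rewrite using Python's re module, illustrative only and not part of the repository; the sample strings are made up, and Python 3.5+ expands the unmatched optional group to an empty string:

import re

# Mirrors the GNU sed expression used by config.sh above.
samples = [
    "load rippled.cfg from the current directory",
    "see cfg/rippled-example.cfg for details",
]
for text in samples:
    print(re.sub(r"rippled(-example)?[ .]cfg", r"xrpld\1.cfg", text))
# -> load xrpld.cfg from the current directory
# -> see cfg/xrpld-example.cfg for details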
103
.github/scripts/rename/copyright.sh
vendored
Executable file
@@ -0,0 +1,103 @@
#!/bin/bash

# Exit the script as soon as an error occurs.
set -e

# On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then
    if ! command -v gsed &> /dev/null; then
        echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
        exit 1
    fi
    SED_COMMAND=gsed
fi

# This script removes superfluous copyright notices in source and header files
# in this project. Specifically, it removes all notices referencing Ripple,
# XRPLF, and certain individual contributors upon mutual agreement, so the one
# in the LICENSE.md file applies throughout. Copyright notices referencing
# external contributions, e.g. from Bitcoin, remain as-is.
# Usage: .github/scripts/rename/copyright.sh <repository directory>

if [ "$#" -ne 1 ]; then
    echo "Usage: $0 <repository directory>"
    exit 1
fi

DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
    echo "Error: Directory '${DIRECTORY}' does not exist."
    exit 1
fi
pushd ${DIRECTORY}

# Prevent sed and echo from removing newlines and tabs in string literals by
# temporarily replacing them with placeholders. This only affects one file.
PLACEHOLDER_NEWLINE="__NEWLINE__"
PLACEHOLDER_TAB="__TAB__"
${SED_COMMAND} -i -E "s@\\\n@${PLACEHOLDER_NEWLINE}@g" src/test/rpc/ValidatorInfo_test.cpp
${SED_COMMAND} -i -E "s@\\\t@${PLACEHOLDER_TAB}@g" src/test/rpc/ValidatorInfo_test.cpp

# Process the include/ and src/ directories.
DIRECTORIES=("include" "src")
for DIRECTORY in "${DIRECTORIES[@]}"; do
    echo "Processing directory: ${DIRECTORY}"

    find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.macro" \) | while read -r FILE; do
        echo "Processing file: ${FILE}"
        # Handle the cases where the copyright notice is enclosed in /* ... */
        # and usually surrounded by //---- and //======.
        ${SED_COMMAND} -z -i -E 's@^//-------+\n+@@' "${FILE}"
        ${SED_COMMAND} -z -i -E 's@^.*Copyright.+(Ripple|Bougalis|Falco|Hinnant|Null|Ritchford|XRPLF).+PERFORMANCE OF THIS SOFTWARE\.\n\*/\n+@@' "${FILE}" # cspell: ignore Bougalis Falco Hinnant Ritchford
        ${SED_COMMAND} -z -i -E 's@^//=======+\n+@@' "${FILE}"

        # Handle the cases where the copyright notice is commented out with //.
        ${SED_COMMAND} -z -i -E 's@^//\n// Copyright.+Falco \(vinnie dot falco at gmail dot com\)\n//\n+@@' "${FILE}" # cspell: ignore Vinnie Falco
    done
done

# Restore copyright notices that were removed from specific files, without
# restoring the verbiage that is already present in LICENSE.md. Ensure that if
# the script is run multiple times, duplicate notices are not added.
if ! grep -q 'Raw Material Software' include/xrpl/beast/core/CurrentThreadName.h; then
    echo -e "// Portions of this file are from JUCE (http://www.juce.com).\n// Copyright (c) 2013 - Raw Material Software Ltd.\n// Please visit http://www.juce.com\n\n$(cat include/xrpl/beast/core/CurrentThreadName.h)" > include/xrpl/beast/core/CurrentThreadName.h
fi
if ! grep -q 'Dev Null' src/test/app/NetworkID_test.cpp; then
    echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/NetworkID_test.cpp)" > src/test/app/NetworkID_test.cpp
fi
if ! grep -q 'Dev Null' src/test/app/tx/apply_test.cpp; then
    echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/tx/apply_test.cpp)" > src/test/app/tx/apply_test.cpp
fi
if ! grep -q 'Dev Null' src/test/rpc/ManifestRPC_test.cpp; then
    echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/rpc/ManifestRPC_test.cpp)" > src/test/rpc/ManifestRPC_test.cpp
fi
if ! grep -q 'Dev Null' src/test/rpc/ValidatorInfo_test.cpp; then
    echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/rpc/ValidatorInfo_test.cpp)" > src/test/rpc/ValidatorInfo_test.cpp
fi
if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/DoManifest.cpp; then
    echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/DoManifest.cpp)" > src/xrpld/rpc/handlers/DoManifest.cpp
fi
if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/ValidatorInfo.cpp; then
    echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/ValidatorInfo.cpp)" > src/xrpld/rpc/handlers/ValidatorInfo.cpp
fi
if ! grep -q 'Bougalis' include/xrpl/basics/SlabAllocator.h; then
    echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/SlabAllocator.h)" > include/xrpl/basics/SlabAllocator.h # cspell: ignore Nikolaos Bougalis nikb
fi
if ! grep -q 'Bougalis' include/xrpl/basics/spinlock.h; then
    echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/spinlock.h)" > include/xrpl/basics/spinlock.h # cspell: ignore Nikolaos Bougalis nikb
fi
if ! grep -q 'Bougalis' include/xrpl/basics/tagged_integer.h; then
    echo -e "// Copyright (c) 2014, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/tagged_integer.h)" > include/xrpl/basics/tagged_integer.h # cspell: ignore Nikolaos Bougalis nikb
fi
if ! grep -q 'Ritchford' include/xrpl/beast/utility/Zero.h; then
    echo -e "// Copyright (c) 2014, Tom Ritchford <tom@swirly.com>\n\n$(cat include/xrpl/beast/utility/Zero.h)" > include/xrpl/beast/utility/Zero.h # cspell: ignore Ritchford
fi

# Restore newlines and tabs in string literals in the affected file.
${SED_COMMAND} -i -E "s@${PLACEHOLDER_NEWLINE}@\\\n@g" src/test/rpc/ValidatorInfo_test.cpp
${SED_COMMAND} -i -E "s@${PLACEHOLDER_TAB}@\\\t@g" src/test/rpc/ValidatorInfo_test.cpp

popd
echo "Removal complete."
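The `sed -z` invocations are what make this work on multi-line blocks: with -z the whole file is read as a single record, so `^` anchors at the start of the file rather than at each line. A rough Python equivalent, illustrative only; the header below is a shortened mock, not a real file from the repository:

import re

# Treat the whole file as one string, like `sed -z`, and strip the header block.
mock = (
    "//------------------------------------------------------------------------------\n"
    "/*\n"
    "    This file is part of rippled: https://github.com/ripple/rippled\n"
    "    Copyright (c) 2012, 2013 Ripple Labs Inc.\n"
    "    ...\n"
    "    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n"
    "*/\n"
    "//==============================================================================\n"
    "\n"
    "#include <xrpl/basics/base_uint.h>\n"
)
text = re.sub(r"\A//-+\n+", "", mock)
text = re.sub(r"\A.*Copyright.+PERFORMANCE OF THIS SOFTWARE\.\n\*/\n+", "", text, flags=re.DOTALL)
text = re.sub(r"\A//=+\n+", "", text)
print(text, end="")  # only the #include line survives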
42
.github/scripts/rename/definitions.sh
vendored
Executable file
@@ -0,0 +1,42 @@
#!/bin/bash

# Exit the script as soon as an error occurs.
set -e

# On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then
    if ! command -v gsed &> /dev/null; then
        echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
        exit 1
    fi
    SED_COMMAND=gsed
fi

# This script renames definitions, such as include guards, in this project.
# Specifically, it renames "RIPPLED_XXX" and "RIPPLE_XXX" to "XRPL_XXX" by
# scanning all cmake, header, and source files in the specified directory and
# its subdirectories.
# Usage: .github/scripts/rename/definitions.sh <repository directory>

if [ "$#" -ne 1 ]; then
    echo "Usage: $0 <repository directory>"
    exit 1
fi

DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
    echo "Error: Directory '${DIRECTORY}' does not exist."
    exit 1
fi

find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" \) | while read -r FILE; do
    echo "Processing file: ${FILE}"
    ${SED_COMMAND} -i -E 's@#(define|endif|if|ifdef|ifndef)(.*)(RIPPLED_|RIPPLE_)([A-Z0-9_]+)@#\1\2XRPL_\4@g' "${FILE}"
done
find "${DIRECTORY}" -type f \( -name "*.cmake" -o -name "*.txt" \) | while read -r FILE; do
    echo "Processing file: ${FILE}"
    ${SED_COMMAND} -i -E 's@(RIPPLED_|RIPPLE_)([A-Z0-9_]+)@XRPL_\2@g' "${FILE}"
done
echo "Renaming complete."
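A quick sanity check of the guard-renaming expression, mirrored with Python's re module; the sample lines are invented and not part of the repository:

import re

pattern = r"#(define|endif|if|ifdef|ifndef)(.*)(RIPPLED_|RIPPLE_)([A-Z0-9_]+)"
samples = [
    "#ifndef RIPPLE_BASICS_BASE_UINT_H_INCLUDED",
    "#define RIPPLED_VERSION_MAJOR 2",
    "#endif  // RIPPLE_BASICS_BASE_UINT_H_INCLUDED",
]
for line in samples:
    print(re.sub(pattern, r"#\1\2XRPL_\4", line))
# -> #ifndef XRPL_BASICS_BASE_UINT_H_INCLUDED
# -> #define XRPL_VERSION_MAJOR 2
# -> #endif  // XRPL_BASICS_BASE_UINT_H_INCLUDED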
58
.github/scripts/rename/namespace.sh
vendored
Executable file
@@ -0,0 +1,58 @@
#!/bin/bash

# Exit the script as soon as an error occurs.
set -e

# On MacOS, ensure that GNU sed is installed and available as `gsed`.
SED_COMMAND=sed
if [[ "${OSTYPE}" == 'darwin'* ]]; then
    if ! command -v gsed &> /dev/null; then
        echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
        exit 1
    fi
    SED_COMMAND=gsed
fi

# This script renames the `ripple` namespace to `xrpl` in this project.
# Specifically, it renames all occurrences of `namespace ripple` and `ripple::`
# to `namespace xrpl` and `xrpl::`, respectively, by scanning all header and
# source files in the specified directory and its subdirectories, as well as any
# occurrences in the documentation. It also renames them in the test suites.
# Usage: .github/scripts/rename/namespace.sh <repository directory>

if [ "$#" -ne 1 ]; then
    echo "Usage: $0 <repository directory>"
    exit 1
fi

DIRECTORY=$1
echo "Processing directory: ${DIRECTORY}"
if [ ! -d "${DIRECTORY}" ]; then
    echo "Error: Directory '${DIRECTORY}' does not exist."
    exit 1
fi
pushd ${DIRECTORY}

DIRECTORIES=("include" "src" "tests")
for DIRECTORY in "${DIRECTORIES[@]}"; do
    echo "Processing directory: ${DIRECTORY}"

    find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" \) | while read -r FILE; do
        echo "Processing file: ${FILE}"
        ${SED_COMMAND} -i 's/namespace ripple/namespace xrpl/g' "${FILE}"
        ${SED_COMMAND} -i 's/ripple::/xrpl::/g' "${FILE}"
        ${SED_COMMAND} -i -E 's/(BEAST_DEFINE_TESTSUITE.+)ripple(.+)/\1xrpl\2/g' "${FILE}"
    done
done

# Special case for NuDBFactory that has ripple twice in the test suite name.
${SED_COMMAND} -i -E 's/(BEAST_DEFINE_TESTSUITE.+)ripple(.+)/\1xrpl\2/g' src/test/nodestore/NuDBFactory_test.cpp

DIRECTORY=$1
find "${DIRECTORY}" -type f -name "*.md" | while read -r FILE; do
    echo "Processing file: ${FILE}"
    ${SED_COMMAND} -i 's/ripple::/xrpl::/g' "${FILE}"
done

popd
echo "Renaming complete."
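The NuDBFactory special case is needed presumably because the leading .+ in the test-suite expression is greedy, so each pass rewrites only the last "ripple" it can find on a line; a line that mentions it twice therefore needs two passes. A small Python illustration, with an invented macro line:

import re

# Illustrative only: the macro line below is made up to show the two-pass behavior.
line = "BEAST_DEFINE_TESTSUITE(NuDBFactory, ripple_core, ripple);"
once = re.sub(r"(BEAST_DEFINE_TESTSUITE.+)ripple(.+)", r"\1xrpl\2", line)
twice = re.sub(r"(BEAST_DEFINE_TESTSUITE.+)ripple(.+)", r"\1xrpl\2", once)
print(once)   # BEAST_DEFINE_TESTSUITE(NuDBFactory, ripple_core, xrpl);
print(twice)  # BEAST_DEFINE_TESTSUITE(NuDBFactory, xrpl_core, xrpl);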
303
.github/scripts/strategy-matrix/generate.py
vendored
Executable file
@@ -0,0 +1,303 @@
#!/usr/bin/env python3
import argparse
import itertools
import json
from dataclasses import dataclass
from pathlib import Path

THIS_DIR = Path(__file__).parent.resolve()


@dataclass
class Config:
    architecture: list[dict]
    os: list[dict]
    build_type: list[str]
    cmake_args: list[str]


"""
Generate a strategy matrix for GitHub Actions CI.

On each PR commit we will build a selection of Debian, RHEL, Ubuntu, MacOS, and
Windows configurations, while upon merge into the develop, release, or master
branches, we will build all configurations, and test most of them.

We will further set additional CMake arguments as follows:
- All builds will have the `tests`, `werr`, and `xrpld` options.
- All builds will have the `wextra` option except for GCC 12 and Clang 16.
- All release builds will have the `assert` option.
- Certain Debian Bookworm configurations will change the reference fee, enable
  codecov, and enable voidstar in PRs.
"""


def generate_strategy_matrix(all: bool, config: Config) -> list:
    configurations = []
    for architecture, os, build_type, cmake_args in itertools.product(
        config.architecture, config.os, config.build_type, config.cmake_args
    ):
        # The default CMake target is 'all' for Linux and MacOS and 'install'
        # for Windows, but it can get overridden for certain configurations.
        cmake_target = "install" if os["distro_name"] == "windows" else "all"

        # We build and test all configurations by default, except for Windows in
        # Debug, because it is too slow, as well as when code coverage is
        # enabled as that mode already runs the tests.
        build_only = False
        if os["distro_name"] == "windows" and build_type == "Debug":
            build_only = True

        # Only generate a subset of configurations in PRs.
        if not all:
            # Debian:
            # - Bookworm using GCC 13: Release and Unity on linux/amd64, set
            #   the reference fee to 500.
            # - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
            #   code coverage (which will be done below).
            # - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
            #   enable voidstar.
            # - Bookworm using Clang 17: Release and no Unity on linux/amd64,
            #   set the reference fee to 1000.
            # - Bookworm using Clang 20: Debug and Unity on linux/amd64.
            if os["distro_name"] == "debian":
                skip = True
                if os["distro_version"] == "bookworm":
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-13"
                        and build_type == "Release"
                        and "-Dunity=ON" in cmake_args
                        and architecture["platform"] == "linux/amd64"
                    ):
                        cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}"
                        skip = False
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
                        and build_type == "Debug"
                        and "-Dunity=OFF" in cmake_args
                        and architecture["platform"] == "linux/amd64"
                    ):
                        skip = False
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "clang-16"
                        and build_type == "Debug"
                        and "-Dunity=OFF" in cmake_args
                        and architecture["platform"] == "linux/arm64"
                    ):
                        cmake_args = f"-Dvoidstar=ON {cmake_args}"
                        skip = False
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "clang-17"
                        and build_type == "Release"
                        and "-Dunity=ON" in cmake_args
                        and architecture["platform"] == "linux/amd64"
                    ):
                        cmake_args = f"-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}"
                        skip = False
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "clang-20"
                        and build_type == "Debug"
                        and "-Dunity=ON" in cmake_args
                        and architecture["platform"] == "linux/amd64"
                    ):
                        skip = False
                if skip:
                    continue

            # RHEL:
            # - 9 using GCC 12: Debug and Unity on linux/amd64.
            # - 10 using Clang: Release and no Unity on linux/amd64.
            if os["distro_name"] == "rhel":
                skip = True
                if os["distro_version"] == "9":
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
                        and build_type == "Debug"
                        and "-Dunity=ON" in cmake_args
                        and architecture["platform"] == "linux/amd64"
                    ):
                        skip = False
                elif os["distro_version"] == "10":
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "clang-any"
                        and build_type == "Release"
                        and "-Dunity=OFF" in cmake_args
                        and architecture["platform"] == "linux/amd64"
                    ):
                        skip = False
                if skip:
                    continue

            # Ubuntu:
            # - Jammy using GCC 12: Debug and no Unity on linux/arm64.
            # - Noble using GCC 14: Release and Unity on linux/amd64.
            # - Noble using Clang 18: Debug and no Unity on linux/amd64.
            # - Noble using Clang 19: Release and Unity on linux/arm64.
            if os["distro_name"] == "ubuntu":
                skip = True
                if os["distro_version"] == "jammy":
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-12"
                        and build_type == "Debug"
                        and "-Dunity=OFF" in cmake_args
                        and architecture["platform"] == "linux/arm64"
                    ):
                        skip = False
                elif os["distro_version"] == "noble":
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-14"
                        and build_type == "Release"
                        and "-Dunity=ON" in cmake_args
                        and architecture["platform"] == "linux/amd64"
                    ):
                        skip = False
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "clang-18"
                        and build_type == "Debug"
                        and "-Dunity=OFF" in cmake_args
                        and architecture["platform"] == "linux/amd64"
                    ):
                        skip = False
                    if (
                        f"{os['compiler_name']}-{os['compiler_version']}" == "clang-19"
                        and build_type == "Release"
                        and "-Dunity=ON" in cmake_args
                        and architecture["platform"] == "linux/arm64"
                    ):
                        skip = False
                if skip:
                    continue

            # MacOS:
            # - Debug and no Unity on macos/arm64.
            if os["distro_name"] == "macos" and not (
                build_type == "Debug"
                and "-Dunity=OFF" in cmake_args
                and architecture["platform"] == "macos/arm64"
            ):
                continue

            # Windows:
            # - Release and Unity on windows/amd64.
            if os["distro_name"] == "windows" and not (
                build_type == "Release"
                and "-Dunity=ON" in cmake_args
                and architecture["platform"] == "windows/amd64"
            ):
                continue

        # Additional CMake arguments.
        cmake_args = f"{cmake_args} -Dtests=ON -Dwerr=ON -Dxrpld=ON"
        if not f"{os['compiler_name']}-{os['compiler_version']}" in [
            "gcc-12",
            "clang-16",
        ]:
            cmake_args = f"{cmake_args} -Dwextra=ON"
        if build_type == "Release":
            cmake_args = f"{cmake_args} -Dassert=ON"

        # We skip all RHEL on arm64 due to a build failure that needs further
        # investigation.
        if os["distro_name"] == "rhel" and architecture["platform"] == "linux/arm64":
            continue

        # We skip all clang 20+ on arm64 due to Boost build error.
        if (
            f"{os['compiler_name']}-{os['compiler_version']}"
            in ["clang-20", "clang-21"]
            and architecture["platform"] == "linux/arm64"
        ):
            continue

        # Enable code coverage for Debian Bookworm using GCC 15 in Debug and no
        # Unity on linux/amd64
        if (
            f"{os['compiler_name']}-{os['compiler_version']}" == "gcc-15"
            and build_type == "Debug"
            and "-Dunity=OFF" in cmake_args
            and architecture["platform"] == "linux/amd64"
        ):
            cmake_args = f"-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}"

        # Generate a unique name for the configuration, e.g. macos-arm64-debug
        # or debian-bookworm-gcc-12-amd64-release-unity.
        config_name = os["distro_name"]
        if (n := os["distro_version"]) != "":
            config_name += f"-{n}"
        if (n := os["compiler_name"]) != "":
            config_name += f"-{n}"
        if (n := os["compiler_version"]) != "":
            config_name += f"-{n}"
        config_name += (
            f"-{architecture['platform'][architecture['platform'].find('/') + 1 :]}"
        )
        config_name += f"-{build_type.lower()}"
        if "-Dcoverage=ON" in cmake_args:
            config_name += "-coverage"
        if "-Dunity=ON" in cmake_args:
            config_name += "-unity"

        # Add the configuration to the list, with the most unique fields first,
        # so that they are easier to identify in the GitHub Actions UI, as long
        # names get truncated.
        configurations.append(
            {
                "config_name": config_name,
                "cmake_args": cmake_args,
                "cmake_target": cmake_target,
                "build_only": build_only,
                "build_type": build_type,
                "os": os,
                "architecture": architecture,
            }
        )

    return configurations


def read_config(file: Path) -> Config:
    config = json.loads(file.read_text())
    if (
        config["architecture"] is None
        or config["os"] is None
        or config["build_type"] is None
        or config["cmake_args"] is None
    ):
        raise Exception("Invalid configuration file.")

    return Config(**config)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-a",
        "--all",
        help="Set to generate all configurations (generally used when merging a PR) or leave unset to generate a subset of configurations (generally used when committing to a PR).",
        action="store_true",
    )
    parser.add_argument(
        "-c",
        "--config",
        help="Path to the JSON file containing the strategy matrix configurations.",
        required=False,
        type=Path,
    )
    args = parser.parse_args()

    matrix = []
    if args.config is None or args.config == "":
        matrix += generate_strategy_matrix(
            args.all, read_config(THIS_DIR / "linux.json")
        )
        matrix += generate_strategy_matrix(
            args.all, read_config(THIS_DIR / "macos.json")
        )
        matrix += generate_strategy_matrix(
            args.all, read_config(THIS_DIR / "windows.json")
        )
    else:
        matrix += generate_strategy_matrix(args.all, read_config(args.config))

    # Generate the strategy matrix.
    print(f"matrix={json.dumps({'include': matrix})}")
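For reference, one way to exercise the generator locally is sketched below. It is illustrative only: it assumes the repository root as the working directory, Python 3.9+ for removeprefix, and that the JSON files above are present.

import json
import subprocess

# Run the PR subset for the Linux definitions and look at the first entry.
out = subprocess.run(
    ["python3", ".github/scripts/strategy-matrix/generate.py",
     "--config", ".github/scripts/strategy-matrix/linux.json"],
    capture_output=True, text=True, check=True,
).stdout
matrix = json.loads(out.removeprefix("matrix="))
print(matrix["include"][0]["config_name"])  # e.g. debian-bookworm-gcc-13-amd64-release-unity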
212
.github/scripts/strategy-matrix/linux.json
vendored
Normal file
@@ -0,0 +1,212 @@
{
  "architecture": [
    { "platform": "linux/amd64", "runner": ["self-hosted", "Linux", "X64", "heavy"] },
    { "platform": "linux/arm64", "runner": ["self-hosted", "Linux", "ARM64", "heavy-arm64"] }
  ],
  "os": [
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "gcc", "compiler_version": "12", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "gcc", "compiler_version": "13", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "gcc", "compiler_version": "14", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "gcc", "compiler_version": "15", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "clang", "compiler_version": "16", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "clang", "compiler_version": "17", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "clang", "compiler_version": "18", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "clang", "compiler_version": "19", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "clang", "compiler_version": "20", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "trixie", "compiler_name": "gcc", "compiler_version": "14", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "trixie", "compiler_name": "gcc", "compiler_version": "15", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "trixie", "compiler_name": "clang", "compiler_version": "20", "image_sha": "cc09fd3" },
    { "distro_name": "debian", "distro_version": "trixie", "compiler_name": "clang", "compiler_version": "21", "image_sha": "cc09fd3" },
    { "distro_name": "rhel", "distro_version": "8", "compiler_name": "gcc", "compiler_version": "14", "image_sha": "cc09fd3" },
    { "distro_name": "rhel", "distro_version": "8", "compiler_name": "clang", "compiler_version": "any", "image_sha": "cc09fd3" },
    { "distro_name": "rhel", "distro_version": "9", "compiler_name": "gcc", "compiler_version": "12", "image_sha": "cc09fd3" },
    { "distro_name": "rhel", "distro_version": "9", "compiler_name": "gcc", "compiler_version": "13", "image_sha": "cc09fd3" },
    { "distro_name": "rhel", "distro_version": "9", "compiler_name": "gcc", "compiler_version": "14", "image_sha": "cc09fd3" },
    { "distro_name": "rhel", "distro_version": "9", "compiler_name": "clang", "compiler_version": "any", "image_sha": "cc09fd3" },
    { "distro_name": "rhel", "distro_version": "10", "compiler_name": "gcc", "compiler_version": "14", "image_sha": "cc09fd3" },
    { "distro_name": "rhel", "distro_version": "10", "compiler_name": "clang", "compiler_version": "any", "image_sha": "cc09fd3" },
    { "distro_name": "ubuntu", "distro_version": "jammy", "compiler_name": "gcc", "compiler_version": "12", "image_sha": "cc09fd3" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "gcc", "compiler_version": "13", "image_sha": "cc09fd3" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "gcc", "compiler_version": "14", "image_sha": "cc09fd3" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "clang", "compiler_version": "16", "image_sha": "cc09fd3" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "clang", "compiler_version": "17", "image_sha": "cc09fd3" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "clang", "compiler_version": "18", "image_sha": "cc09fd3" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "clang", "compiler_version": "19", "image_sha": "cc09fd3" }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
}
22
.github/scripts/strategy-matrix/macos.json
vendored
Normal file
@@ -0,0 +1,22 @@
{
  "architecture": [
    { "platform": "macos/arm64", "runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"] }
  ],
  "os": [
    { "distro_name": "macos", "distro_version": "", "compiler_name": "", "compiler_version": "", "image_sha": "" }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": [
    "-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
    "-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
  ]
}
19
.github/scripts/strategy-matrix/windows.json
vendored
Normal file
@@ -0,0 +1,19 @@
{
  "architecture": [
    { "platform": "windows/amd64", "runner": ["self-hosted", "Windows", "devbox"] }
  ],
  "os": [
    { "distro_name": "windows", "distro_version": "", "compiler_name": "", "compiler_version": "", "image_sha": "" }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
}
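The generator combines every architecture, os, build_type, and cmake_args entry with itertools.product before filtering, so even the small Windows file above yields 1 x 1 x 2 x 2 = 4 candidate configurations. A small sketch, assuming the repository root as the working directory:

import itertools
import json
from pathlib import Path

data = json.loads(Path(".github/scripts/strategy-matrix/windows.json").read_text())
candidates = list(itertools.product(
    data["architecture"], data["os"], data["build_type"], data["cmake_args"]
))
print(len(candidates))  # 4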
64
.github/workflows/clang-format.yml
vendored
@@ -1,64 +0,0 @@
|
||||
name: clang-format
|
||||
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
types: [opened, reopened, synchronize, ready_for_review]
|
||||
|
||||
jobs:
|
||||
check:
|
||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
||||
runs-on: ubuntu-24.04
|
||||
container: ghcr.io/xrplf/ci/tools-rippled-clang-format
|
||||
steps:
|
||||
# For jobs running in containers, $GITHUB_WORKSPACE and ${{ github.workspace }} might not be the
|
||||
# same directory. The actions/checkout step is *supposed* to checkout into $GITHUB_WORKSPACE and
|
||||
# then add it to safe.directory (see instructions at https://github.com/actions/checkout)
|
||||
# but that's apparently not happening for some container images. We can't be sure what is actually
|
||||
# happening, so let's pre-emptively add both directories to safe.directory. There's a
|
||||
# Github issue opened in 2022 and not resolved in 2025 https://github.com/actions/runner/issues/2058 ¯\_(ツ)_/¯
|
||||
- run: |
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE
|
||||
git config --global --add safe.directory ${{ github.workspace }}
|
||||
- uses: actions/checkout@v4
|
||||
- name: Format first-party sources
|
||||
run: |
|
||||
clang-format --version
|
||||
find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format -i {} +
|
||||
- name: Check for differences
|
||||
id: assert
|
||||
shell: bash
|
||||
run: |
|
||||
set -o pipefail
|
||||
git diff --exit-code | tee "clang-format.patch"
|
||||
- name: Upload patch
|
||||
if: failure() && steps.assert.outcome == 'failure'
|
||||
uses: actions/upload-artifact@v4
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: clang-format.patch
|
||||
if-no-files-found: ignore
|
||||
path: clang-format.patch
|
||||
- name: What happened?
|
||||
if: failure() && steps.assert.outcome == 'failure'
|
||||
env:
|
||||
PREAMBLE: |
|
||||
If you are reading this, you are looking at a failed Github Actions
|
||||
job. That means you pushed one or more files that did not conform
|
||||
to the formatting specified in .clang-format. That may be because
|
||||
you neglected to run 'git clang-format' or 'clang-format' before
|
||||
committing, or that your version of clang-format has an
|
||||
incompatibility with the one on this
|
||||
machine, which is:
|
||||
SUGGESTION: |
|
||||
|
||||
To fix it, you can do one of two things:
|
||||
1. Download and apply the patch generated as an artifact of this
|
||||
job to your repo, commit, and push.
|
||||
2. Run 'git-clang-format --extensions cpp,h,hpp,ipp develop'
|
||||
in your repo, commit, and push.
|
||||
run: |
|
||||
echo "${PREAMBLE}"
|
||||
clang-format --version
|
||||
echo "${SUGGESTION}"
|
||||
exit 1
|
||||
37
.github/workflows/doxygen.yml
vendored
@@ -1,37 +0,0 @@
|
||||
name: Build and publish Doxygen documentation
|
||||
# To test this workflow, push your changes to your fork's `develop` branch.
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- doxygen
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
documentation:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: check environment
|
||||
run: |
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
cmake --version
|
||||
doxygen --version
|
||||
env | sort
|
||||
- name: build
|
||||
run: |
|
||||
mkdir build
|
||||
cd build
|
||||
cmake -Donly_docs=TRUE ..
|
||||
cmake --build . --target docs --parallel $(nproc)
|
||||
- name: publish
|
||||
uses: peaceiris/actions-gh-pages@v3
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
publish_dir: build/docs/html
|
||||
53
.github/workflows/levelization.yml
vendored
@@ -1,53 +0,0 @@
|
||||
name: levelization
|
||||
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
types: [opened, reopened, synchronize, ready_for_review]
|
||||
|
||||
jobs:
|
||||
check:
|
||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CLANG_VERSION: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Check levelization
|
||||
run: Builds/levelization/levelization.sh
|
||||
- name: Check for differences
|
||||
id: assert
|
||||
run: |
|
||||
set -o pipefail
|
||||
git diff --exit-code | tee "levelization.patch"
|
||||
- name: Upload patch
|
||||
if: failure() && steps.assert.outcome == 'failure'
|
||||
uses: actions/upload-artifact@v4
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: levelization.patch
|
||||
if-no-files-found: ignore
|
||||
path: levelization.patch
|
||||
- name: What happened?
|
||||
if: failure() && steps.assert.outcome == 'failure'
|
||||
env:
|
||||
MESSAGE: |
|
||||
If you are reading this, you are looking at a failed Github
|
||||
Actions job. That means you changed the dependency relationships
|
||||
between the modules in rippled. That may be an improvement or a
|
||||
regression. This check doesn't judge.
|
||||
|
||||
A rule of thumb, though, is that if your changes caused
|
||||
something to be removed from loops.txt, that's probably an
|
||||
improvement. If something was added, it's probably a regression.
|
||||
|
||||
To fix it, you can do one of two things:
|
||||
1. Download and apply the patch generated as an artifact of this
|
||||
job to your repo, commit, and push.
|
||||
2. Run './Builds/levelization/levelization.sh' in your repo,
|
||||
commit, and push.
|
||||
|
||||
See Builds/levelization/README.md for more info.
|
||||
run: |
|
||||
echo "${MESSAGE}"
|
||||
exit 1
|
||||
91
.github/workflows/libxrpl.yml
vendored
@@ -1,91 +0,0 @@
|
||||
name: Check libXRPL compatibility with Clio
|
||||
env:
|
||||
CONAN_REMOTE_URL: https://conan.ripplex.io
|
||||
CONAN_LOGIN_USERNAME_XRPLF: ${{ secrets.CONAN_REMOTE_USERNAME }}
|
||||
CONAN_PASSWORD_XRPLF: ${{ secrets.CONAN_REMOTE_PASSWORD }}
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "src/libxrpl/protocol/BuildInfo.cpp"
|
||||
- ".github/workflows/libxrpl.yml"
|
||||
types: [opened, reopened, synchronize, ready_for_review]
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
||||
name: Publish libXRPL
|
||||
outputs:
|
||||
outcome: ${{ steps.upload.outputs.outcome }}
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
channel: ${{ steps.channel.outputs.channel }}
|
||||
runs-on: [self-hosted, heavy]
|
||||
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
|
||||
steps:
|
||||
- name: Wait for essential checks to succeed
|
||||
uses: lewagon/wait-on-check-action@v1.3.4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha || github.sha }}
|
||||
running-workflow-name: wait-for-check-regexp
|
||||
check-regexp: "(dependencies|test).*linux.*" # Ignore windows and mac tests but make sure linux passes
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
wait-interval: 10
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Generate channel
|
||||
id: channel
|
||||
shell: bash
|
||||
run: |
|
||||
echo channel="clio/pr_${{ github.event.pull_request.number }}" | tee ${GITHUB_OUTPUT}
|
||||
- name: Export new package
|
||||
shell: bash
|
||||
run: |
|
||||
conan export . ${{ steps.channel.outputs.channel }}
|
||||
- name: Add Conan remote
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Adding Conan remote 'xrplf' at ${{ env.CONAN_REMOTE_URL }}."
|
||||
conan remote add xrplf ${{ env.CONAN_REMOTE_URL }} --insert 0 --force
|
||||
echo "Listing Conan remotes."
|
||||
conan remote list
|
||||
- name: Parse new version
|
||||
id: version
|
||||
shell: bash
|
||||
run: |
|
||||
echo version="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" \
|
||||
| awk -F '"' '{print $2}')" | tee ${GITHUB_OUTPUT}
|
||||
- name: Try to authenticate to Conan remote
|
||||
id: remote
|
||||
shell: bash
|
||||
run: |
|
||||
# `conan user` implicitly uses the environment variables CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
|
||||
# https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
|
||||
# https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
|
||||
# https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
|
||||
echo outcome=$(conan user --remote xrplf --password >&2 \
|
||||
&& echo success || echo failure) | tee ${GITHUB_OUTPUT}
|
||||
- name: Upload new package
|
||||
id: upload
|
||||
if: (steps.remote.outputs.outcome == 'success')
|
||||
shell: bash
|
||||
run: |
|
||||
echo "conan upload version ${{ steps.version.outputs.version }} on channel ${{ steps.channel.outputs.channel }}"
|
||||
echo outcome=$(conan upload xrpl/${{ steps.version.outputs.version }}@${{ steps.channel.outputs.channel }} --remote ripple --confirm >&2 \
|
||||
&& echo success || echo failure) | tee ${GITHUB_OUTPUT}
|
||||
notify_clio:
|
||||
name: Notify Clio
|
||||
runs-on: ubuntu-latest
|
||||
needs: publish
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.CLIO_NOTIFY_TOKEN }}
|
||||
steps:
|
||||
- name: Notify Clio about new version
|
||||
if: (needs.publish.outputs.outcome == 'success')
|
||||
shell: bash
|
||||
run: |
|
||||
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
|
||||
-F "client_payload[version]=${{ needs.publish.outputs.version }}@${{ needs.publish.outputs.channel }}" \
|
||||
-F "client_payload[pr]=${{ github.event.pull_request.number }}"
|
||||
112
.github/workflows/macos.yml
vendored
@@ -1,112 +0,0 @@
|
||||
name: macos
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, reopened, synchronize, ready_for_review]
|
||||
push:
|
||||
# If the branches list is ever changed, be sure to change it on all
|
||||
# build/test jobs (nix, macos, windows, instrumentation)
|
||||
branches:
|
||||
# Always build the package branches
|
||||
- develop
|
||||
- release
|
||||
- master
|
||||
# Branches that opt-in to running
|
||||
- "ci/**"
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
# This part of Conan configuration is specific to this workflow only; we do not want
|
||||
# to pollute conan/profiles directory with settings which might not work for others
|
||||
env:
|
||||
CONAN_REMOTE_URL: https://conan.ripplex.io
|
||||
CONAN_REMOTE_USERNAME: ${{ secrets.CONAN_REMOTE_USERNAME }}
|
||||
CONAN_REMOTE_PASSWORD: ${{ secrets.CONAN_REMOTE_PASSWORD }}
|
||||
# This part of the Conan configuration is specific to this workflow only; we
|
||||
# do not want to pollute the 'conan/profiles' directory with settings that
|
||||
# might not work for other workflows.
|
||||
CONAN_GLOBAL_CONF: |
|
||||
core.download:parallel={{os.cpu_count()}}
|
||||
core.upload:parallel={{os.cpu_count()}}
|
||||
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
|
||||
tools.build:verbosity=verbose
|
||||
tools.compilation:verbosity=verbose
|
||||
|
||||
jobs:
|
||||
test:
|
||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- macos
|
||||
generator:
|
||||
- Ninja
|
||||
configuration:
|
||||
- Release
|
||||
runs-on: [self-hosted, macOS, mac-runner-m1]
|
||||
env:
|
||||
# The `build` action requires these variables.
|
||||
build_dir: .build
|
||||
NUM_PROCESSORS: 12
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- name: install Conan
|
||||
run: |
|
||||
brew install conan
|
||||
- name: install Ninja
|
||||
if: matrix.generator == 'Ninja'
|
||||
run: brew install ninja
|
||||
- name: install python
|
||||
run: |
|
||||
if which python > /dev/null 2>&1; then
|
||||
echo "Python executable exists"
|
||||
else
|
||||
brew install python@3.13
|
||||
ln -s /opt/homebrew/bin/python3 /opt/homebrew/bin/python
|
||||
fi
|
||||
- name: install cmake
|
||||
run: |
|
||||
if which cmake > /dev/null 2>&1; then
|
||||
echo "cmake executable exists"
|
||||
else
|
||||
brew install cmake
|
||||
fi
|
||||
- name: install nproc
|
||||
run: |
|
||||
brew install coreutils
|
||||
- name: check environment
|
||||
run: |
|
||||
env | sort
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
python --version
|
||||
conan --version
|
||||
cmake --version
|
||||
nproc --version
|
||||
echo -n "nproc returns: "
|
||||
nproc
|
||||
system_profiler SPHardwareDataType
|
||||
sysctl -n hw.logicalcpu
|
||||
clang --version
|
||||
- name: configure Conan
|
||||
run: |
|
||||
echo "${CONAN_GLOBAL_CONF}" > $(conan config home)/global.conf
|
||||
conan config install conan/profiles/ -tf $(conan config home)/profiles/
|
||||
conan profile show
|
||||
- name: build dependencies
|
||||
uses: ./.github/actions/dependencies
|
||||
with:
|
||||
configuration: ${{ matrix.configuration }}
|
||||
- name: build
|
||||
uses: ./.github/actions/build
|
||||
with:
|
||||
generator: ${{ matrix.generator }}
|
||||
configuration: ${{ matrix.configuration }}
|
||||
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
|
||||
- name: test
|
||||
run: |
|
||||
n=$(nproc)
|
||||
echo "Using $n test jobs"
|
||||
|
||||
cd ${build_dir}
|
||||
./rippled --unittest --unittest-jobs $n
|
||||
ctest -j $n --output-on-failure
|
||||
60
.github/workflows/missing-commits.yml
vendored
@@ -1,60 +0,0 @@
|
||||
name: missing-commits
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
# Only check that the branches are up to date when updating the
|
||||
# relevant branches.
|
||||
- develop
|
||||
- release
|
||||
|
||||
jobs:
|
||||
up_to_date:
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Check for missing commits
|
||||
id: commits
|
||||
env:
|
||||
SUGGESTION: |
|
||||
|
||||
If you are reading this, then the commits indicated above are
|
||||
missing from "develop" and/or "release". Do a reverse-merge
|
||||
as soon as possible. See CONTRIBUTING.md for instructions.
|
||||
run: |
|
||||
set -o pipefail
|
||||
# Branches ordered by how "canonical" they are. Every commit in
|
||||
# one branch should be in all the branches behind it
|
||||
order=( master release develop )
|
||||
branches=()
|
||||
for branch in "${order[@]}"
|
||||
do
|
||||
# Check that the branches exist so that this job will work on
|
||||
# forked repos, which don't necessarily have master and
|
||||
# release branches.
|
||||
if git ls-remote --exit-code --heads origin \
|
||||
refs/heads/${branch} > /dev/null
|
||||
then
|
||||
branches+=( origin/${branch} )
|
||||
fi
|
||||
done
|
||||
|
||||
prior=()
|
||||
for branch in "${branches[@]}"
|
||||
do
|
||||
if [[ ${#prior[@]} -ne 0 ]]
|
||||
then
|
||||
echo "Checking ${prior[@]} for commits missing from ${branch}"
|
||||
git log --oneline --no-merges "${prior[@]}" \
|
||||
^$branch | tee -a "missing-commits.txt"
|
||||
echo
|
||||
fi
|
||||
prior+=( "${branch}" )
|
||||
done
|
||||
if [[ $( cat missing-commits.txt | wc -l ) -ne 0 ]]
|
||||
then
|
||||
echo "${SUGGESTION}"
|
||||
exit 1
|
||||
fi
|
||||
422
.github/workflows/nix.yml
vendored
@@ -1,422 +0,0 @@
|
||||
name: nix
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, reopened, synchronize, ready_for_review]
|
||||
push:
|
||||
# If the branches list is ever changed, be sure to change it on all
|
||||
# build/test jobs (nix, macos, windows)
|
||||
branches:
|
||||
# Always build the package branches
|
||||
- develop
|
||||
- release
|
||||
- master
|
||||
# Branches that opt-in to running
|
||||
- "ci/**"
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CONAN_REMOTE_URL: https://conan.ripplex.io
|
||||
CONAN_REMOTE_USERNAME: ${{ secrets.CONAN_REMOTE_USERNAME }}
|
||||
CONAN_REMOTE_PASSWORD: ${{ secrets.CONAN_REMOTE_PASSWORD }}
|
||||
# This part of the Conan configuration is specific to this workflow only; we
|
||||
# do not want to pollute the 'conan/profiles' directory with settings that
|
||||
# might not work for other workflows.
|
||||
CONAN_GLOBAL_CONF: |
|
||||
core.download:parallel={{ os.cpu_count() }}
|
||||
core.upload:parallel={{ os.cpu_count() }}
|
||||
tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
|
||||
tools.build:verbosity=verbose
|
||||
tools.compilation:verbosity=verbose
|
||||
|
||||
# This workflow has multiple job matrixes.
|
||||
# They can be considered phases because most of the matrices ("test",
|
||||
# "coverage", "conan", ) depend on the first ("dependencies").
|
||||
#
|
||||
# The first phase has a job in the matrix for each combination of
|
||||
# variables that affects dependency ABI:
|
||||
# platform, compiler, and configuration.
|
||||
# It creates a GitHub artifact holding the Conan profile,
|
||||
# and builds and caches binaries for all the dependencies.
|
||||
# If an Artifactory remote is configured, they are cached there.
|
||||
# If not, they are added to the GitHub artifact.
|
||||
# GitHub's "cache" action has a size limit (10 GB) that is too small
|
||||
# to hold the binaries if they are built locally.
|
||||
# We must use the "{upload,download}-artifact" actions instead.
|
||||
#
|
||||
# The remaining phases have a job in the matrix for each test
|
||||
# configuration. They install dependency binaries from the cache,
|
||||
# whichever was used, and build and test rippled.
|
||||
#
|
||||
# "instrumentation" is independent, but is included here because it also
|
||||
# builds on linux in the same "on:" conditions.
|
||||
|
||||
jobs:
|
||||
dependencies:
|
||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux
|
||||
compiler:
|
||||
- gcc
|
||||
- clang
|
||||
configuration:
|
||||
- Debug
|
||||
- Release
|
||||
include:
|
||||
- compiler: gcc
|
||||
compiler_version: 12
|
||||
distro: ubuntu
|
||||
codename: jammy
|
||||
- compiler: clang
|
||||
compiler_version: 16
|
||||
distro: debian
|
||||
codename: bookworm
|
||||
runs-on: [self-hosted, heavy]
|
||||
container: ghcr.io/xrplf/ci/${{ matrix.distro }}-${{ matrix.codename }}:${{ matrix.compiler }}-${{ matrix.compiler_version }}
|
||||
env:
|
||||
build_dir: .build
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- name: check environment
|
||||
run: |
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
lsb_release -a || true
|
||||
${{ matrix.compiler }}-${{ matrix.compiler_version }} --version
|
||||
conan --version
|
||||
cmake --version
|
||||
env | sort
|
||||
- name: configure Conan
|
||||
run: |
|
||||
echo "${CONAN_GLOBAL_CONF}" >> $(conan config home)/global.conf
|
||||
conan config install conan/profiles/ -tf $(conan config home)/profiles/
|
||||
conan profile show
|
||||
- name: archive profile
|
||||
# Create this archive before dependencies are added to the local cache.
|
||||
run: tar -czf conan.tar.gz -C ${CONAN_HOME} .
|
||||
- name: build dependencies
|
||||
uses: ./.github/actions/dependencies
|
||||
with:
|
||||
configuration: ${{ matrix.configuration }}
|
||||
- name: upload archive
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
|
||||
with:
|
||||
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
|
||||
path: conan.tar.gz
|
||||
if-no-files-found: error
|
||||
|
||||
test:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux
|
||||
compiler:
|
||||
- gcc
|
||||
- clang
|
||||
configuration:
|
||||
- Debug
|
||||
- Release
|
||||
include:
|
||||
- compiler: gcc
|
||||
compiler_version: 12
|
||||
distro: ubuntu
|
||||
codename: jammy
|
||||
- compiler: clang
|
||||
compiler_version: 16
|
||||
distro: debian
|
||||
codename: bookworm
|
||||
cmake-args:
|
||||
-
|
||||
- "-Dunity=ON"
|
||||
needs: dependencies
|
||||
runs-on: [self-hosted, heavy]
|
||||
container: ghcr.io/xrplf/ci/${{ matrix.distro }}-${{ matrix.codename }}:${{ matrix.compiler }}-${{ matrix.compiler_version }}
|
||||
env:
|
||||
build_dir: .build
|
||||
steps:
|
||||
- name: download cache
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
|
||||
- name: extract cache
|
||||
run: |
|
||||
mkdir -p ${CONAN_HOME}
|
||||
tar -xzf conan.tar.gz -C ${CONAN_HOME}
|
||||
- name: check environment
|
||||
run: |
|
||||
env | sort
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
conan --version
|
||||
cmake --version
|
||||
- name: checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- name: dependencies
|
||||
uses: ./.github/actions/dependencies
|
||||
with:
|
||||
configuration: ${{ matrix.configuration }}
|
||||
- name: build
|
||||
uses: ./.github/actions/build
|
||||
with:
|
||||
generator: Ninja
|
||||
configuration: ${{ matrix.configuration }}
|
||||
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
|
||||
- name: check linking
|
||||
run: |
|
||||
cd ${build_dir}
|
||||
ldd ./rippled
|
||||
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
|
||||
echo 'The binary is statically linked.'
|
||||
else
|
||||
echo 'The binary is dynamically linked.'
|
||||
exit 1
|
||||
fi
|
||||
- name: test
|
||||
run: |
|
||||
cd ${build_dir}
|
||||
./rippled --unittest --unittest-jobs $(nproc)
|
||||
ctest -j $(nproc) --output-on-failure
|
||||
|
||||
reference-fee-test:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux
|
||||
compiler:
|
||||
- gcc
|
||||
configuration:
|
||||
- Debug
|
||||
cmake-args:
|
||||
- "-DUNIT_TEST_REFERENCE_FEE=200"
|
||||
- "-DUNIT_TEST_REFERENCE_FEE=1000"
|
||||
needs: dependencies
|
||||
runs-on: [self-hosted, heavy]
|
||||
container: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
|
||||
env:
|
||||
build_dir: .build
|
||||
steps:
|
||||
- name: download cache
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
|
||||
- name: extract cache
|
||||
run: |
|
||||
mkdir -p ${CONAN_HOME}
|
||||
tar -xzf conan.tar.gz -C ${CONAN_HOME}
|
||||
- name: check environment
|
||||
run: |
|
||||
env | sort
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
conan --version
|
||||
cmake --version
|
||||
- name: checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- name: dependencies
|
||||
uses: ./.github/actions/dependencies
|
||||
with:
|
||||
configuration: ${{ matrix.configuration }}
|
||||
- name: build
|
||||
uses: ./.github/actions/build
|
||||
with:
|
||||
generator: Ninja
|
||||
configuration: ${{ matrix.configuration }}
|
||||
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
|
||||
- name: test
|
||||
run: |
|
||||
cd ${build_dir}
|
||||
./rippled --unittest --unittest-jobs $(nproc)
|
||||
ctest -j $(nproc) --output-on-failure
|
||||
|
||||
coverage:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux
|
||||
compiler:
|
||||
- gcc
|
||||
configuration:
|
||||
- Debug
|
||||
needs: dependencies
|
||||
runs-on: [self-hosted, heavy]
|
||||
container: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
|
||||
env:
|
||||
build_dir: .build
|
||||
steps:
|
||||
- name: download cache
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
|
||||
- name: extract cache
|
||||
run: |
|
||||
mkdir -p ${CONAN_HOME}
|
||||
tar -xzf conan.tar.gz -C ${CONAN_HOME}
|
||||
- name: check environment
|
||||
run: |
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
conan --version
|
||||
cmake --version
|
||||
gcovr --version
|
||||
env | sort
|
||||
ls ${CONAN_HOME}
|
||||
- name: checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- name: dependencies
|
||||
uses: ./.github/actions/dependencies
|
||||
with:
|
||||
configuration: ${{ matrix.configuration }}
|
||||
- name: build
|
||||
uses: ./.github/actions/build
|
||||
with:
|
||||
generator: Ninja
|
||||
configuration: ${{ matrix.configuration }}
|
||||
cmake-args: >-
|
||||
-Dassert=TRUE
|
||||
-Dwerr=TRUE
|
||||
-Dcoverage=ON
|
||||
-Dcoverage_format=xml
|
||||
-DCODE_COVERAGE_VERBOSE=ON
|
||||
-DCMAKE_CXX_FLAGS="-O0"
|
||||
-DCMAKE_C_FLAGS="-O0"
|
||||
cmake-target: coverage
|
||||
- name: move coverage report
|
||||
shell: bash
|
||||
run: |
|
||||
mv "${build_dir}/coverage.xml" ./
|
||||
- name: archive coverage report
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
|
||||
with:
|
||||
name: coverage.xml
|
||||
path: coverage.xml
|
||||
retention-days: 30
|
||||
- name: upload coverage report
|
||||
uses: wandalen/wretry.action@v1.4.10
|
||||
with:
|
||||
action: codecov/codecov-action@v4.5.0
|
||||
with: |
|
||||
files: coverage.xml
|
||||
fail_ci_if_error: true
|
||||
disable_search: true
|
||||
verbose: true
|
||||
plugin: noop
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
attempt_limit: 5
|
||||
attempt_delay: 210000 # in milliseconds
|
||||
|
||||
conan:
|
||||
needs: dependencies
|
||||
runs-on: [self-hosted, heavy]
|
||||
container:
|
||||
image: ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12
|
||||
env:
|
||||
build_dir: .build
|
||||
platform: linux
|
||||
compiler: gcc
|
||||
compiler_version: 12
|
||||
configuration: Release
|
||||
steps:
|
||||
- name: download cache
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
name: ${{ env.platform }}-${{ env.compiler }}-${{ env.configuration }}
|
||||
- name: extract cache
|
||||
run: |
|
||||
mkdir -p ${CONAN_HOME}
|
||||
tar -xzf conan.tar.gz -C ${CONAN_HOME}
|
||||
- name: check environment
|
||||
run: |
|
||||
env | sort
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
conan --version
|
||||
cmake --version
|
||||
- name: checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- name: dependencies
|
||||
uses: ./.github/actions/dependencies
|
||||
with:
|
||||
configuration: ${{ env.configuration }}
|
||||
- name: export
|
||||
run: |
|
||||
conan export . --version head
|
||||
- name: build
|
||||
run: |
|
||||
cd tests/conan
|
||||
mkdir ${build_dir} && cd ${build_dir}
|
||||
conan install .. \
|
||||
--settings:all build_type=${configuration} \
|
||||
--output-folder . \
|
||||
--build missing
|
||||
cmake .. \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=./build/${configuration}/generators/conan_toolchain.cmake \
|
||||
-DCMAKE_BUILD_TYPE=${configuration}
|
||||
cmake --build .
|
||||
./example | grep '^[[:digit:]]\+\.[[:digit:]]\+\.[[:digit:]]\+'
|
||||
|
||||
instrumentation-build:
|
||||
needs: dependencies
|
||||
runs-on: [self-hosted, heavy]
|
||||
container: ghcr.io/xrplf/ci/debian-bookworm:clang-16
|
||||
env:
|
||||
build_dir: .build
|
||||
steps:
|
||||
- name: download cache
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
name: linux-clang-Debug
|
||||
|
||||
- name: extract cache
|
||||
run: |
|
||||
mkdir -p ${CONAN_HOME}
|
||||
tar -xzf conan.tar.gz -C ${CONAN_HOME}
|
||||
|
||||
- name: check environment
|
||||
run: |
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
conan --version
|
||||
cmake --version
|
||||
env | sort
|
||||
ls ${CONAN_HOME}
|
||||
|
||||
- name: checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
|
||||
- name: dependencies
|
||||
uses: ./.github/actions/dependencies
|
||||
with:
|
||||
configuration: Debug
|
||||
|
||||
- name: prepare environment
|
||||
run: |
|
||||
mkdir -p ${build_dir}
|
||||
echo "SOURCE_DIR=$(pwd)" >> $GITHUB_ENV
|
||||
echo "BUILD_DIR=$(pwd)/${build_dir}" >> $GITHUB_ENV
|
||||
|
||||
- name: build with instrumentation
|
||||
run: |
|
||||
cd ${BUILD_DIR}
|
||||
cmake -S ${SOURCE_DIR} -B ${BUILD_DIR} \
|
||||
-Dvoidstar=ON \
|
||||
-Dtests=ON \
|
||||
-Dxrpld=ON \
|
||||
-DCMAKE_BUILD_TYPE=Debug \
|
||||
-DSECP256K1_BUILD_BENCHMARK=OFF \
|
||||
-DSECP256K1_BUILD_TESTS=OFF \
|
||||
-DSECP256K1_BUILD_EXHAUSTIVE_TESTS=OFF \
|
||||
-DCMAKE_TOOLCHAIN_FILE=${BUILD_DIR}/build/generators/conan_toolchain.cmake
|
||||
cmake --build . --parallel $(nproc)
|
||||
|
||||
- name: verify instrumentation enabled
|
||||
run: |
|
||||
cd ${BUILD_DIR}
|
||||
./rippled --version | grep libvoidstar
|
||||
|
||||
- name: run unit tests
|
||||
run: |
|
||||
cd ${BUILD_DIR}
|
||||
./rippled -u --unittest-jobs $(( $(nproc)/4 ))
|
||||
ctest -j $(nproc) --output-on-failure
|
||||
143
.github/workflows/on-pr.yml
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
# This workflow runs all workflows to check, build and test the project on
|
||||
# various Linux flavors, as well as on macOS and Windows, on every push to a
|
||||
# user branch. However, it will not run if the pull request is a draft unless it
|
||||
# has the 'DraftRunCI' label.
|
||||
name: PR
|
||||
|
||||
on:
|
||||
merge_group:
|
||||
types:
|
||||
- checks_requested
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- reopened
|
||||
- synchronize
|
||||
- ready_for_review
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
# This job determines whether the rest of the workflow should run. It runs
|
||||
# when the PR is not a draft (which should also cover merge-group) or
|
||||
# has the 'DraftRunCI' label.
|
||||
should-run:
|
||||
if: ${{ !github.event.pull_request.draft || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
- name: Determine changed files
|
||||
# This step checks whether any files have changed that should
|
||||
# cause the next jobs to run. We do it this way rather than
|
||||
# using `paths` in the `on:` section, because all required
|
||||
# checks must pass, even for changes that do not modify anything
|
||||
# that affects those checks. We would therefore like to make the
|
||||
# checks required only if the job runs, but GitHub does not
|
||||
# support that directly. By always executing the workflow on new
|
||||
# commits and by using the changed-files action below, we ensure
|
||||
# that GitHub considers any skipped jobs to have passed, and in
|
||||
# turn the required checks as well.
|
||||
id: changes
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
# These paths are unique to `on-pr.yml`.
|
||||
.github/scripts/levelization/**
|
||||
.github/scripts/rename/**
|
||||
.github/workflows/reusable-check-levelization.yml
|
||||
.github/workflows/reusable-check-rename.yml
|
||||
.github/workflows/reusable-notify-clio.yml
|
||||
.github/workflows/on-pr.yml
|
||||
|
||||
# Keep the paths below in sync with those in `on-trigger.yml`.
|
||||
.github/actions/build-deps/**
|
||||
.github/actions/build-test/**
|
||||
.github/actions/setup-conan/**
|
||||
.github/scripts/strategy-matrix/**
|
||||
.github/workflows/reusable-build.yml
|
||||
.github/workflows/reusable-build-test-config.yml
|
||||
.github/workflows/reusable-build-test.yml
|
||||
.github/workflows/reusable-strategy-matrix.yml
|
||||
.github/workflows/reusable-test.yml
|
||||
.codecov.yml
|
||||
cmake/**
|
||||
conan/**
|
||||
external/**
|
||||
include/**
|
||||
src/**
|
||||
tests/**
|
||||
CMakeLists.txt
|
||||
conanfile.py
|
||||
conan.lock
|
||||
- name: Check whether to run
|
||||
# This step determines whether the rest of the workflow should
|
||||
# run. The rest of the workflow will run if this job runs AND at
|
||||
# least one of:
|
||||
# * Any of the files checked in the `changes` step were modified
|
||||
# * The PR is NOT a draft and is labeled "Ready to merge"
|
||||
# * The workflow is running from the merge queue
|
||||
id: go
|
||||
env:
|
||||
FILES: ${{ steps.changes.outputs.any_changed }}
|
||||
DRAFT: ${{ github.event.pull_request.draft }}
|
||||
READY: ${{ contains(github.event.pull_request.labels.*.name, 'Ready to merge') }}
|
||||
MERGE: ${{ github.event_name == 'merge_group' }}
|
||||
run: |
|
||||
echo "go=${{ (env.DRAFT != 'true' && env.READY == 'true') || env.FILES == 'true' || env.MERGE == 'true' }}" >> "${GITHUB_OUTPUT}"
|
||||
cat "${GITHUB_OUTPUT}"
|
||||
outputs:
|
||||
go: ${{ steps.go.outputs.go == 'true' }}
|
||||
|
||||
check-levelization:
|
||||
needs: should-run
|
||||
if: ${{ needs.should-run.outputs.go == 'true' }}
|
||||
uses: ./.github/workflows/reusable-check-levelization.yml
|
||||
|
||||
check-rename:
|
||||
needs: should-run
|
||||
if: ${{ needs.should-run.outputs.go == 'true' }}
|
||||
uses: ./.github/workflows/reusable-check-rename.yml
|
||||
|
||||
build-test:
|
||||
needs: should-run
|
||||
if: ${{ needs.should-run.outputs.go == 'true' }}
|
||||
uses: ./.github/workflows/reusable-build-test.yml
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [linux, macos, windows]
|
||||
with:
|
||||
# Enable ccache only for events targeting the XRPLF repository, since
|
||||
# other accounts will not have access to our remote cache storage.
|
||||
ccache_enabled: ${{ github.repository_owner == 'XRPLF' }}
|
||||
os: ${{ matrix.os }}
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
notify-clio:
|
||||
needs:
|
||||
- should-run
|
||||
- build-test
|
||||
if: ${{ needs.should-run.outputs.go == 'true' && (startsWith(github.base_ref, 'release') || github.base_ref == 'master') }}
|
||||
uses: ./.github/workflows/reusable-notify-clio.yml
|
||||
secrets:
|
||||
clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
|
||||
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
|
||||
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
|
||||
|
||||
passed:
|
||||
if: failure() || cancelled()
|
||||
needs:
|
||||
- build-test
|
||||
- check-levelization
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Fail
|
||||
run: false
|
||||
80
.github/workflows/on-trigger.yml
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
# This workflow runs all workflows to build the dependencies required for the
|
||||
# project on various Linux flavors, as well as on macOS and Windows, on a
|
||||
# scheduled basis, on merge into the 'develop', 'release', or 'master' branches,
|
||||
# or manually. The missing commits check is only run when the code is merged
|
||||
# into the 'develop' or 'release' branches, and the documentation is built when
|
||||
# the code is merged into the 'develop' branch.
|
||||
name: Trigger
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "develop"
|
||||
- "release*"
|
||||
- "master"
|
||||
paths:
|
||||
# These paths are unique to `on-trigger.yml`.
|
||||
- ".github/workflows/on-trigger.yml"
|
||||
|
||||
# Keep the paths below in sync with those in `on-pr.yml`.
|
||||
- ".github/actions/build-deps/**"
|
||||
- ".github/actions/build-test/**"
|
||||
- ".github/actions/setup-conan/**"
|
||||
- ".github/scripts/strategy-matrix/**"
|
||||
- ".github/workflows/reusable-build.yml"
|
||||
- ".github/workflows/reusable-build-test-config.yml"
|
||||
- ".github/workflows/reusable-build-test.yml"
|
||||
- ".github/workflows/reusable-strategy-matrix.yml"
|
||||
- ".github/workflows/reusable-test.yml"
|
||||
- ".codecov.yml"
|
||||
- "cmake/**"
|
||||
- "conan/**"
|
||||
- "external/**"
|
||||
- "include/**"
|
||||
- "src/**"
|
||||
- "tests/**"
|
||||
- "CMakeLists.txt"
|
||||
- "conanfile.py"
|
||||
- "conan.lock"
|
||||
|
||||
# Run at 06:32 UTC on every day of the week from Monday through Friday. This
|
||||
# will force all dependencies to be rebuilt, which is useful to verify that
|
||||
# all dependencies can be built successfully. Only the dependencies that
|
||||
# are actually missing from the remote will be uploaded.
|
||||
schedule:
|
||||
- cron: "32 6 * * 1-5"
|
||||
|
||||
# Run when manually triggered via the GitHub UI or API.
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
# When a PR is merged into the develop branch it will be assigned a unique
|
||||
# group identifier, so execution will continue even if another PR is merged
|
||||
# while it is still running. In all other cases the group identifier is shared
|
||||
# per branch, so that any in-progress runs are cancelled when a new commit is
|
||||
# pushed.
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' && github.sha || github.ref }}
|
||||
cancel-in-progress: true
|
||||
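# For illustration (values are placeholders, not from an actual run): a push to
# 'develop' produces a group like 'Trigger-<commit-sha>', so each merged commit
# runs to completion, while a push to 'master' or a 'release*' branch produces
# 'Trigger-refs/heads/master', so a newer push cancels any run still in progress.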
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
build-test:
|
||||
uses: ./.github/workflows/reusable-build-test.yml
|
||||
strategy:
|
||||
fail-fast: ${{ github.event_name == 'merge_group' }}
|
||||
matrix:
|
||||
os: [linux, macos, windows]
|
||||
with:
|
||||
# Enable ccache only for events targeting the XRPLF repository, since
|
||||
# other accounts will not have access to our remote cache storage.
|
||||
# However, we do not enable ccache for events targeting the master or a
|
||||
# release branch, to protect against the rare case that the output
|
||||
# produced by ccache is not identical to a regular compilation.
|
||||
ccache_enabled: ${{ github.repository_owner == 'XRPLF' && !(github.base_ref == 'master' || startsWith(github.base_ref, 'release')) }}
|
||||
os: ${{ matrix.os }}
|
||||
strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
15
.github/workflows/pre-commit.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
name: Run pre-commit hooks
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches: [develop, release, master]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
# Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
|
||||
run-hooks:
|
||||
uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
|
||||
with:
|
||||
runs_on: ubuntu-latest
|
||||
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'
|
||||
72
.github/workflows/publish-docs.yml
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
# This workflow builds the documentation for the repository, and publishes it to
|
||||
# GitHub Pages when changes are merged into the default branch.
|
||||
name: Build and publish documentation
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- ".github/workflows/publish-docs.yml"
|
||||
- "*.md"
|
||||
- "**/*.md"
|
||||
- "docs/**"
|
||||
- "include/**"
|
||||
- "src/libxrpl/**"
|
||||
- "src/xrpld/**"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
env:
|
||||
BUILD_DIR: build
|
||||
NPROC_SUBTRACT: 2
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-latest
|
||||
container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
|
||||
- name: Get number of processors
|
||||
uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
|
||||
id: nproc
|
||||
with:
|
||||
subtract: ${{ env.NPROC_SUBTRACT }}
|
||||
|
||||
- name: Check configuration
|
||||
run: |
|
||||
echo 'Checking path.'
|
||||
echo ${PATH} | tr ':' '\n'
|
||||
|
||||
echo 'Checking environment variables.'
|
||||
env | sort
|
||||
|
||||
echo 'Checking CMake version.'
|
||||
cmake --version
|
||||
|
||||
echo 'Checking Doxygen version.'
|
||||
doxygen --version
|
||||
|
||||
- name: Build documentation
|
||||
env:
|
||||
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||
run: |
|
||||
mkdir -p "${BUILD_DIR}"
|
||||
cd "${BUILD_DIR}"
|
||||
cmake -Donly_docs=ON ..
|
||||
cmake --build . --target docs --parallel ${BUILD_NPROC}
|
||||
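# The same build can be reproduced locally (a sketch, assuming CMake and Doxygen
# are installed): run `cmake -Donly_docs=ON ..` from an empty build directory and
# then `cmake --build . --target docs`; the generated HTML ends up under
# <build-dir>/docs/html, which is the directory published below.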
|
||||
- name: Publish documentation
|
||||
if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
|
||||
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
publish_dir: ${{ env.BUILD_DIR }}/docs/html
|
||||
246
.github/workflows/reusable-build-test-config.yml
vendored
Normal file
@@ -0,0 +1,246 @@
|
||||
name: Build and test configuration
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
build_only:
|
||||
description: 'Whether to only build or to build and test the code ("true", "false").'
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
build_type:
|
||||
description: 'The build type to use ("Debug", "Release").'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
ccache_enabled:
|
||||
description: "Whether to enable ccache."
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
cmake_args:
|
||||
description: "Additional arguments to pass to CMake."
|
||||
required: false
|
||||
type: string
|
||||
default: ""
|
||||
|
||||
cmake_target:
|
||||
description: "The CMake target to build."
|
||||
required: true
|
||||
type: string
|
||||
|
||||
runs_on:
|
||||
description: Runner to run the job on as a JSON string
|
||||
required: true
|
||||
type: string
|
||||
|
||||
image:
|
||||
description: "The image to run in (leave empty to run natively)"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
config_name:
|
||||
description: "The configuration string (used for naming artifacts and such)."
|
||||
required: true
|
||||
type: string
|
||||
|
||||
nproc_subtract:
|
||||
description: "The number of processors to subtract when calculating parallelism."
|
||||
required: false
|
||||
type: number
|
||||
default: 2
|
||||
|
||||
secrets:
|
||||
CODECOV_TOKEN:
|
||||
description: "The Codecov token to use for uploading coverage reports."
|
||||
required: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
env:
|
||||
# Conan installs the generators in the build/generators directory, see the
|
||||
# layout() method in conanfile.py. We then run CMake from the build directory.
|
||||
BUILD_DIR: build
|
||||
|
||||
jobs:
|
||||
build-and-test:
|
||||
name: ${{ inputs.config_name }}
|
||||
runs-on: ${{ fromJSON(inputs.runs_on) }}
|
||||
container: ${{ inputs.image != '' && inputs.image || null }}
|
||||
timeout-minutes: 60
|
||||
env:
|
||||
# Use a namespace to keep the objects separate for each configuration.
|
||||
CCACHE_NAMESPACE: ${{ inputs.config_name }}
|
||||
# Ccache supports both Redis and HTTP endpoints.
|
||||
# * For Redis, use the following format: redis://ip:port, see
|
||||
# https://github.com/ccache/ccache/wiki/Redis-storage. Note that TLS is
|
||||
# not directly supported by ccache, and requires use of a proxy.
|
||||
# * For HTTP use the following format: http://ip:port/cache when using
|
||||
# nginx as backend or http://ip:port|layout=bazel when using Bazel
|
||||
# Remote Cache, see https://github.com/ccache/ccache/wiki/HTTP-storage.
|
||||
# Note that HTTPS is not directly supported by ccache.
|
||||
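# Illustrative endpoint values (the hosts below are placeholders, not real
# servers):
#   CCACHE_REMOTE_STORAGE: redis://10.0.0.1:6379
#   CCACHE_REMOTE_STORAGE: http://10.0.0.1:8080/cache
#   CCACHE_REMOTE_STORAGE: http://10.0.0.1:8080|layout=bazel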
CCACHE_REMOTE_ONLY: true
|
||||
CCACHE_REMOTE_STORAGE: http://cache.dev.ripplex.io:8080|layout=bazel
|
||||
# Ignore the creation and modification timestamps on files, since the
|
||||
# header files are copied into separate directories by CMake, which will
|
||||
# otherwise result in cache misses.
|
||||
CCACHE_SLOPPINESS: include_file_ctime,include_file_mtime
|
||||
# Determine if coverage and voidstar should be enabled.
|
||||
COVERAGE_ENABLED: ${{ contains(inputs.cmake_args, '-Dcoverage=ON') }}
|
||||
VOIDSTAR_ENABLED: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
|
||||
steps:
|
||||
- name: Cleanup workspace (macOS and Windows)
|
||||
if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
|
||||
uses: XRPLF/actions/cleanup-workspace@2ece4ec6ab7de266859a6f053571425b2bd684b6
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
|
||||
- name: Prepare runner
|
||||
uses: XRPLF/actions/prepare-runner@2ece4ec6ab7de266859a6f053571425b2bd684b6
|
||||
with:
|
||||
disable_ccache: ${{ !inputs.ccache_enabled }}
|
||||
|
||||
- name: Set ccache log file
|
||||
if: ${{ inputs.ccache_enabled && runner.debug == '1' }}
|
||||
run: echo "CCACHE_LOGFILE=${{ runner.temp }}/ccache.log" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Print build environment
|
||||
uses: ./.github/actions/print-env
|
||||
|
||||
- name: Get number of processors
|
||||
uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
|
||||
id: nproc
|
||||
with:
|
||||
subtract: ${{ inputs.nproc_subtract }}
|
||||
|
||||
- name: Setup Conan
|
||||
uses: ./.github/actions/setup-conan
|
||||
|
||||
- name: Build dependencies
|
||||
uses: ./.github/actions/build-deps
|
||||
with:
|
||||
build_nproc: ${{ steps.nproc.outputs.nproc }}
|
||||
build_type: ${{ inputs.build_type }}
|
||||
# Set the verbosity to "quiet" for Windows to avoid an excessive
|
||||
# amount of logs. For other OSes, the "verbose" logs are more useful.
|
||||
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
|
||||
|
||||
- name: Configure CMake
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
env:
|
||||
BUILD_TYPE: ${{ inputs.build_type }}
|
||||
CMAKE_ARGS: ${{ inputs.cmake_args }}
|
||||
run: |
|
||||
cmake \
|
||||
-G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
|
||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
|
||||
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
|
||||
${CMAKE_ARGS} \
|
||||
..
|
||||
|
||||
- name: Build the binary
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
env:
|
||||
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||
BUILD_TYPE: ${{ inputs.build_type }}
|
||||
CMAKE_TARGET: ${{ inputs.cmake_target }}
|
||||
run: |
|
||||
cmake \
|
||||
--build . \
|
||||
--config "${BUILD_TYPE}" \
|
||||
--parallel "${BUILD_NPROC}" \
|
||||
--target "${CMAKE_TARGET}"
|
||||
|
||||
- name: Show ccache statistics
|
||||
if: ${{ inputs.ccache_enabled }}
|
||||
run: |
|
||||
ccache --show-stats -vv
|
||||
if [ '${{ runner.debug }}' = '1' ]; then
|
||||
cat "${CCACHE_LOGFILE}"
|
||||
curl ${CCACHE_REMOTE_STORAGE%|*}/status || true
|
||||
fi
|
||||
|
||||
- name: Upload the binary (Linux)
|
||||
if: ${{ github.repository_owner == 'XRPLF' && runner.os == 'Linux' }}
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: xrpld-${{ inputs.config_name }}
|
||||
path: ${{ env.BUILD_DIR }}/xrpld
|
||||
retention-days: 3
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Check linking (Linux)
|
||||
if: ${{ runner.os == 'Linux' }}
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
run: |
|
||||
ldd ./xrpld
|
||||
if [ "$(ldd ./xrpld | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
|
||||
echo 'The binary is statically linked.'
|
||||
else
|
||||
echo 'The binary is dynamically linked.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Verify presence of instrumentation (Linux)
|
||||
if: ${{ runner.os == 'Linux' && env.VOIDSTAR_ENABLED == 'true' }}
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
run: |
|
||||
./xrpld --version | grep libvoidstar
|
||||
|
||||
- name: Run the separate tests
|
||||
if: ${{ !inputs.build_only }}
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
# Windows locks some of the build files while running tests, and parallel jobs can collide
|
||||
env:
|
||||
BUILD_TYPE: ${{ inputs.build_type }}
|
||||
PARALLELISM: ${{ runner.os == 'Windows' && '1' || steps.nproc.outputs.nproc }}
|
||||
run: |
|
||||
ctest \
|
||||
--output-on-failure \
|
||||
-C "${BUILD_TYPE}" \
|
||||
-j "${PARALLELISM}"
|
||||
|
||||
- name: Run the embedded tests
|
||||
if: ${{ !inputs.build_only }}
|
||||
working-directory: ${{ runner.os == 'Windows' && format('{0}/{1}', env.BUILD_DIR, inputs.build_type) || env.BUILD_DIR }}
|
||||
env:
|
||||
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||
run: |
|
||||
./xrpld --unittest --unittest-jobs "${BUILD_NPROC}"
|
||||
|
||||
- name: Debug failure (Linux)
|
||||
if: ${{ failure() && runner.os == 'Linux' && !inputs.build_only }}
|
||||
run: |
|
||||
echo "IPv4 local port range:"
|
||||
cat /proc/sys/net/ipv4/ip_local_port_range
|
||||
echo "Netstat:"
|
||||
netstat -an
|
||||
|
||||
- name: Prepare coverage report
|
||||
if: ${{ !inputs.build_only && env.COVERAGE_ENABLED == 'true' }}
|
||||
working-directory: ${{ env.BUILD_DIR }}
|
||||
env:
|
||||
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||
BUILD_TYPE: ${{ inputs.build_type }}
|
||||
run: |
|
||||
cmake \
|
||||
--build . \
|
||||
--config "${BUILD_TYPE}" \
|
||||
--parallel "${BUILD_NPROC}" \
|
||||
--target coverage
|
||||
|
||||
- name: Upload coverage report
|
||||
if: ${{ github.repository_owner == 'XRPLF' && !inputs.build_only && env.COVERAGE_ENABLED == 'true' }}
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
with:
|
||||
disable_search: true
|
||||
disable_telem: true
|
||||
fail_ci_if_error: true
|
||||
files: ${{ env.BUILD_DIR }}/coverage.xml
|
||||
plugins: noop
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
verbose: true
|
||||
61
.github/workflows/reusable-build-test.yml
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
# This workflow builds and tests the binary for various configurations.
|
||||
name: Build and test
|
||||
|
||||
# This workflow can only be triggered by other workflows. Note that the
|
||||
# workflow_call event does not support the 'choice' input type, see
|
||||
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
|
||||
# so we use 'string' instead.
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ccache_enabled:
|
||||
description: "Whether to enable ccache."
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
os:
|
||||
description: 'The operating system to use for the build ("linux", "macos", "windows").'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
strategy_matrix:
|
||||
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
|
||||
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
|
||||
required: false
|
||||
type: string
|
||||
default: "minimal"
|
||||
|
||||
secrets:
|
||||
CODECOV_TOKEN:
|
||||
description: "The Codecov token to use for uploading coverage reports."
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
# Generate the strategy matrix to be used by the following job.
|
||||
generate-matrix:
|
||||
uses: ./.github/workflows/reusable-strategy-matrix.yml
|
||||
with:
|
||||
os: ${{ inputs.os }}
|
||||
strategy_matrix: ${{ inputs.strategy_matrix }}
|
||||
|
||||
# Build and test the binary for each configuration.
|
||||
build-test-config:
|
||||
needs:
|
||||
- generate-matrix
|
||||
uses: ./.github/workflows/reusable-build-test-config.yml
|
||||
strategy:
|
||||
fail-fast: ${{ github.event_name == 'merge_group' }}
|
||||
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
|
||||
max-parallel: 10
|
||||
with:
|
||||
build_only: ${{ matrix.build_only }}
|
||||
build_type: ${{ matrix.build_type }}
|
||||
ccache_enabled: ${{ inputs.ccache_enabled }}
|
||||
cmake_args: ${{ matrix.cmake_args }}
|
||||
cmake_target: ${{ matrix.cmake_target }}
|
||||
runs_on: ${{ toJSON(matrix.architecture.runner) }}
|
||||
image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}
|
||||
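# For illustration, the expression above resolves to an image name such as
# ghcr.io/xrplf/ci/ubuntu-jammy:gcc-12-sha-<image-sha> (actual values depend on
# the matrix entry).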
config_name: ${{ matrix.config_name }}
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
46
.github/workflows/reusable-check-levelization.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
# This workflow checks if the dependencies between the modules are correctly
|
||||
# indexed.
|
||||
name: Check levelization
|
||||
|
||||
# This workflow can only be triggered by other workflows.
|
||||
on: workflow_call
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-levelization
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
levelization:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
- name: Check levelization
|
||||
run: .github/scripts/levelization/generate.sh
|
||||
- name: Check for differences
|
||||
env:
|
||||
MESSAGE: |
|
||||
|
||||
The dependency relationships between the modules in xrpld have
|
||||
changed, which may be an improvement or a regression.
|
||||
|
||||
A rule of thumb is that if your changes caused something to be
|
||||
removed from loops.txt, it's probably an improvement, while if
|
||||
something was added, it's probably a regression.
|
||||
|
||||
Run '.github/scripts/levelization/generate.sh' in your repo, commit
|
||||
and push the changes. See .github/scripts/levelization/README.md for
|
||||
more info.
|
||||
run: |
|
||||
DIFF=$(git status --porcelain)
|
||||
if [ -n "${DIFF}" ]; then
|
||||
# Print the differences to give the contributor a hint about what to
|
||||
# expect when running levelization on their own machine.
|
||||
git diff
|
||||
echo "${MESSAGE}"
|
||||
exit 1
|
||||
fi
|
||||
52
.github/workflows/reusable-check-rename.yml
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
# This workflow checks that the codebase has been properly renamed; see more info in
|
||||
# .github/scripts/rename/README.md.
|
||||
name: Check rename
|
||||
|
||||
# This workflow can only be triggered by other workflows.
|
||||
on: workflow_call
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-rename
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
rename:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
- name: Check definitions
|
||||
run: .github/scripts/rename/definitions.sh .
|
||||
- name: Check copyright notices
|
||||
run: .github/scripts/rename/copyright.sh .
|
||||
- name: Check CMake configs
|
||||
run: .github/scripts/rename/cmake.sh .
|
||||
- name: Check binary name
|
||||
run: .github/scripts/rename/binary.sh .
|
||||
- name: Check namespaces
|
||||
run: .github/scripts/rename/namespace.sh .
|
||||
- name: Check config name
|
||||
run: .github/scripts/rename/config.sh .
|
||||
- name: Check for differences
|
||||
env:
|
||||
MESSAGE: |
|
||||
|
||||
One or more files contain changes that do not adhere to new naming
|
||||
conventions.
|
||||
|
||||
Run the scripts in '.github/scripts/rename/' in your repo, commit
|
||||
and push the changes. See .github/scripts/rename/README.md for
|
||||
more info.
|
||||
run: |
|
||||
DIFF=$(git status --porcelain)
|
||||
if [ -n "${DIFF}" ]; then
|
||||
# Print the differences to give the contributor a hint about what to
|
||||
# expect when running the renaming scripts on their own machine.
|
||||
git diff
|
||||
echo "${MESSAGE}"
|
||||
exit 1
|
||||
fi
|
||||
91
.github/workflows/reusable-notify-clio.yml
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
# This workflow exports the built libxrpl package to the Conan remote on
|
||||
# a channel named after the pull request, and notifies the Clio repository about
|
||||
# the new version so it can check for compatibility.
|
||||
name: Notify Clio
|
||||
|
||||
# This workflow can only be triggered by other workflows.
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
conan_remote_name:
|
||||
description: "The name of the Conan remote to use."
|
||||
required: false
|
||||
type: string
|
||||
default: xrplf
|
||||
conan_remote_url:
|
||||
description: "The URL of the Conan endpoint to use."
|
||||
required: false
|
||||
type: string
|
||||
default: https://conan.ripplex.io
|
||||
secrets:
|
||||
clio_notify_token:
|
||||
description: "The GitHub token to notify Clio about new versions."
|
||||
required: true
|
||||
conan_remote_username:
|
||||
description: "The username for logging into the Conan remote."
|
||||
required: true
|
||||
conan_remote_password:
|
||||
description: "The password for logging into the Conan remote."
|
||||
required: true
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-clio
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
upload:
|
||||
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
runs-on: ubuntu-latest
|
||||
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
- name: Generate outputs
|
||||
id: generate
|
||||
env:
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
run: |
|
||||
echo 'Generating user and channel.'
|
||||
echo "user=clio" >> "${GITHUB_OUTPUT}"
|
||||
echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
|
||||
echo 'Extracting version.'
|
||||
echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
|
||||
- name: Calculate conan reference
|
||||
id: conan_ref
|
||||
run: |
|
||||
echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"
|
||||
- name: Set up Conan
|
||||
uses: ./.github/actions/setup-conan
|
||||
with:
|
||||
conan_remote_name: ${{ inputs.conan_remote_name }}
|
||||
conan_remote_url: ${{ inputs.conan_remote_url }}
|
||||
- name: Log into Conan remote
|
||||
env:
|
||||
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
|
||||
- name: Upload package
|
||||
env:
|
||||
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||
run: |
|
||||
conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
|
||||
conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
|
||||
outputs:
|
||||
conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}
|
||||
|
||||
notify:
|
||||
needs: upload
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Notify Clio
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.clio_notify_token }}
|
||||
PR_URL: ${{ github.event.pull_request.html_url }}
|
||||
run: |
|
||||
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
|
||||
-F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
|
||||
-F "client_payload[pr_url]=${PR_URL}"
|
||||
45
.github/workflows/reusable-strategy-matrix.yml
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
name: Generate strategy matrix
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
os:
|
||||
description: 'The operating system to use for the build ("linux", "macos", "windows").'
|
||||
required: false
|
||||
type: string
|
||||
strategy_matrix:
|
||||
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
|
||||
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
|
||||
required: false
|
||||
type: string
|
||||
default: "minimal"
|
||||
outputs:
|
||||
matrix:
|
||||
description: "The generated strategy matrix."
|
||||
value: ${{ jobs.generate-matrix.outputs.matrix }}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
generate-matrix:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
matrix: ${{ steps.generate.outputs.matrix }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: 3.13
|
||||
|
||||
- name: Generate strategy matrix
|
||||
working-directory: .github/scripts/strategy-matrix
|
||||
id: generate
|
||||
env:
|
||||
GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
|
||||
GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
|
||||
run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
|
||||
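# The generator can also be run locally from .github/scripts/strategy-matrix, for
# example (a sketch; the flags mirror the env vars above):
#   ./generate.py --all --config=linux.json
# which writes the matrix definition to standard output in the same form that is
# appended to GITHUB_OUTPUT here.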
110
.github/workflows/upload-conan-deps.yml
vendored
Normal file
@@ -0,0 +1,110 @@
|
||||
name: Upload Conan Dependencies
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 3 * * 2-6"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
force_source_build:
|
||||
description: "Force source build of all dependencies"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
force_upload:
|
||||
description: "Force upload of all dependencies"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
pull_request:
|
||||
branches: [develop]
|
||||
paths:
|
||||
# This allows testing changes to the upload workflow in a PR
|
||||
- .github/workflows/upload-conan-deps.yml
|
||||
push:
|
||||
branches: [develop]
|
||||
paths:
|
||||
- .github/workflows/upload-conan-deps.yml
|
||||
- .github/workflows/reusable-strategy-matrix.yml
|
||||
- .github/actions/build-deps/action.yml
|
||||
- .github/actions/setup-conan/action.yml
|
||||
- ".github/scripts/strategy-matrix/**"
|
||||
- conanfile.py
|
||||
- conan.lock
|
||||
|
||||
env:
|
||||
CONAN_REMOTE_NAME: xrplf
|
||||
CONAN_REMOTE_URL: https://conan.ripplex.io
|
||||
NPROC_SUBTRACT: 2
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
# Generate the strategy matrix to be used by the following job.
|
||||
generate-matrix:
|
||||
uses: ./.github/workflows/reusable-strategy-matrix.yml
|
||||
with:
|
||||
strategy_matrix: ${{ github.event_name == 'pull_request' && 'minimal' || 'all' }}
|
||||
|
||||
# Build and upload the dependencies for each configuration.
|
||||
run-upload-conan-deps:
|
||||
needs:
|
||||
- generate-matrix
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
|
||||
max-parallel: 10
|
||||
runs-on: ${{ matrix.architecture.runner }}
|
||||
container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || null }}
|
||||
steps:
|
||||
- name: Cleanup workspace (macOS and Windows)
|
||||
if: ${{ runner.os == 'macOS' || runner.os == 'Windows' }}
|
||||
uses: XRPLF/actions/cleanup-workspace@2ece4ec6ab7de266859a6f053571425b2bd684b6
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
|
||||
- name: Prepare runner
|
||||
uses: XRPLF/actions/prepare-runner@2ece4ec6ab7de266859a6f053571425b2bd684b6
|
||||
with:
|
||||
disable_ccache: true
|
||||
|
||||
- name: Print build environment
|
||||
uses: ./.github/actions/print-env
|
||||
|
||||
- name: Get number of processors
|
||||
uses: XRPLF/actions/get-nproc@2ece4ec6ab7de266859a6f053571425b2bd684b6
|
||||
id: nproc
|
||||
with:
|
||||
subtract: ${{ env.NPROC_SUBTRACT }}
|
||||
|
||||
- name: Setup Conan
|
||||
uses: ./.github/actions/setup-conan
|
||||
with:
|
||||
conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
|
||||
conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
|
||||
|
||||
- name: Build dependencies
|
||||
uses: ./.github/actions/build-deps
|
||||
with:
|
||||
build_nproc: ${{ steps.nproc.outputs.nproc }}
|
||||
build_type: ${{ matrix.build_type }}
|
||||
force_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
|
||||
# Set the verbosity to "quiet" for Windows to avoid an excessive
|
||||
# amount of logs. For other OSes, the "verbose" logs are more useful.
|
||||
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
|
||||
|
||||
- name: Log into Conan remote
|
||||
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
|
||||
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"
|
||||
|
||||
- name: Upload Conan packages
|
||||
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
|
||||
env:
|
||||
FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
|
||||
run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}
|
||||
106
.github/workflows/windows.yml
vendored
@@ -1,106 +0,0 @@
|
||||
name: windows
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, reopened, synchronize, ready_for_review]
|
||||
push:
|
||||
# If the branches list is ever changed, be sure to change it on all
|
||||
# build/test jobs (nix, macos, windows, instrumentation)
|
||||
branches:
|
||||
# Always build the package branches
|
||||
- develop
|
||||
- release
|
||||
- master
|
||||
# Branches that opt-in to running
|
||||
- "ci/**"
|
||||
|
||||
# https://docs.github.com/en/actions/using-jobs/using-concurrency
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
env:
|
||||
CONAN_REMOTE_URL: https://conan.ripplex.io
|
||||
CONAN_REMOTE_USERNAME: ${{ secrets.CONAN_REMOTE_USERNAME }}
|
||||
CONAN_REMOTE_PASSWORD: ${{ secrets.CONAN_REMOTE_PASSWORD }}
|
||||
# This part of the Conan configuration is specific to this workflow only; we
|
||||
# do not want to pollute the 'conan/profiles' directory with settings that
|
||||
# might not work for other workflows.
|
||||
CONAN_GLOBAL_CONF: |
|
||||
core.download:parallel={{os.cpu_count()}}
|
||||
core.upload:parallel={{os.cpu_count()}}
|
||||
tools.build:jobs=24
|
||||
tools.build:verbosity=verbose
|
||||
tools.compilation:verbosity=verbose
|
||||
|
||||
jobs:
|
||||
test:
|
||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
version:
|
||||
- generator: Visual Studio 17 2022
|
||||
runs-on: windows-2022
|
||||
configuration:
|
||||
- type: Release
|
||||
tests: true
|
||||
- type: Debug
|
||||
# Skip running unit tests on debug builds, because they
|
||||
# take an unreasonable amount of time
|
||||
tests: false
|
||||
runtime: d
|
||||
runs-on: ${{ matrix.version.runs-on }}
|
||||
env:
|
||||
build_dir: .build
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- name: choose Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
|
||||
with:
|
||||
python-version: 3.13
|
||||
- name: learn Python cache directory
|
||||
id: pip-cache
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
echo "dir=$(pip cache dir)" | tee ${GITHUB_OUTPUT}
|
||||
- name: restore Python cache directory
|
||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684
|
||||
with:
|
||||
path: ${{ steps.pip-cache.outputs.dir }}
|
||||
key: ${{ runner.os }}-${{ hashFiles('.github/workflows/windows.yml') }}
|
||||
- name: install Conan
|
||||
run: pip install wheel conan
|
||||
- name: check environment
|
||||
run: |
|
||||
dir env:
|
||||
$env:PATH -split ';'
|
||||
python --version
|
||||
conan --version
|
||||
cmake --version
|
||||
- name: configure Conan
|
||||
shell: bash
|
||||
run: |
|
||||
echo "${CONAN_GLOBAL_CONF}" > $(conan config home)/global.conf
|
||||
conan config install conan/profiles/ -tf $(conan config home)/profiles/
|
||||
conan profile show
|
||||
- name: build dependencies
|
||||
uses: ./.github/actions/dependencies
|
||||
with:
|
||||
configuration: ${{ matrix.configuration.type }}
|
||||
- name: build
|
||||
uses: ./.github/actions/build
|
||||
with:
|
||||
generator: "${{ matrix.version.generator }}"
|
||||
configuration: ${{ matrix.configuration.type }}
|
||||
# Hard code for now. Move to the matrix if varied options are needed
|
||||
cmake-args: "-Dassert=TRUE -Dwerr=TRUE -Dreporting=OFF -Dunity=ON"
|
||||
cmake-target: install
|
||||
- name: test
|
||||
shell: bash
|
||||
if: ${{ matrix.configuration.tests }}
|
||||
run: |
|
||||
cd ${build_dir}/${{ matrix.configuration.type }}
|
||||
./rippled --unittest --unittest-jobs $(nproc)
|
||||
ctest -j $(nproc) --output-on-failure
|
||||
122
.gitignore
vendored
@@ -1,70 +1,48 @@
|
||||
# .gitignore
|
||||
# cspell: disable
|
||||
|
||||
bin/boostbook_catalog.xml
|
||||
bin/config.log
|
||||
bin/project-cache.jam
|
||||
|
||||
# Ignore vim swap files.
|
||||
*.swp
|
||||
|
||||
# Ignore SCons support files.
|
||||
.sconsign.dblite
|
||||
|
||||
# Ignore python compiled files.
|
||||
*.pyc
|
||||
|
||||
# Ignore Macintosh Desktop Services Store files.
|
||||
# Macintosh Desktop Services Store files.
|
||||
.DS_Store
|
||||
|
||||
# Ignore backup/temps
|
||||
# Build, intermediate, and temporary artifacts.
|
||||
*~
|
||||
|
||||
# Ignore object files.
|
||||
*.o
|
||||
.nih_c
|
||||
tags
|
||||
TAGS
|
||||
GTAGS
|
||||
GRTAGS
|
||||
GPATH
|
||||
bin/rippled
|
||||
Debug/*.*
|
||||
Release/*.*
|
||||
*.pdb
|
||||
*.swp
|
||||
/.clangd
|
||||
Debug/
|
||||
Release/
|
||||
/.build/
|
||||
/build/
|
||||
/db/
|
||||
/out.txt
|
||||
/Testing/
|
||||
/tmp/
|
||||
CMakeSettings.json
|
||||
CMakeUserPresets.json
|
||||
|
||||
# Ignore coverage files.
|
||||
# Coverage files.
|
||||
*.gcno
|
||||
*.gcda
|
||||
*.gcov
|
||||
|
||||
# Levelization checking
|
||||
Builds/levelization/results/rawincludes.txt
|
||||
Builds/levelization/results/paths.txt
|
||||
Builds/levelization/results/includes/
|
||||
Builds/levelization/results/includedby/
|
||||
# Profiling data.
|
||||
gmon.out
|
||||
|
||||
# Ignore tmp directory.
|
||||
tmp
|
||||
# Levelization data.
|
||||
.github/scripts/levelization/results/*
|
||||
!.github/scripts/levelization/results/loops.txt
|
||||
!.github/scripts/levelization/results/ordering.txt
|
||||
|
||||
# Ignore database directory.
|
||||
db/
|
||||
db/*.db
|
||||
db/*.db-*
|
||||
# Customized configs.
|
||||
/rippled.cfg
|
||||
/xrpld.cfg
|
||||
/validators.txt
|
||||
|
||||
# Ignore debug logs
|
||||
debug_log.txt
|
||||
# Locally patched Conan recipes
|
||||
external/conan-center-index/
|
||||
|
||||
# Ignore customized configs
|
||||
rippled.cfg
|
||||
validators.txt
|
||||
|
||||
# Doxygen generated documentation output
|
||||
HtmlDocumentation
|
||||
docs/html_doc
|
||||
|
||||
# Xcode user-specific project settings
|
||||
# Xcode
|
||||
.DS_Store
|
||||
/build/
|
||||
# XCode IDE.
|
||||
*.pbxuser
|
||||
!default.pbxuser
|
||||
*.mode1v3
|
||||
@@ -77,38 +55,16 @@ xcuserdata
|
||||
profile
|
||||
*.moved-aside
|
||||
DerivedData
|
||||
.idea/
|
||||
*.hmap
|
||||
|
||||
# Intel Parallel Studio 2013 XE
|
||||
My Amplifier XE Results - RippleD
|
||||
# JetBrains IDE.
|
||||
/.idea/
|
||||
|
||||
# Compiler intermediate output
|
||||
/out.txt
|
||||
# Microsoft Visual Studio IDE.
|
||||
/.vs/
|
||||
/.vscode/
|
||||
|
||||
# Build Log
|
||||
rippled-build.log
|
||||
|
||||
# Profiling data
|
||||
gmon.out
|
||||
|
||||
Builds/VisualStudio2015/*.db
|
||||
Builds/VisualStudio2015/*.user
|
||||
Builds/VisualStudio2015/*.opendb
|
||||
Builds/VisualStudio2015/*.sdf
|
||||
|
||||
# MSVC
|
||||
*.pdb
|
||||
.vs/
|
||||
CMakeSettings.json
|
||||
compile_commands.json
|
||||
.clangd
|
||||
packages
|
||||
pkg_out
|
||||
pkg
|
||||
CMakeUserPresets.json
|
||||
bld.rippled/
|
||||
.vscode
|
||||
|
||||
# Suggested in-tree build directory
|
||||
/.build/
|
||||
# AI tools.
|
||||
/.augment
|
||||
/.claude
|
||||
/CLAUDE.md
|
||||
|
||||
@@ -1,6 +1,58 @@
|
||||
# .pre-commit-config.yaml
|
||||
# To run pre-commit hooks, first install pre-commit:
|
||||
# - `pip install pre-commit==${PRE_COMMIT_VERSION}`
|
||||
#
|
||||
# Then, run the following command to install the git hook scripts:
|
||||
# - `pre-commit install`
|
||||
# You can run all configured hooks against all files with:
|
||||
# - `pre-commit run --all-files`
|
||||
# To manually run a specific hook, use:
|
||||
# - `pre-commit run <hook_id> --all-files`
|
||||
# To run the hooks against only the staged files, use:
|
||||
# - `pre-commit run`
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
- id: mixed-line-ending
|
||||
- id: check-merge-conflict
|
||||
args: [--assume-in-merge]
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-clang-format
|
||||
rev: v18.1.8
|
||||
rev: 7d85583be209cb547946c82fbe51f4bc5dd1d017 # frozen: v18.1.8
|
||||
hooks:
|
||||
- id: clang-format
|
||||
args: [--style=file]
|
||||
"types_or": [c++, c, proto]
|
||||
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2
|
||||
hooks:
|
||||
- id: prettier
|
||||
|
||||
- repo: https://github.com/psf/black-pre-commit-mirror
|
||||
rev: 25.11.0
|
||||
hooks:
|
||||
- id: black
|
||||
|
||||
- repo: https://github.com/streetsidesoftware/cspell-cli
|
||||
rev: v9.2.0
|
||||
hooks:
|
||||
- id: cspell # Spell check changed files
|
||||
exclude: .config/cspell.config.yaml
|
||||
- id: cspell # Spell check the commit message
|
||||
name: check commit message spelling
|
||||
args:
|
||||
- --no-must-find-files
|
||||
- --no-progress
|
||||
- --no-summary
|
||||
- --files
|
||||
- .git/COMMIT_EDITMSG
|
||||
stages: [commit-msg]
|
||||
|
||||
exclude: |
|
||||
(?x)^(
|
||||
external/.*|
|
||||
.github/scripts/levelization/results/.*\.txt
|
||||
)$
|
||||
|
||||
1
.prettierignore
Normal file
@@ -0,0 +1 @@
|
||||
external
|
||||
158
BUILD.md
@@ -10,7 +10,7 @@
|
||||
## Branches
|
||||
|
||||
For a stable release, choose the `master` branch or one of the [tagged
|
||||
releases](https://github.com/ripple/rippled/releases).
|
||||
releases](https://github.com/XRPLF/rippled/releases).
|
||||
|
||||
```bash
|
||||
git checkout master
|
||||
@@ -33,24 +33,19 @@ git checkout develop
|
||||
|
||||
See [System Requirements](https://xrpl.org/system-requirements.html).
|
||||
|
||||
Building rippled generally requires git, Python, Conan, CMake, and a C++
|
||||
Building xrpld generally requires git, Python, Conan, CMake, and a C++
|
||||
compiler. Some guidance on setting up such a [C++ development environment can be
|
||||
found here](./docs/build/environment.md).
|
||||
|
||||
- [Python 3.11](https://www.python.org/downloads/), or higher
|
||||
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
|
||||
- [CMake 3.22](https://cmake.org/download/)[^2], or higher
|
||||
- [CMake 3.22](https://cmake.org/download/), or higher
|
||||
|
||||
[^1]:
|
||||
It is possible to build with Conan 1.60+, but the instructions are
|
||||
significantly different, which is why we are not recommending it.
|
||||
|
||||
[^2]:
|
||||
CMake 4 is not yet supported by all dependencies required by this project.
|
||||
If you are affected by this issue, follow [conan workaround for cmake
|
||||
4](#workaround-for-cmake-4)
|
||||
|
||||
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
|
||||
`xrpld` is written in the C++20 dialect and includes the `<concepts>` header.
|
||||
The [minimum compiler versions][2] required are:
|
||||
|
||||
| Compiler | Version |
|
||||
@@ -71,7 +66,7 @@ Linux](./docs/build/environment.md#linux).
|
||||
|
||||
### Mac
|
||||
|
||||
Many rippled engineers use macOS for development.
|
||||
Many xrpld engineers use macOS for development.
|
||||
|
||||
Here are [sample instructions for setting up a C++ development environment on
|
||||
macOS](./docs/build/environment.md#macos).
|
||||
@@ -93,6 +88,13 @@ These instructions assume a basic familiarity with Conan and CMake. If you are
|
||||
unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official
|
||||
[Getting Started][3] walkthrough.
|
||||
|
||||
#### Conan lockfile
|
||||
|
||||
To achieve reproducible dependencies, we use a [Conan lockfile](https://docs.conan.io/2/tutorial/versioning/lockfiles.html),
|
||||
which has to be updated every time dependencies change.
|
||||
|
||||
Please see the [instructions on how to regenerate the lockfile](conan/lockfile/README.md).
|
||||
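For orientation only, a minimal sketch of what regeneration typically looks like with Conan 2 (the linked README is authoritative and may use different options):

```bash
# Recreate conan.lock from the recipe in the repository root (sketch).
conan lock create . --lockfile-out=conan.lock
```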
|
||||
#### Default profile
|
||||
|
||||
We recommend that you import the provided `conan/profiles/default` profile:
|
||||
@@ -124,7 +126,7 @@ default profile.
|
||||
### Patched recipes
|
||||
|
||||
The recipes in Conan Center occasionally need to be patched for compatibility
|
||||
with the latest version of `rippled`. We maintain a fork of the Conan Center
|
||||
with the latest version of `xrpld`. We maintain a fork of the Conan Center
|
||||
[here](https://github.com/XRPLF/conan-center-index/) containing the patches.
|
||||
|
||||
To ensure our patched recipes are used, you must add our Conan remote at a
|
||||
@@ -132,24 +134,44 @@ higher index than the default Conan Center remote, so it is consulted first. You
|
||||
can do this by running:
|
||||
|
||||
```bash
|
||||
conan remote add --index 0 xrplf "https://conan.ripplex.io"
|
||||
conan remote add --index 0 xrplf https://conan.ripplex.io
|
||||
```
|
||||
|
||||
Alternatively, you can pull the patched recipes into the repository and use them
|
||||
locally:
|
||||
|
||||
```bash
|
||||
# Extract the version number from the lockfile.
|
||||
function extract_version {
|
||||
version=$(cat conan.lock | sed -nE "s@.+${1}/(.+)#.+@\1@p" | head -n1)
|
||||
echo ${version}
|
||||
}
|
||||
|
||||
# Define which recipes to export.
|
||||
recipes=(ed25519 grpc secp256k1 snappy soci)
|
||||
|
||||
# Selectively check out the recipes from our CCI fork.
|
||||
cd external
|
||||
mkdir -p conan-center-index
|
||||
cd conan-center-index
|
||||
git init
|
||||
git remote add origin git@github.com:XRPLF/conan-center-index.git
|
||||
git sparse-checkout init
|
||||
git sparse-checkout set recipes/snappy
|
||||
git sparse-checkout add recipes/soci
|
||||
for recipe in ${recipes[@]}; do
|
||||
echo "Checking out ${recipe}..."
|
||||
git sparse-checkout add recipes/${recipe}/all
|
||||
done
|
||||
git fetch origin master
|
||||
git checkout master
|
||||
conan export --version 1.1.10 recipes/snappy/all
|
||||
conan export --version 4.0.3 recipes/soci/all
|
||||
rm -rf .git
|
||||
cd ../..
|
||||
|
||||
# Export the recipes into the local cache.
|
||||
for recipe in ${recipes[@]}; do
|
||||
version=$(extract_version ${recipe})
|
||||
echo "Exporting ${recipe}/${version}..."
|
||||
conan export --version $(extract_version ${recipe}) \
|
||||
external/conan-center-index/recipes/${recipe}/all
|
||||
done
|
||||
```
|
||||
|
||||
In the case we switch to a newer version of a dependency that still requires a
|
||||
@@ -158,6 +180,11 @@ updated dependencies with the newer version. However, if we switch to a newer
|
||||
version that no longer requires a patch, no action is required on your part, as
|
||||
the new recipe will be automatically pulled from the official Conan Center.
|
||||
|
||||
> [!NOTE]
|
||||
> You might need to add `--lockfile=""` to your `conan install` command
|
||||
> to avoid automatic use of the existing `conan.lock` file when you run
|
||||
> `conan export` manually on your machine
|
||||
|
||||
### Conan profile tweaks
|
||||
|
||||
#### Missing compiler version
|
||||
@@ -265,7 +292,7 @@ sed -i.bak -e 's|^compiler\.libcxx=.*$|compiler.libcxx=libstdc++11|' $(conan con
|
||||
to do that is to run the shortcut "x64 Native Tools Command Prompt" for the
|
||||
version of Visual Studio that you have installed.
|
||||
|
||||
Windows developers must also build `rippled` and its dependencies for the x64
|
||||
Windows developers must also build `xrpld` and its dependencies for the x64
|
||||
architecture:
|
||||
|
||||
```bash
|
||||
@@ -278,21 +305,6 @@ sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
|
||||
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
|
||||
```
|
||||
|
||||
#### Workaround for CMake 4
|
||||
|
||||
If your system CMake is version 4 rather than 3, you may have to configure Conan
|
||||
profile to use CMake version 3 for dependencies, by adding the following two
|
||||
lines to your profile:
|
||||
|
||||
```text
|
||||
[tool_requires]
|
||||
!cmake/*: cmake/[>=3 <4]
|
||||
```
|
||||
|
||||
This will force Conan to download and use a locally cached CMake 3 version, and
|
||||
is needed because some of the dependencies used by this project do not support
|
||||
CMake 4.
|
||||
|
||||
#### Clang workaround for grpc
|
||||
|
||||
If your compiler is clang, version 19 or later, or apple-clang, version 17 or
|
||||
@@ -389,19 +401,6 @@ tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
|
||||
`--settings build_type=$BUILD_TYPE` or in the profile itself,
|
||||
under the section `[settings]` with the key `build_type`.
|
||||
|
||||
If you are using a Microsoft Visual C++ compiler,
|
||||
then you will need to ensure consistency between the `build_type` setting
|
||||
and the `compiler.runtime` setting.
|
||||
|
||||
When `build_type` is `Release`, `compiler.runtime` should be `MT`.
|
||||
|
||||
When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.
|
||||
|
||||
```
|
||||
conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
|
||||
conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
|
||||
```
|
||||
|
||||
3. Configure CMake and pass the toolchain file generated by Conan, located at
|
||||
`$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.
|
||||
|
||||
@@ -423,9 +422,9 @@ tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
|
||||
```
|
||||
|
||||
**Note:** You can pass build options for `rippled` in this step.
|
||||
**Note:** You can pass build options for `xrpld` in this step.
|
||||
|
||||
4. Build `rippled`.
|
||||
4. Build `xrpld`.
|
||||
|
||||
For a single-configuration generator, it will build whatever configuration
|
||||
you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator, you
|
||||
@@ -444,26 +443,26 @@ tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
|
||||
cmake --build . --config Debug
|
||||
```
|
||||
|
||||
5. Test rippled.
|
||||
5. Test xrpld.
|
||||
|
||||
Single-config generators:
|
||||
|
||||
```
|
||||
./rippled --unittest --unittest-jobs N
|
||||
./xrpld --unittest --unittest-jobs N
|
||||
```
|
||||
|
||||
Multi-config generators:
|
||||
|
||||
```
|
||||
./Release/rippled --unittest --unittest-jobs N
|
||||
./Debug/rippled --unittest --unittest-jobs N
|
||||
./Release/xrpld --unittest --unittest-jobs N
|
||||
./Debug/xrpld --unittest --unittest-jobs N
|
||||
```
|
||||
|
||||
Replace the `--unittest-jobs` parameter N with the desired unit test
concurrency. The recommended setting is half the number of available CPU
cores.
|
||||
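For instance, on a Linux host (a sketch assuming `nproc` is available; on macOS you could substitute `sysctl -n hw.ncpu`):

```bash
# Run the unit tests with roughly half of the available CPU cores.
./xrpld --unittest --unittest-jobs $(( $(nproc) / 2 ))
```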
|
||||
The location of `rippled` binary in your build directory depends on your
|
||||
The location of `xrpld` binary in your build directory depends on your
|
||||
CMake generator. Pass `--help` to see the rest of the command line options.
|
||||
|
||||
## Coverage report
|
||||
@@ -482,20 +481,20 @@ Prerequisites for the coverage report:
|
||||
|
||||
A coverage report is created when the following steps are completed, in order:
|
||||
|
||||
1. `rippled` binary built with instrumentation data, enabled by the `coverage`
|
||||
1. `xrpld` binary built with instrumentation data, enabled by the `coverage`
|
||||
option mentioned above
|
||||
2. completed run of unit tests, which populates coverage capture data
|
||||
2. completed one or more runs of the unit tests, which populate coverage capture data
|
||||
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
|
||||
to assemble both instrumentation data and the coverage capture data into a coverage report
|
||||
|
||||
The above steps are automated into a single target `coverage`. The instrumented
|
||||
`rippled` binary can also be used for regular development or testing work, at
|
||||
The last of the above steps is automated as the single target `coverage`. The instrumented
|
||||
`xrpld` binary can also be used for regular development or testing work, at
|
||||
the cost of extra disk space utilization and a small performance hit
|
||||
(to store coverage capture). In case of a spurious failure of unit tests, it is
|
||||
possible to re-run the `coverage` target without rebuilding the `rippled` binary
|
||||
(since it is simply a dependency of the coverage report target). It is also possible
|
||||
to select only specific tests for the purpose of the coverage report, by setting
|
||||
the `coverage_test` variable in `cmake`
|
||||
(to store coverage capture data). Since the `xrpld` binary is simply a dependency of the
coverage report target, it is possible to re-run the `coverage` target without
rebuilding the `xrpld` binary. Note that running the unit tests before the `coverage`
target is left to the developer. Each such run will append to the coverage data
|
||||
collected in the build directory.
|
||||
|
||||
The default coverage report format is `html-details`, but the user
|
||||
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
|
||||
@@ -504,11 +503,6 @@ to generate more than one format at a time by setting the `coverage_extra_args`
|
||||
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
|
||||
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.
|
||||
|
||||
By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
|
||||
set to the number of available CPU cores. This may cause spurious test
|
||||
errors on Apple. Developers can override the number of unit test jobs with
|
||||
the `coverage_test_parallelism` variable in `cmake`.
|
||||
|
||||
Example use with some cmake variables set:
|
||||
|
||||
```
|
||||
@@ -526,16 +520,16 @@ stored inside the build directory, as either of:
|
||||
|
||||
## Options
|
||||
|
||||
| Option | Default Value | Description |
|
||||
| ---------- | ------------- | -------------------------------------------------------------------------- |
|
||||
| `assert` | OFF | Enable assertions. |
|
||||
| `coverage` | OFF | Prepare the coverage report. |
|
||||
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
|
||||
| `tests` | OFF | Build tests. |
|
||||
| `unity` | OFF | Configure a unity build. |
|
||||
| `xrpld` | OFF | Build the xrpld (`rippled`) application, and not just the libxrpl library. |
|
||||
| `werr` | OFF | Treat compilation warnings as errors |
|
||||
| `wextra` | OFF | Enable additional compilation warnings |
|
||||
| Option | Default Value | Description |
|
||||
| ---------- | ------------- | ------------------------------------------------------------------ |
|
||||
| `assert` | OFF | Enable assertions. |
|
||||
| `coverage` | OFF | Prepare the coverage report. |
|
||||
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
|
||||
| `tests` | OFF | Build tests. |
|
||||
| `unity` | OFF | Configure a unity build. |
|
||||
| `xrpld` | OFF | Build the xrpld application, and not just the libxrpl library. |
|
||||
| `werr` | OFF | Treat compilation warnings as errors |
|
||||
| `wextra` | OFF | Enable additional compilation warnings |
|
||||
|
||||
[Unity builds][5] may be faster for the first build
|
||||
(at the cost of much more memory) since they concatenate sources into fewer
|
||||
@@ -564,7 +558,13 @@ After any updates or changes to dependencies, you may need to do the following:
|
||||
```
|
||||
|
||||
3. Re-run [conan export](#patched-recipes) if needed.
|
||||
4. Re-run [conan install](#build-and-test).
|
||||
4. [Regenerate lockfile](#conan-lockfile).
|
||||
5. Re-run [conan install](#build-and-test).
|
||||
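For example, a sketch of the full sequence after bumping a patched dependency (the recipe name, version, and lockfile command are illustrative placeholders; follow the linked sections for the authoritative steps):

```bash
# 3. Re-export the patched recipe, if it still requires a patch.
conan export --version 4.0.3 external/conan-center-index/recipes/soci/all

# 4. Regenerate the lockfile (exact command as described in the lockfile section).
conan lock create . --lockfile-out conan.lock

# 5. Re-install the dependencies.
conan install .. --output-folder . --build missing
```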
|
||||
#### ERROR: Package not resolved
|
||||
|
||||
If you're seeing an error like `ERROR: Package 'snappy/1.1.10' not resolved: Unable to find 'snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246' in remotes.`,
|
||||
please add the `xrplf` remote or re-run `conan export` for [patched recipes](#patched-recipes).
|
||||
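For example (a sketch; `<remote-url>` is a placeholder for the actual XRPLF Conan remote URL given in the build instructions):

```bash
# Add the xrplf remote so the patched packages can be resolved...
conan remote add xrplf <remote-url>

# ...or re-export the patched recipes locally, as shown above.
conan export --version 1.1.10 external/conan-center-index/recipes/snappy/all
```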
|
||||
### `protobuf/port_def.inc` file not found
|
||||
|
||||
@@ -573,7 +573,7 @@ you might have generated CMake files for a different `build_type` than the
|
||||
`CMAKE_BUILD_TYPE` you passed to Conan.
|
||||
|
||||
```
|
||||
/rippled/.build/pb-xrpl.libpb/xrpl/proto/ripple.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
|
||||
/xrpld/.build/pb-xrpl.libpb/xrpl/proto/xrpl.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
|
||||
10 | #include <google/protobuf/port_def.inc>
|
||||
| ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
1 error generated.
|
||||
|
||||
@@ -28,6 +28,9 @@ elseif(MSVC)
|
||||
add_compile_options(/wd4068) # Ignore unknown pragmas
|
||||
endif()
|
||||
|
||||
# Enable ccache to speed up builds.
|
||||
include(Ccache)
|
||||
|
||||
# make GIT_COMMIT_HASH define available to all sources
|
||||
find_package(Git)
|
||||
if(Git_FOUND)
|
||||
@@ -49,7 +52,7 @@ if(Git_FOUND)
|
||||
endif() #git
|
||||
|
||||
if(thread_safety_analysis)
|
||||
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
|
||||
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS)
|
||||
add_compile_options("-stdlib=libc++")
|
||||
add_link_options("-stdlib=libc++")
|
||||
endif()
|
||||
@@ -62,9 +65,9 @@ if (target)
|
||||
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
|
||||
endif ()
|
||||
|
||||
include(RippledSanity)
|
||||
include(RippledVersion)
|
||||
include(RippledSettings)
|
||||
include(XrplSanity)
|
||||
include(XrplVersion)
|
||||
include(XrplSettings)
|
||||
# this check has to remain in the top-level cmake
|
||||
# because of the early return statement
|
||||
if (packages_only)
|
||||
@@ -73,11 +76,11 @@ if (packages_only)
|
||||
endif()
|
||||
return ()
|
||||
endif ()
|
||||
include(RippledCompiler)
|
||||
include(RippledInterface)
|
||||
include(XrplCompiler)
|
||||
include(XrplInterface)
|
||||
|
||||
option(only_docs "Include only the docs target?" FALSE)
|
||||
include(RippledDocs)
|
||||
include(XrplDocs)
|
||||
if(only_docs)
|
||||
return()
|
||||
endif()
|
||||
@@ -89,15 +92,7 @@ find_package(OpenSSL 1.1.1 REQUIRED)
|
||||
set_target_properties(OpenSSL::SSL PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
|
||||
)
|
||||
set(SECP256K1_INSTALL TRUE)
|
||||
set(SECP256K1_BUILD_BENCHMARK FALSE)
|
||||
set(SECP256K1_BUILD_TESTS FALSE)
|
||||
set(SECP256K1_BUILD_EXHAUSTIVE_TESTS FALSE)
|
||||
set(SECP256K1_BUILD_CTIME_TESTS FALSE)
|
||||
set(SECP256K1_BUILD_EXAMPLES FALSE)
|
||||
add_subdirectory(external/secp256k1)
|
||||
add_library(secp256k1::secp256k1 ALIAS secp256k1)
|
||||
add_subdirectory(external/ed25519-donna)
|
||||
|
||||
add_subdirectory(external/antithesis-sdk)
|
||||
find_package(gRPC REQUIRED)
|
||||
find_package(lz4 REQUIRED)
|
||||
@@ -112,16 +107,19 @@ option(rocksdb "Enable RocksDB" ON)
|
||||
if(rocksdb)
|
||||
find_package(RocksDB REQUIRED)
|
||||
set_target_properties(RocksDB::rocksdb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
|
||||
INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1
|
||||
)
|
||||
target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
|
||||
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
|
||||
endif()
|
||||
|
||||
find_package(nudb REQUIRED)
|
||||
find_package(date REQUIRED)
|
||||
find_package(ed25519 REQUIRED)
|
||||
find_package(nudb REQUIRED)
|
||||
find_package(secp256k1 REQUIRED)
|
||||
find_package(wasmi REQUIRED)
|
||||
find_package(xxHash REQUIRED)
|
||||
|
||||
target_link_libraries(ripple_libs INTERFACE
|
||||
target_link_libraries(xrpl_libs INTERFACE
|
||||
ed25519::ed25519
|
||||
lz4::lz4
|
||||
OpenSSL::Crypto
|
||||
@@ -139,16 +137,16 @@ elseif(TARGET NuDB::nudb)
|
||||
else()
|
||||
message(FATAL_ERROR "unknown nudb target")
|
||||
endif()
|
||||
target_link_libraries(ripple_libs INTERFACE ${nudb})
|
||||
target_link_libraries(xrpl_libs INTERFACE ${nudb})
|
||||
|
||||
if(coverage)
|
||||
include(RippledCov)
|
||||
include(XrplCov)
|
||||
endif()
|
||||
|
||||
set(PROJECT_EXPORT_SET RippleExports)
|
||||
include(RippledCore)
|
||||
include(RippledInstall)
|
||||
include(RippledValidatorKeys)
|
||||
set(PROJECT_EXPORT_SET XrplExports)
|
||||
include(XrplCore)
|
||||
include(XrplInstall)
|
||||
include(XrplValidatorKeys)
|
||||
|
||||
if(tests)
|
||||
include(CTest)
|
||||
|
||||
@@ -24,7 +24,7 @@ your verifying key. Please set up [signature verification][signing].
|
||||
|
||||
In general, external contributions should be developed in your personal
|
||||
[fork][forking]. Contributions from developers with write permissions
|
||||
should be done in [the main repository][rippled] in a branch with
|
||||
should be done in [the main repository][xrpld] in a branch with
|
||||
a permitted prefix. Permitted prefixes are:
|
||||
|
||||
- XLS-[a-zA-Z0-9]+/.+
|
||||
@@ -47,13 +47,18 @@ choose the next available standard number, and open a discussion with an
|
||||
appropriate title to propose your draft standard.
|
||||
|
||||
When you submit a pull request, please link the corresponding XLS in the
|
||||
description. An XLS still in draft status is considered a
|
||||
description. An XLS still in `Draft` status is considered a
|
||||
work-in-progress and open for discussion. Please allow time for
|
||||
questions, suggestions, and changes to the XLS draft. It is the
|
||||
responsibility of the XLS author to update the draft to match the final
|
||||
implementation when its corresponding pull request is merged, unless the
|
||||
author delegates that responsibility to others.
|
||||
|
||||
Any amendment or major RPC change requires either a new XLS or an update
|
||||
to an existing XLS. Neither change will be released (in an amendment's
|
||||
case, marked as `Supported::yes`) until the corresponding XLS's status
|
||||
is `Final`.
|
||||
|
||||
## Before making a pull request
|
||||
|
||||
(Or marking a draft pull request as ready.)
|
||||
@@ -68,7 +73,7 @@ Ensure that your code compiles according to the build instructions in
|
||||
|
||||
Please write tests for your code.
|
||||
If your test can be run offline, in under 60 seconds, then it can be an
|
||||
automatic test run by `rippled --unittest`.
|
||||
automatic test run by `xrpld --unittest`.
|
||||
Otherwise, it must be a manual test.
|
||||
|
||||
If you create new source files, they must be organized as follows:
|
||||
@@ -81,7 +86,7 @@ If you create new source files, they must be organized as follows:
|
||||
|
||||
The source must be formatted according to the style guide below.
|
||||
|
||||
Header includes must be [levelized](./Builds/levelization).
|
||||
Header includes must be [levelized](.github/scripts/levelization).
|
||||
|
||||
Changes should usually be squashed down into a single commit.
|
||||
Some larger or more complicated change sets make more sense,
|
||||
@@ -251,13 +256,13 @@ pre-commit install
|
||||
We are using [Antithesis](https://antithesis.com/) for continuous fuzzing,
|
||||
and keep a copy of [Antithesis C++ SDK](https://github.com/antithesishq/antithesis-sdk-cpp/)
|
||||
in `external/antithesis-sdk`. One of the aims of fuzzing is to identify bugs
|
||||
by finding external conditions which cause contracts violations inside `rippled`.
|
||||
by finding external conditions which cause contract violations inside `xrpld`.
|
||||
The contracts are expressed as `XRPL_ASSERT` or `UNREACHABLE` (defined in
|
||||
`include/xrpl/beast/utility/instrumentation.h`), which are effectively (outside
|
||||
of Antithesis) wrappers for `assert(...)` with an added name. The purpose of the name
is to provide contracts with a stable identity which does not rely on line numbers.
|
||||
|
||||
When `rippled` is built with the Antithesis instrumentation enabled
|
||||
When `xrpld` is built with the Antithesis instrumentation enabled
|
||||
(using the `voidstar` CMake option) and run on the Antithesis platform, the
|
||||
contracts become
|
||||
[test properties](https://antithesis.com/docs/using_antithesis/properties.html);
|
||||
@@ -299,7 +304,7 @@ For this reason:
|
||||
- Example **bad** name
|
||||
`"RFC1751::insert(char* s, int x, int start, int length) : length is greater than or equal zero"`
|
||||
(missing namespace, unnecessary full function signature, description too verbose).
|
||||
Good name: `"ripple::RFC1751::insert : minimum length"`.
|
||||
Good name: `"xrpl::RFC1751::insert : minimum length"`.
|
||||
- In **few** well-justified cases a non-standard name can be used, in which case a
|
||||
comment should be placed to explain the rationale (example in `contract.cpp`)
|
||||
- Do **not** rename a contract without a good reason (e.g. the name no longer
|
||||
@@ -313,7 +318,7 @@ For this reason:
|
||||
|
||||
To execute all unit tests:
|
||||
|
||||
`rippled --unittest --unittest-jobs=<number of cores>`
|
||||
`xrpld --unittest --unittest-jobs=<number of cores>`
|
||||
|
||||
(Note: Using multiple cores on a Mac M1 can cause spurious test failures. The
|
||||
cause is still under investigation. If you observe this problem, try specifying fewer jobs.)
|
||||
@@ -321,7 +326,7 @@ cause is still under investigation. If you observe this problem, try specifying
|
||||
To run a specific set of test suites:
|
||||
|
||||
```
|
||||
rippled --unittest TestSuiteName
|
||||
xrpld --unittest TestSuiteName
|
||||
```
|
||||
|
||||
Note: In this example, all tests with prefix `TestSuiteName` will be run, so if
|
||||
@@ -550,16 +555,16 @@ Rippled uses a linear workflow model that can be summarized as:
|
||||
git fetch --multiple upstreams user1 user2 user3 [...]
|
||||
git checkout -B release-next --no-track upstream/develop
|
||||
|
||||
# Only do an ff-only merge if prbranch1 is either already
|
||||
# Only do an ff-only merge if pr-branch1 is either already
|
||||
# squashed, or needs to be merged with separate commits,
|
||||
# and has no merge commits.
|
||||
# Use -S on the ff-only merge if prbranch1 isn't signed.
|
||||
git merge [-S] --ff-only user1/prbranch1
|
||||
# Use -S on the ff-only merge if pr-branch1 isn't signed.
|
||||
git merge [-S] --ff-only user1/pr-branch1
|
||||
|
||||
git merge --squash user2/prbranch2
|
||||
git merge --squash user2/pr-branch2
|
||||
git commit -S # Use the commit message provided on the PR
|
||||
|
||||
git merge --squash user3/prbranch3
|
||||
git merge --squash user3/pr-branch3
|
||||
git commit -S # Use the commit message provided on the PR
|
||||
|
||||
[...]
|
||||
@@ -871,7 +876,7 @@ git push --delete upstream-push master-next
|
||||
|
||||
#### Special cases: point releases, hotfixes, etc.
|
||||
|
||||
On occassion, a bug or issue is discovered in a version that already
|
||||
On occasion, a bug or issue is discovered in a version that already
|
||||
had a final release. Most of the time, development will have started
|
||||
on the next version, and will usually have changes in `develop`
|
||||
and often in `release`.
|
||||
@@ -1070,7 +1075,7 @@ git fetch upstreams
|
||||
[contrib]: https://docs.github.com/en/get-started/quickstart/contributing-to-projects
|
||||
[squash]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/about-pull-request-merges#squash-and-merge-your-commits
|
||||
[forking]: https://github.com/XRPLF/rippled/fork
|
||||
[rippled]: https://github.com/XRPLF/rippled
|
||||
[xrpld]: https://github.com/XRPLF/rippled
|
||||
[signing]: https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification
|
||||
[setup-upstreams]: ./bin/git/setup-upstreams.sh
|
||||
[squash-branches]: ./bin/git/squash-branches.sh
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
ISC License
|
||||
|
||||
Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
|
||||
Copyright (c) 2012-2020, the XRP Ledger developers.
|
||||
Copyright (c) 2012-2025, the XRP Ledger developers.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
|
||||
README.md (24 changed lines)
@@ -6,7 +6,7 @@ The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powe
|
||||
|
||||
## XRP
|
||||
|
||||
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
|
||||
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free crypto-asset native to the XRP Ledger, and is designed as a gas token for network services and to bridge different currencies. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
|
||||
|
||||
## rippled
|
||||
|
||||
@@ -23,26 +23,26 @@ If you are interested in running an **API Server** (including a **Full History S
|
||||
|
||||
- **[Censorship-Resistant Transaction Processing][]:** No single party decides which transactions succeed or fail, and no one can "roll back" a transaction after it completes. As long as those who choose to participate in the network keep it healthy, they can settle transactions in seconds.
|
||||
- **[Fast, Efficient Consensus Algorithm][]:** The XRP Ledger's consensus algorithm settles transactions in 4 to 5 seconds, processing at a throughput of up to 1500 transactions per second. These properties put XRP at least an order of magnitude ahead of other top digital assets.
|
||||
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction costs.
|
||||
- **[Responsible Software Governance][]:** A team of full-time, world-class developers at Ripple maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests, and builds constructive relationships with governments and financial institutions worldwide.
|
||||
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction fees.
|
||||
- **[Responsible Software Governance][]:** A team of full-time developers at Ripple & other organizations maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests.
|
||||
- **[Secure, Adaptable Cryptography][]:** The XRP Ledger relies on industry standard digital signature systems like ECDSA (the same scheme used by Bitcoin) but also supports modern, efficient algorithms like Ed25519. The extensible nature of the XRP Ledger's software makes it possible to add and disable algorithms as the state of the art in cryptography advances.
|
||||
- **[Modern Features for Smart Contracts][]:** Features like Escrow, Checks, and Payment Channels support cutting-edge financial applications including the [Interledger Protocol](https://interledger.org/). This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
|
||||
- **[Modern Features][]:** Features like Escrow, Checks, and Payment Channels support financial applications atop the XRP Ledger. This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
|
||||
- **[On-Ledger Decentralized Exchange][]:** In addition to all the features that make XRP useful on its own, the XRP Ledger also has a fully-functional accounting system for tracking and trading obligations denominated in any way users want, and an exchange built into the protocol. The XRP Ledger can settle long, cross-currency payment paths and exchanges of multiple currencies in atomic transactions, bridging gaps of trust with XRP.
|
||||
|
||||
[Censorship-Resistant Transaction Processing]: https://xrpl.org/xrp-ledger-overview.html#censorship-resistant-transaction-processing
|
||||
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/xrp-ledger-overview.html#fast-efficient-consensus-algorithm
|
||||
[Finite XRP Supply]: https://xrpl.org/xrp-ledger-overview.html#finite-xrp-supply
|
||||
[Responsible Software Governance]: https://xrpl.org/xrp-ledger-overview.html#responsible-software-governance
|
||||
[Secure, Adaptable Cryptography]: https://xrpl.org/xrp-ledger-overview.html#secure-adaptable-cryptography
|
||||
[Modern Features for Smart Contracts]: https://xrpl.org/xrp-ledger-overview.html#modern-features-for-smart-contracts
|
||||
[On-Ledger Decentralized Exchange]: https://xrpl.org/xrp-ledger-overview.html#on-ledger-decentralized-exchange
|
||||
[Censorship-Resistant Transaction Processing]: https://xrpl.org/transaction-censorship-detection.html#transaction-censorship-detection
|
||||
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/consensus-research.html#consensus-research
|
||||
[Finite XRP Supply]: https://xrpl.org/what-is-xrp.html
|
||||
[Responsible Software Governance]: https://xrpl.org/contribute-code.html#contribute-code-to-the-xrp-ledger
|
||||
[Secure, Adaptable Cryptography]: https://xrpl.org/cryptographic-keys.html#cryptographic-keys
|
||||
[Modern Features]: https://xrpl.org/use-specialized-payment-types.html
|
||||
[On-Ledger Decentralized Exchange]: https://xrpl.org/decentralized-exchange.html#decentralized-exchange
|
||||
|
||||
## Source Code
|
||||
|
||||
Here are some good places to start learning the source code:
|
||||
|
||||
- Read the markdown files in the source tree: `src/ripple/**/*.md`.
|
||||
- Read [the levelization document](./Builds/levelization) to get an idea of the internal dependency graph.
|
||||
- Read [the levelization document](.github/scripts/levelization) to get an idea of the internal dependency graph.
|
||||
- In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component.
|
||||
|
||||
### Repository Contents
|
||||
|
||||
@@ -42,7 +42,7 @@ For more information on responsible disclosure, please read this [Wikipedia arti
|
||||
|
||||
## Report Handling Process
|
||||
|
||||
Please report the bug directly to us and limit further disclosure. If you want to prove that you knew the bug as of a given time, consider using a cryptographic precommitment: hash the content of your report and publish the hash on a medium of your choice (e.g. on Twitter or as a memo in a transaction) as "proof" that you had written the text at a given point in time.
|
||||
Please report the bug directly to us and limit further disclosure. If you want to prove that you knew the bug as of a given time, consider using a cryptographic pre-commitment: hash the content of your report and publish the hash on a medium of your choice (e.g. on Twitter or as a memo in a transaction) as "proof" that you had written the text at a given point in time.
|
||||
|
||||
Once we receive a report, we:
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ then
|
||||
name=$( basename $0 )
|
||||
cat <<- USAGE
|
||||
Usage: $name <username>
|
||||
|
||||
|
||||
Where <username> is the Github username of the upstream repo. e.g. XRPLF
|
||||
USAGE
|
||||
exit 0
|
||||
@@ -83,4 +83,3 @@ fi
|
||||
_run git fetch --jobs=$(nproc) upstreams
|
||||
|
||||
exit 0
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ then
|
||||
name=$( basename $0 )
|
||||
cat <<- USAGE
|
||||
Usage: $name workbranch base/branch user/branch [user/branch [...]]
|
||||
|
||||
|
||||
* workbranch will be created locally from base/branch
|
||||
* base/branch and user/branch may be specified as user:branch to allow
|
||||
easy copying from Github PRs
|
||||
@@ -66,4 +66,3 @@ git push $push HEAD:$b
|
||||
git fetch $repo
|
||||
-------------------------------------------------------------------
|
||||
PUSH
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#
|
||||
# Default validators.txt
|
||||
#
|
||||
# This file is located in the same folder as your rippled.cfg file
|
||||
# This file is located in the same folder as your xrpld.cfg file
|
||||
# and defines which validators your server trusts not to collude.
|
||||
#
|
||||
# This file is UTF-8 with DOS, UNIX, or Mac style line endings.
|
||||
|
||||
@@ -29,18 +29,18 @@
|
||||
#
|
||||
# Purpose
|
||||
#
|
||||
# This file documents and provides examples of all rippled server process
|
||||
# configuration options. When the rippled server instance is launched, it
|
||||
# This file documents and provides examples of all xrpld server process
|
||||
# configuration options. When the xrpld server instance is launched, it
|
||||
# looks for a file with the following name:
|
||||
#
|
||||
# rippled.cfg
|
||||
# xrpld.cfg
|
||||
#
|
||||
# For more information on where the rippled server instance searches for the
|
||||
# For more information on where the xrpld server instance searches for the
|
||||
# file, visit:
|
||||
#
|
||||
# https://xrpl.org/commandline-usage.html#generic-options
|
||||
#
|
||||
# This file should be named rippled.cfg. This file is UTF-8 with DOS, UNIX,
|
||||
# This file should be named xrpld.cfg. This file is UTF-8 with DOS, UNIX,
|
||||
# or Mac style end of lines. Blank lines and lines beginning with '#' are
|
||||
# ignored. Undefined sections are reserved. No escapes are currently defined.
|
||||
#
|
||||
@@ -89,8 +89,8 @@
|
||||
#
|
||||
#
|
||||
#
|
||||
# rippled offers various server protocols to clients making inbound
|
||||
# connections. The listening ports rippled uses are "universal" ports
|
||||
# xrpld offers various server protocols to clients making inbound
|
||||
# connections. The listening ports xrpld uses are "universal" ports
|
||||
# which may be configured to handshake in one or more of the available
|
||||
# supported protocols. These universal ports simplify administration:
|
||||
# A single open port can be used for multiple protocols.
|
||||
@@ -103,7 +103,7 @@
|
||||
#
|
||||
# A list of port names and key/value pairs. A port name must start with a
|
||||
# letter and contain only letters and numbers. The name is not case-sensitive.
|
||||
# For each name in this list, rippled will look for a configuration file
|
||||
# For each name in this list, xrpld will look for a configuration file
|
||||
# section with the same name and use it to create a listening port. The
|
||||
# name is informational only; the choice of name does not affect the function
|
||||
# of the listening port.
|
||||
@@ -134,7 +134,7 @@
|
||||
# ip = 127.0.0.1
|
||||
# protocol = http
|
||||
#
|
||||
# When rippled is used as a command line client (for example, issuing a
|
||||
# When xrpld is used as a command line client (for example, issuing a
|
||||
# server stop command), the first port advertising the http or https
|
||||
# protocol will be used to make the connection.
|
||||
#
|
||||
@@ -175,7 +175,7 @@
|
||||
# same time. It is possible to have both Websockets and Secure Websockets
|
||||
# together in one port.
|
||||
#
|
||||
# NOTE If no ports support the peer protocol, rippled cannot
|
||||
# NOTE If no ports support the peer protocol, xrpld cannot
|
||||
# receive incoming peer connections or become a superpeer.
|
||||
#
|
||||
# limit = <number>
|
||||
@@ -194,7 +194,7 @@
|
||||
# required. IP address restrictions, if any, will be checked in addition
|
||||
# to the credentials specified here.
|
||||
#
|
||||
# When acting in the client role, rippled will supply these credentials
|
||||
# When acting in the client role, xrpld will supply these credentials
|
||||
# using HTTP's Basic Authentication headers when making outbound HTTP/S
|
||||
# requests.
|
||||
#
|
||||
@@ -218,7 +218,7 @@
|
||||
# administrative commands.
|
||||
#
|
||||
# NOTE A common configuration value for the admin field is "localhost".
|
||||
# If you are listening on all IPv4/IPv6 addresses by specifing
|
||||
# If you are listening on all IPv4/IPv6 addresses by specifying
|
||||
# ip = :: then you can use admin = ::ffff:127.0.0.1,::1 to allow
|
||||
# administrative access from both IPv4 and IPv6 localhost
|
||||
# connections.
|
||||
@@ -237,7 +237,7 @@
|
||||
# WS, or WSS protocol interfaces. If administrative commands are
|
||||
# disabled for a port, these credentials have no effect.
|
||||
#
|
||||
# When acting in the client role, rippled will supply these credentials
|
||||
# When acting in the client role, xrpld will supply these credentials
|
||||
# in the submitted JSON for any administrative command requests when
|
||||
# invoking JSON-RPC commands on remote servers.
|
||||
#
|
||||
@@ -258,7 +258,7 @@
|
||||
# resource controls will default to those for non-administrative users.
|
||||
#
|
||||
# The secure_gateway IP addresses are intended to represent
|
||||
# proxies. Since rippled trusts these hosts, they must be
|
||||
# proxies. Since xrpld trusts these hosts, they must be
|
||||
# responsible for properly authenticating the remote user.
|
||||
#
|
||||
# If some IP addresses are included for both "admin" and
|
||||
@@ -272,7 +272,7 @@
|
||||
# Use the specified files when configuring SSL on the port.
|
||||
#
|
||||
# NOTE If no files are specified and secure protocols are selected,
|
||||
# rippled will generate an internal self-signed certificate.
|
||||
# xrpld will generate an internal self-signed certificate.
|
||||
#
|
||||
# The files have these meanings:
|
||||
#
|
||||
@@ -297,12 +297,12 @@
|
||||
# Control the ciphers which the server will support over SSL on the port,
|
||||
# specified using the OpenSSL "cipher list format".
|
||||
#
|
||||
# NOTE If unspecified, rippled will automatically configure a modern
|
||||
# NOTE If unspecified, xrpld will automatically configure a modern
|
||||
# cipher suite. This default suite should be widely supported.
|
||||
#
|
||||
# You should not modify this string unless you have a specific
|
||||
# reason and cryptographic expertise. Incorrect modification may
|
||||
# keep rippled from connecting to other instances of rippled or
|
||||
# keep xrpld from connecting to other instances of xrpld or
|
||||
# prevent RPC and WebSocket clients from connecting.
|
||||
#
|
||||
# send_queue_limit = [1..65535]
|
||||
@@ -382,7 +382,7 @@
|
||||
#-----------------
|
||||
#
|
||||
# These settings control security and access attributes of the Peer to Peer
|
||||
# server section of the rippled process. Peer Protocol implements the
|
||||
# server section of the xrpld process. Peer Protocol implements the
|
||||
# Ripple Payment protocol. It is over peer connections that transactions
|
||||
# and validations are passed from to machine to machine, to determine the
|
||||
# contents of validated ledgers.
|
||||
@@ -396,8 +396,8 @@
|
||||
# true - enables compression
|
||||
# false - disables compression [default].
|
||||
#
|
||||
# The rippled server can save bandwidth by compressing its peer-to-peer communications,
|
||||
# at a cost of greater CPU usage. If you enable link compression,
|
||||
# The xrpld server can save bandwidth by compressing its peer-to-peer communications,
|
||||
# at a cost of greater CPU usage. If you enable link compression,
|
||||
# the server automatically compresses communications with peer servers
|
||||
# that also have link compression enabled.
|
||||
# https://xrpl.org/enable-link-compression.html
|
||||
@@ -432,7 +432,7 @@
|
||||
#
|
||||
# [ips_fixed]
|
||||
#
|
||||
# List of IP addresses or hostnames to which rippled should always attempt to
|
||||
# List of IP addresses or hostnames to which xrpld should always attempt to
|
||||
# maintain peer connections. This is useful for manually forming private
|
||||
# networks, for example to configure a validation server that connects to the
|
||||
# Ripple network through a public-facing server, or for building a set
|
||||
@@ -573,7 +573,7 @@
|
||||
#
|
||||
# minimum_txn_in_ledger_standalone = <number>
|
||||
#
|
||||
# Like minimum_txn_in_ledger when rippled is running in standalone
|
||||
# Like minimum_txn_in_ledger when xrpld is running in standalone
|
||||
# mode. Default: 1000.
|
||||
#
|
||||
# target_txn_in_ledger = <number>
|
||||
@@ -710,7 +710,7 @@
|
||||
#
|
||||
# [validator_token]
|
||||
#
|
||||
# This is an alternative to [validation_seed] that allows rippled to perform
|
||||
# This is an alternative to [validation_seed] that allows xrpld to perform
|
||||
# validation without having to store the validator keys on the network
|
||||
# connected server. The field should contain a single token in the form of a
|
||||
# base64-encoded blob.
|
||||
@@ -745,7 +745,7 @@
|
||||
#
|
||||
# Specify the file by its name or path.
|
||||
# Unless an absolute path is specified, it will be considered relative to
|
||||
# the folder in which the rippled.cfg file is located.
|
||||
# the folder in which the xrpld.cfg file is located.
|
||||
#
|
||||
# Examples:
|
||||
# /home/ripple/validators.txt
|
||||
@@ -840,7 +840,7 @@
|
||||
#
|
||||
# 0: Disable the ledger replay feature [default]
|
||||
# 1: Enable the ledger replay feature. With this feature enabled, when
|
||||
# acquiring a ledger from the network, a rippled node only downloads
|
||||
# acquiring a ledger from the network, a xrpld node only downloads
|
||||
# the ledger header and the transactions instead of the whole ledger.
|
||||
# And the ledger is built by applying the transactions to the parent
|
||||
# ledger.
|
||||
@@ -851,7 +851,7 @@
|
||||
#
|
||||
#----------------
|
||||
#
|
||||
# The rippled server instance uses HTTPS GET requests in a variety of
|
||||
# The xrpld server instance uses HTTPS GET requests in a variety of
|
||||
# circumstances, including but not limited to contacting trusted domains to
|
||||
# fetch information such as mapping an email address to a Ripple Payment
|
||||
# Network address.
|
||||
@@ -891,7 +891,7 @@
|
||||
#
|
||||
#------------
|
||||
#
|
||||
# rippled creates 4 SQLite database to hold bookkeeping information
|
||||
# xrpld creates 4 SQLite databases to hold bookkeeping information
|
||||
# about transactions, local credentials, and various other things.
|
||||
# It also creates the NodeDB, which holds all the objects that
|
||||
# make up the current and historical ledgers.
|
||||
@@ -902,7 +902,7 @@
|
||||
# the performance of the server.
|
||||
#
|
||||
# Partial pathnames will be considered relative to the location of
|
||||
# the rippled.cfg file.
|
||||
# the xrpld.cfg file.
|
||||
#
|
||||
# [node_db] Settings for the Node Database (required)
|
||||
#
|
||||
@@ -920,11 +920,11 @@
|
||||
# type = NuDB
|
||||
#
|
||||
# NuDB is a high-performance database written by Ripple Labs and optimized
|
||||
# for rippled and solid-state drives.
|
||||
# for xrpld and solid-state drives.
|
||||
#
|
||||
# NuDB maintains its high speed regardless of the amount of history
|
||||
# stored. Online delete may be selected, but is not required. NuDB is
|
||||
# available on all platforms that rippled runs on.
|
||||
# available on all platforms that xrpld runs on.
|
||||
#
|
||||
# type = RocksDB
|
||||
#
|
||||
@@ -975,6 +975,47 @@
|
||||
# number of ledger records online. Must be greater
|
||||
# than or equal to ledger_history.
|
||||
#
|
||||
# Optional keys for NuDB only:
|
||||
#
|
||||
# nudb_block_size EXPERIMENTAL: Block size in bytes for NuDB storage.
|
||||
# Must be a power of 2 between 4096 and 32768. Default is 4096.
|
||||
#
|
||||
# This parameter controls the fundamental storage unit
|
||||
# size for NuDB's internal data structures. The choice
|
||||
# of block size can significantly impact performance
|
||||
# depending on your storage hardware and filesystem:
|
||||
#
|
||||
# - 4096 bytes: Optimal for most standard SSDs and
|
||||
# traditional filesystems (ext4, NTFS, HFS+).
|
||||
# Provides good balance of performance and storage
|
||||
# efficiency. Recommended for most deployments.
|
||||
# Minimizes memory footprint and provides consistent
|
||||
# low-latency access patterns across diverse hardware.
|
||||
#
|
||||
# - 8192-16384 bytes: May improve performance on
|
||||
# high-end NVMe SSDs and copy-on-write filesystems
|
||||
# like ZFS or Btrfs that benefit from larger block
|
||||
# alignment. Can reduce metadata overhead for large
|
||||
# databases. Offers better sequential throughput and
|
||||
# reduced I/O operations at the cost of higher memory
|
||||
# usage per operation.
|
||||
#
|
||||
# - 32768 bytes (32K): Maximum supported block size
|
||||
# for high-performance scenarios with very fast
|
||||
# storage. May increase memory usage and reduce
|
||||
# efficiency for smaller databases. Best suited for
|
||||
# enterprise environments with abundant RAM.
|
||||
#
|
||||
# Performance testing is recommended before deploying
|
||||
# any non-default block size in production environments.
|
||||
#
|
||||
# Note: This setting cannot be changed after database
|
||||
# creation without rebuilding the entire database.
|
||||
# Choose carefully based on your hardware and expected
|
||||
# database size.
|
||||
#
|
||||
# Example: nudb_block_size=4096
|
||||
#
|
||||
# These keys modify the behavior of online_delete, and thus are only
|
||||
# relevant if online_delete is defined and non-zero:
|
||||
#
|
||||
@@ -1008,10 +1049,10 @@
|
||||
#
|
||||
# recovery_wait_seconds
|
||||
# The online delete process checks periodically
|
||||
# that rippled is still in sync with the network,
|
||||
# that xrpld is still in sync with the network,
|
||||
# and that the validated ledger is less than
|
||||
# 'age_threshold_seconds' old. If not, then continue
|
||||
# sleeping for this number of seconds and
|
||||
# sleeping for this number of seconds and
|
||||
# checking until healthy.
|
||||
# Default is 5.
|
||||
#
|
||||
@@ -1028,8 +1069,8 @@
|
||||
# The server creates and maintains 4 to 5 bookkeeping SQLite databases in
|
||||
# the 'database_path' location. If you omit this configuration setting,
|
||||
# the server creates a directory called "db" located in the same place as
|
||||
# your rippled.cfg file.
|
||||
# Partial pathnames are relative to the location of the rippled executable.
|
||||
# your xrpld.cfg file.
|
||||
# Partial pathnames are relative to the location of the xrpld executable.
|
||||
#
|
||||
# [sqlite] Tuning settings for the SQLite databases (optional)
|
||||
#
|
||||
@@ -1079,7 +1120,7 @@
|
||||
# The default is "wal", which uses a write-ahead
|
||||
# log to implement database transactions.
|
||||
# Alternately, "memory" saves disk I/O, but if
|
||||
# rippled crashes during a transaction, the
|
||||
# xrpld crashes during a transaction, the
|
||||
# database is likely to be corrupted.
|
||||
# See https://www.sqlite.org/pragma.html#pragma_journal_mode
|
||||
# for more details about the available options.
|
||||
@@ -1089,7 +1130,7 @@
|
||||
# synchronous Valid values: off, normal, full, extra
|
||||
# The default is "normal", which works well with
|
||||
# the "wal" journal mode. Alternatively, "off"
|
||||
# allows rippled to continue as soon as data is
|
||||
# allows xrpld to continue as soon as data is
|
||||
# passed to the OS, which can significantly
|
||||
# increase speed, but risks data corruption if
|
||||
# the host computer crashes before writing that
|
||||
@@ -1103,7 +1144,7 @@
|
||||
# The default is "file", which will use files
|
||||
# for temporary database tables and indices.
|
||||
# Alternatively, "memory" may save I/O, but
|
||||
# rippled does not currently use many, if any,
|
||||
# xrpld does not currently use many, if any,
|
||||
# of these temporary objects.
|
||||
# See https://www.sqlite.org/pragma.html#pragma_temp_store
|
||||
# for more details about the available options.
|
||||
@@ -1113,7 +1154,7 @@
|
||||
# page_size Valid values: integer (MUST be power of 2 between 512 and 65536)
|
||||
# The default is 4096 bytes. This setting determines
|
||||
# the size of a page in the transaction.db file.
|
||||
# See https://www.sqlite.org/pragma.html#pragma_page_size
|
||||
# See https://www.sqlite.org/pragma.html#pragma_page_size
|
||||
# for more details about the available options.
|
||||
#
|
||||
# journal_size_limit Valid values: integer
|
||||
@@ -1132,7 +1173,7 @@
|
||||
#
|
||||
# These settings are designed to help server administrators diagnose
|
||||
# problems, and obtain detailed information about the activities being
|
||||
# performed by the rippled process.
|
||||
# performed by the xrpld process.
|
||||
#
|
||||
#
|
||||
#
|
||||
@@ -1149,7 +1190,7 @@
|
||||
#
|
||||
# Configuration parameters for the Beast.Insight stats collection module.
|
||||
#
|
||||
# Insight is a module that collects information from the areas of rippled
|
||||
# Insight is a module that collects information from the areas of xrpld
|
||||
# that have instrumentation. The configuration parameters control where the
|
||||
# collection metrics are sent. The parameters are expressed as key = value
|
||||
# pairs with no white space. The main parameter is the choice of server:
|
||||
@@ -1158,7 +1199,7 @@
|
||||
#
|
||||
# Choice of server to send metrics to. Currently the only choice is
|
||||
# "statsd" which sends UDP packets to a StatsD daemon, which must be
|
||||
# running while rippled is running. More information on StatsD is
|
||||
# running while xrpld is running. More information on StatsD is
|
||||
# available here:
|
||||
# https://github.com/b/statsd_spec
|
||||
#
|
||||
@@ -1168,7 +1209,7 @@
|
||||
# in the format, n.n.n.n:port.
|
||||
#
|
||||
# "prefix" A string prepended to each collected metric. This is used
|
||||
# to distinguish between different running instances of rippled.
|
||||
# to distinguish between different running instances of xrpld.
|
||||
#
|
||||
# If this section is missing, or the server type is unspecified or unknown,
|
||||
# statistics are not collected or reported.
|
||||
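#
#   Example (illustrative only; assumes the section is named [insight] and a
#   local StatsD daemon listening on the conventional port 8125):
#
#     [insight]
#     server=statsd
#     address=127.0.0.1:8125
#     prefix=my_xrpld_server
#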
@@ -1195,7 +1236,7 @@
|
||||
#
|
||||
# Example:
|
||||
# [perf]
|
||||
# perf_log=/var/log/rippled/perf.log
|
||||
# perf_log=/var/log/xrpld/perf.log
|
||||
# log_interval=2
|
||||
#
|
||||
#-------------------------------------------------------------------------------
|
||||
@@ -1205,7 +1246,7 @@
|
||||
#----------
|
||||
#
|
||||
# The vote settings configure settings for the entire Ripple network.
|
||||
# While a single instance of rippled cannot unilaterally enforce network-wide
|
||||
# While a single instance of xrpld cannot unilaterally enforce network-wide
|
||||
# settings, these choices become part of the instance's vote during the
|
||||
# consensus process for each voting ledger.
|
||||
#
|
||||
@@ -1219,7 +1260,7 @@
|
||||
# The reference transaction is the simplest form of transaction.
|
||||
# It represents an XRP payment between two parties.
|
||||
#
|
||||
# If this parameter is unspecified, rippled will use an internal
|
||||
# If this parameter is unspecified, xrpld will use an internal
|
||||
# default. Don't change this without understanding the consequences.
|
||||
#
|
||||
# Example:
|
||||
@@ -1231,7 +1272,7 @@
|
||||
# account's XRP balance that is at or below the reserve may only be
|
||||
# spent on transaction fees, and not transferred out of the account.
|
||||
#
|
||||
# If this parameter is unspecified, rippled will use an internal
|
||||
# If this parameter is unspecified, xrpld will use an internal
|
||||
# default. Don't change this without understanding the consequences.
|
||||
#
|
||||
# Example:
|
||||
@@ -1243,12 +1284,45 @@
|
||||
# each ledger item owned by the account. Ledger items an account may
|
||||
# own include trust lines, open orders, and tickets.
|
||||
#
|
||||
# If this parameter is unspecified, rippled will use an internal
|
||||
# If this parameter is unspecified, xrpld will use an internal
|
||||
# default. Don't change this without understanding the consequences.
|
||||
#
|
||||
# Example:
|
||||
# owner_reserve = 2000000 # 2 XRP
|
||||
#
|
||||
# extension_compute_limit = <gas>
|
||||
#
|
||||
# The extension compute limit is the maximum amount of gas that can be
|
||||
# consumed by a single transaction. The gas limit is used to prevent
|
||||
# transactions from consuming too many resources.
|
||||
#
|
||||
# If this parameter is unspecified, xrpld will use an internal
|
||||
# default. Don't change this without understanding the consequences.
|
||||
#
|
||||
# Example:
|
||||
# extension_compute_limit = 1000000 # 1 million gas
|
||||
#
|
||||
# extension_size_limit = <bytes>
|
||||
#
|
||||
# The extension size limit is the maximum size of a WASM extension in
|
||||
# bytes. The size limit is used to prevent extensions from consuming
|
||||
# too many resources.
|
||||
#
|
||||
# If this parameter is unspecified, xrpld will use an internal
|
||||
# default. Don't change this without understanding the consequences.
|
||||
#
|
||||
# Example:
|
||||
# extension_size_limit = 100000 # 100 kb
|
||||
#
|
||||
# gas_price = <number>
|
||||
#
|
||||
# The gas price is the conversion between WASM gas and its price in drops.
|
||||
#
|
||||
# If this parameter is unspecified, xrpld will use an internal
|
||||
# default. Don't change this without understanding the consequences.
|
||||
#
|
||||
# Example:
|
||||
# gas_price = 1000000 # 1 drop per gas
|
||||
#-------------------------------------------------------------------------------
|
||||
#
|
||||
# 9. Misc Settings
|
||||
@@ -1285,7 +1359,7 @@
|
||||
# tool instead.
|
||||
#
|
||||
# This flag has no effect on the "sign" and "sign_for" command line options
|
||||
# that rippled makes available.
|
||||
# that xrpld makes available.
|
||||
#
|
||||
# The default value of this field is "false"
|
||||
#
|
||||
@@ -1364,7 +1438,7 @@
|
||||
#--------------------
|
||||
#
|
||||
# Administrators can use these values as a starting point for configuring
|
||||
# their instance of rippled, but each value should be checked to make sure
|
||||
# their instance of xrpld, but each value should be checked to make sure
|
||||
# it meets the business requirements for the organization.
|
||||
#
|
||||
# Server
|
||||
@@ -1374,7 +1448,7 @@
|
||||
# "peer"
|
||||
#
|
||||
# Peer protocol open to everyone. This is required to accept
|
||||
# incoming rippled connections. This does not affect automatic
|
||||
# incoming xrpld connections. This does not affect automatic
|
||||
# or manual outgoing Peer protocol connections.
|
||||
#
|
||||
# "rpc"
|
||||
@@ -1391,7 +1465,7 @@
|
||||
#
|
||||
# ETL commands for Clio. We recommend setting secure_gateway
|
||||
# in this section to a comma-separated list of the addresses
|
||||
# of your Clio servers, in order to bypass rippled's rate limiting.
|
||||
# of your Clio servers, in order to bypass xrpld's rate limiting.
|
||||
#
|
||||
# This port is commented out but can be enabled by removing
|
||||
# the '#' from each corresponding line including the entry under [server]
|
||||
@@ -1408,8 +1482,8 @@
|
||||
# NOTE
|
||||
#
|
||||
# To accept connections on well known ports such as 80 (HTTP) or
|
||||
# 443 (HTTPS), most operating systems will require rippled to
|
||||
# run with administrator privileges, or else rippled will not start.
|
||||
# 443 (HTTPS), most operating systems will require xrpld to
|
||||
# run with administrator privileges, or else xrpld will not start.
|
||||
|
||||
[server]
|
||||
port_rpc_admin_local
|
||||
@@ -1455,7 +1529,7 @@ secure_gateway = 127.0.0.1
|
||||
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
# This is primary persistent datastore for rippled. This includes transaction
|
||||
# This is the primary persistent datastore for xrpld. This includes transaction
|
||||
# metadata, account states, and ledger headers. Helpful information can be
|
||||
# found at https://xrpl.org/capacity-planning.html#node-db-type
|
||||
# type=NuDB is recommended for non-validators with fast SSDs. Validators or
|
||||
@@ -1470,18 +1544,19 @@ secure_gateway = 127.0.0.1
|
||||
# deletion.
|
||||
[node_db]
|
||||
type=NuDB
|
||||
path=/var/lib/rippled/db/nudb
|
||||
path=/var/lib/xrpld/db/nudb
|
||||
nudb_block_size=4096
|
||||
online_delete=512
|
||||
advisory_delete=0
|
||||
|
||||
[database_path]
|
||||
/var/lib/rippled/db
|
||||
/var/lib/xrpld/db
|
||||
|
||||
|
||||
# This needs to be an absolute directory reference, not a relative one.
|
||||
# Modify this value as required.
|
||||
[debug_logfile]
|
||||
/var/log/rippled/debug.log
|
||||
/var/log/xrpld/debug.log
|
||||
|
||||
# To use the XRP test network
|
||||
# (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
|
||||
@@ -1491,7 +1566,7 @@ advisory_delete=0
|
||||
|
||||
# File containing trusted validator keys or validator list publishers.
|
||||
# Unless an absolute path is specified, it will be considered relative to the
|
||||
# folder in which the rippled.cfg file is located.
|
||||
# folder in which the xrpld.cfg file is located.
|
||||
[validators_file]
|
||||
validators.txt
|
||||
|
||||
clang_format.sh (new executable file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
#!/bin/bash
|
||||
# cspell: ignore clangf
|
||||
modified=$1
|
||||
dir=`pwd`
|
||||
clangf=clang-format-10
|
||||
clangf=clang-format
|
||||
if [ "$1" = "--all" ]
|
||||
then
|
||||
modified=`git status|egrep "modified|new file"|egrep "(cpp|h)$" | sed -E -e 's/modified://' -e 's/new file://' -e 's/^[[:space:]]+//'`
|
||||
fi
|
||||
for i in $modified
|
||||
do
|
||||
basedir=$(dirname "$i")
|
||||
file=$(basename "$i")
|
||||
echo "$basedir $file"
|
||||
cd $basedir
|
||||
$clangf -style=file -i "$file"
|
||||
cd $dir
|
||||
done
|
||||
@@ -1,21 +1,3 @@
|
||||
macro(group_sources_in source_dir curdir)
|
||||
file(GLOB children RELATIVE ${source_dir}/${curdir}
|
||||
${source_dir}/${curdir}/*)
|
||||
foreach (child ${children})
|
||||
if (IS_DIRECTORY ${source_dir}/${curdir}/${child})
|
||||
group_sources_in(${source_dir} ${curdir}/${child})
|
||||
else()
|
||||
string(REPLACE "/" "\\" groupname ${curdir})
|
||||
source_group(${groupname} FILES
|
||||
${source_dir}/${curdir}/${child})
|
||||
endif()
|
||||
endforeach()
|
||||
endmacro()
|
||||
|
||||
macro(group_sources curdir)
|
||||
group_sources_in(${PROJECT_SOURCE_DIR} ${curdir})
|
||||
endmacro()
|
||||
|
||||
macro (exclude_from_default target_)
|
||||
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
|
||||
set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)
|
||||
|
||||
cmake/Ccache.cmake (new file, 51 lines)
@@ -0,0 +1,51 @@
|
||||
find_program(CCACHE_PATH "ccache")
|
||||
if (NOT CCACHE_PATH)
|
||||
return()
|
||||
endif ()
|
||||
|
||||
# For Linux and macOS we can use the ccache binary directly.
|
||||
if (NOT MSVC)
|
||||
set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_PATH}")
|
||||
set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_PATH}")
|
||||
message(STATUS "Found ccache: ${CCACHE_PATH}")
|
||||
return()
|
||||
endif ()
|
||||
|
||||
# For Windows more effort is required. The code below is a modified version of
|
||||
# https://github.com/ccache/ccache/wiki/MS-Visual-Studio#usage-with-cmake.
|
||||
if ("${CCACHE_PATH}" MATCHES "chocolatey")
|
||||
message(DEBUG "Ccache path: ${CCACHE_PATH}")
|
||||
# Chocolatey uses a shim executable that we cannot use directly, in which
|
||||
# case we have to find the executable it points to. If we cannot find the
|
||||
# target executable then we cannot use ccache.
|
||||
find_program(BASH_PATH "bash")
|
||||
if (NOT BASH_PATH)
|
||||
message(WARNING "Could not find bash.")
|
||||
return()
|
||||
endif ()
|
||||
|
||||
execute_process(
|
||||
COMMAND bash -c "export LC_ALL='en_US.UTF-8'; ${CCACHE_PATH} --shimgen-noop | grep -oP 'path to executable: \\K.+' | head -c -1"
|
||||
OUTPUT_VARIABLE CCACHE_PATH)
|
||||
|
||||
if (NOT CCACHE_PATH)
|
||||
message(WARNING "Could not find ccache target.")
|
||||
return()
|
||||
endif ()
|
||||
file(TO_CMAKE_PATH "${CCACHE_PATH}" CCACHE_PATH)
|
||||
endif ()
|
||||
message(STATUS "Found ccache: ${CCACHE_PATH}")
|
||||
|
||||
# Tell cmake to use ccache for compiling with Visual Studio.
|
||||
file(COPY_FILE
|
||||
${CCACHE_PATH} ${CMAKE_BINARY_DIR}/cl.exe
|
||||
ONLY_IF_DIFFERENT)
|
||||
set(CMAKE_VS_GLOBALS
|
||||
"CLToolExe=cl.exe"
|
||||
"CLToolPath=${CMAKE_BINARY_DIR}"
|
||||
"TrackFileAccess=false"
|
||||
"UseMultiToolTask=true")
|
||||
|
||||
# By default Visual Studio generators will use /Zi, which is not compatible with
|
||||
# ccache, so tell it to use /Z7 instead.
|
||||
set(CMAKE_MSVC_DEBUG_INFORMATION_FORMAT "$<$<CONFIG:Debug,RelWithDebInfo>:Embedded>")
|
||||
@@ -101,6 +101,17 @@
|
||||
# 2025-05-12, Jingchen Wu
|
||||
# - add -fprofile-update=atomic to ensure atomic profile generation
|
||||
#
|
||||
# 2025-08-28, Bronek Kozicki
|
||||
# - fix "At least one COMMAND must be given" CMake warning from policy CMP0175
|
||||
#
|
||||
# 2025-09-03, Jingchen Wu
|
||||
# - remove the unused function append_coverage_compiler_flags and append_coverage_compiler_flags_to_target
|
||||
# - add a new function add_code_coverage_to_target
|
||||
# - remove some unused code
|
||||
#
|
||||
# 2025-11-11, Bronek Kozicki
|
||||
# - make EXECUTABLE and EXECUTABLE_ARGS optional
|
||||
#
|
||||
# USAGE:
|
||||
#
|
||||
# 1. Copy this file into your cmake modules path.
|
||||
@@ -109,10 +120,8 @@
|
||||
# using a CMake option() to enable it just optionally):
|
||||
# include(CodeCoverage)
|
||||
#
|
||||
# 3. Append necessary compiler flags for all supported source files:
|
||||
# append_coverage_compiler_flags()
|
||||
# Or for specific target:
|
||||
# append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
|
||||
# 3. Append necessary compiler flags and linker flags for all supported source files:
|
||||
# add_code_coverage_to_target(<target> <PRIVATE|PUBLIC|INTERFACE>)
|
||||
#
|
||||
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
|
||||
#
|
||||
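As a concrete sketch of step 3 above, using a hypothetical target name:

add_library(example_lib STATIC example.cpp)
add_code_coverage_to_target(example_lib PRIVATE)  # adds the coverage compile and link flags to this target only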
@@ -201,67 +210,69 @@ endforeach()
|
||||
|
||||
set(COVERAGE_COMPILER_FLAGS "-g --coverage"
|
||||
CACHE INTERNAL "")
|
||||
|
||||
set(COVERAGE_CXX_COMPILER_FLAGS "")
|
||||
set(COVERAGE_C_COMPILER_FLAGS "")
|
||||
set(COVERAGE_CXX_LINKER_FLAGS "")
|
||||
set(COVERAGE_C_LINKER_FLAGS "")
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
|
||||
include(CheckCXXCompilerFlag)
|
||||
include(CheckCCompilerFlag)
|
||||
include(CheckLinkerFlag)
|
||||
|
||||
set(COVERAGE_CXX_COMPILER_FLAGS ${COVERAGE_COMPILER_FLAGS})
|
||||
set(COVERAGE_C_COMPILER_FLAGS ${COVERAGE_COMPILER_FLAGS})
|
||||
set(COVERAGE_CXX_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})
|
||||
set(COVERAGE_C_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})
|
||||
|
||||
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
|
||||
if(HAVE_cxx_fprofile_abs_path)
|
||||
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
|
||||
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path")
|
||||
endif()
|
||||
|
||||
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
|
||||
if(HAVE_c_fprofile_abs_path)
|
||||
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
|
||||
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path")
|
||||
endif()
|
||||
|
||||
check_cxx_compiler_flag(-fprofile-update HAVE_cxx_fprofile_update)
|
||||
check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path)
|
||||
if(HAVE_cxx_linker_fprofile_abs_path)
|
||||
set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path")
|
||||
endif()
|
||||
|
||||
check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path)
|
||||
if(HAVE_c_linker_fprofile_abs_path)
|
||||
set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path")
|
||||
endif()
|
||||
|
||||
check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update)
|
||||
if(HAVE_cxx_fprofile_update)
|
||||
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
|
||||
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic")
|
||||
endif()
|
||||
|
||||
check_c_compiler_flag(-fprofile-update HAVE_c_fprofile_update)
|
||||
check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update)
|
||||
if(HAVE_c_fprofile_update)
|
||||
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
|
||||
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(CMAKE_Fortran_FLAGS_COVERAGE
|
||||
${COVERAGE_COMPILER_FLAGS}
|
||||
CACHE STRING "Flags used by the Fortran compiler during coverage builds."
|
||||
FORCE )
|
||||
set(CMAKE_CXX_FLAGS_COVERAGE
|
||||
${COVERAGE_COMPILER_FLAGS}
|
||||
CACHE STRING "Flags used by the C++ compiler during coverage builds."
|
||||
FORCE )
|
||||
set(CMAKE_C_FLAGS_COVERAGE
|
||||
${COVERAGE_COMPILER_FLAGS}
|
||||
CACHE STRING "Flags used by the C compiler during coverage builds."
|
||||
FORCE )
|
||||
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
|
||||
""
|
||||
CACHE STRING "Flags used for linking binaries during coverage builds."
|
||||
FORCE )
|
||||
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
|
||||
""
|
||||
CACHE STRING "Flags used by the shared libraries linker during coverage builds."
|
||||
FORCE )
|
||||
mark_as_advanced(
|
||||
CMAKE_Fortran_FLAGS_COVERAGE
|
||||
CMAKE_CXX_FLAGS_COVERAGE
|
||||
CMAKE_C_FLAGS_COVERAGE
|
||||
CMAKE_EXE_LINKER_FLAGS_COVERAGE
|
||||
CMAKE_SHARED_LINKER_FLAGS_COVERAGE )
|
||||
check_linker_flag(CXX -fprofile-update=atomic HAVE_cxx_linker_fprofile_update)
|
||||
if(HAVE_cxx_linker_fprofile_update)
|
||||
set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic")
|
||||
endif()
|
||||
|
||||
check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update)
|
||||
if(HAVE_c_linker_fprofile_update)
|
||||
set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic")
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
||||
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
|
||||
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
|
||||
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
|
||||
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
|
||||
|
||||
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
|
||||
link_libraries(gcov)
|
||||
endif()
|
||||
|
||||
# Defines a target for running and collecting code coverage information
|
||||
# Builds dependencies, runs the given executable and outputs reports.
|
||||
# NOTE! The executable should always have a ZERO as exit code otherwise
|
||||
@@ -309,6 +320,10 @@ function(setup_target_for_coverage_gcovr)
|
||||
set(Coverage_FORMAT xml)
|
||||
endif()
|
||||
|
||||
if(NOT DEFINED Coverage_EXECUTABLE AND DEFINED Coverage_EXECUTABLE_ARGS)
|
||||
message(FATAL_ERROR "EXECUTABLE_ARGS must not be set if EXECUTABLE is not set")
|
||||
endif()
|
||||
|
||||
if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
|
||||
message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
|
||||
else()
|
||||
@@ -390,17 +405,18 @@ function(setup_target_for_coverage_gcovr)
|
||||
endforeach()
|
||||
|
||||
# Set up commands which will be run to generate coverage data
|
||||
# Run tests
|
||||
set(GCOVR_EXEC_TESTS_CMD
|
||||
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
|
||||
)
|
||||
# If EXECUTABLE is not set, the user is expected to run the tests manually
|
||||
# before running the coverage target NAME
|
||||
if(DEFINED Coverage_EXECUTABLE)
|
||||
set(GCOVR_EXEC_TESTS_CMD
|
||||
${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
|
||||
)
|
||||
endif()
|
||||
|
||||
# Create folder
|
||||
if(DEFINED GCOVR_CREATE_FOLDER)
|
||||
set(GCOVR_FOLDER_CMD
|
||||
${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
|
||||
else()
|
||||
set(GCOVR_FOLDER_CMD echo) # dummy
|
||||
endif()
|
||||
|
||||
# Running gcovr
|
||||
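A hedged sketch of the new optional form described in this hunk, where no EXECUTABLE is given and the tests are expected to have been run manually before building the coverage target (the NAME and EXCLUDE values are illustrative, not taken from the diff):

setup_target_for_coverage_gcovr(
    NAME coverage_report        # run the tests yourself, then: cmake --build . --target coverage_report
    FORMAT html-details
    EXCLUDE "src/test"
)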
@@ -417,11 +433,13 @@ function(setup_target_for_coverage_gcovr)
|
||||
if(CODE_COVERAGE_VERBOSE)
|
||||
message(STATUS "Executed command report")
|
||||
|
||||
message(STATUS "Command to run tests: ")
|
||||
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
|
||||
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
|
||||
if(NOT "${GCOVR_EXEC_TESTS_CMD}" STREQUAL "")
|
||||
message(STATUS "Command to run tests: ")
|
||||
string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
|
||||
message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")
|
||||
endif()
|
||||
|
||||
if(NOT GCOVR_FOLDER_CMD STREQUAL "echo")
|
||||
if(NOT "${GCOVR_FOLDER_CMD}" STREQUAL "")
|
||||
message(STATUS "Command to create a folder: ")
|
||||
string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
|
||||
message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
|
||||
@@ -446,23 +464,24 @@ function(setup_target_for_coverage_gcovr)
|
||||
|
||||
# Show info where to find the report
|
||||
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
|
||||
COMMAND ;
|
||||
COMMAND echo
|
||||
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
|
||||
)
|
||||
endfunction() # setup_target_for_coverage_gcovr
|
||||
|
||||
function(append_coverage_compiler_flags)
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
||||
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
||||
message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
|
||||
endfunction() # append_coverage_compiler_flags
|
||||
function(add_code_coverage_to_target name scope)
|
||||
separate_arguments(COVERAGE_CXX_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_COMPILER_FLAGS}")
|
||||
separate_arguments(COVERAGE_C_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_C_COMPILER_FLAGS}")
|
||||
separate_arguments(COVERAGE_CXX_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_LINKER_FLAGS}")
|
||||
separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")
|
||||
|
||||
# Setup coverage for specific library
|
||||
function(append_coverage_compiler_flags_to_target name)
|
||||
separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
|
||||
target_compile_options(${name} PRIVATE ${_flag_list})
|
||||
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
|
||||
target_link_libraries(${name} PRIVATE gcov)
|
||||
endif()
|
||||
endfunction()
|
||||
# Add compiler options to the target
|
||||
target_compile_options(${name} ${scope}
|
||||
$<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
|
||||
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
|
||||
|
||||
target_link_libraries (${name} ${scope}
|
||||
$<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS} gcov>
|
||||
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS} gcov>
|
||||
)
|
||||
endfunction() # add_code_coverage_to_target
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
option (validator_keys "Enables building of validator-keys-tool as a separate target (imported via FetchContent)" OFF)
|
||||
|
||||
if (validator_keys)
|
||||
git_branch (current_branch)
|
||||
# default to tracking VK master branch unless we are on release
|
||||
if (NOT (current_branch STREQUAL "release"))
|
||||
set (current_branch "master")
|
||||
endif ()
|
||||
message (STATUS "tracking ValidatorKeys branch: ${current_branch}")
|
||||
|
||||
FetchContent_Declare (
|
||||
validator_keys_src
|
||||
GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
|
||||
GIT_TAG "${current_branch}"
|
||||
)
|
||||
FetchContent_GetProperties (validator_keys_src)
|
||||
if (NOT validator_keys_src_POPULATED)
|
||||
message (STATUS "Pausing to download ValidatorKeys...")
|
||||
FetchContent_Populate (validator_keys_src)
|
||||
endif ()
|
||||
add_subdirectory (${validator_keys_src_SOURCE_DIR} ${CMAKE_BINARY_DIR}/validator-keys)
|
||||
endif ()
|
||||
@@ -7,7 +7,7 @@ function(xrpl_add_test name)
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
|
||||
)
|
||||
add_executable(${target} EXCLUDE_FROM_ALL ${ARGN} ${sources})
|
||||
add_executable(${target} ${ARGN} ${sources})
|
||||
|
||||
isolate_headers(
|
||||
${target}
|
||||
@@ -22,20 +22,4 @@ function(xrpl_add_test name)
|
||||
UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
|
||||
|
||||
add_test(NAME ${target} COMMAND ${target})
|
||||
set_tests_properties(
|
||||
${target} PROPERTIES
|
||||
FIXTURES_REQUIRED ${target}_fixture
|
||||
)
|
||||
|
||||
add_test(
|
||||
NAME ${target}.build
|
||||
COMMAND
|
||||
${CMAKE_COMMAND}
|
||||
--build ${CMAKE_BINARY_DIR}
|
||||
--config $<CONFIG>
|
||||
--target ${target}
|
||||
)
|
||||
set_tests_properties(${target}.build PROPERTIES
|
||||
FIXTURES_SETUP ${target}_fixture
|
||||
)
|
||||
endfunction()
|
||||
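For context, a sketch of how this helper is typically invoked and driven through ctest; the test name is hypothetical:

xrpl_add_test(basics)
# ctest first runs the <target>.build fixture test, which builds the test binary
# on demand, and only then runs the actual <target> test that requires the fixture.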
@@ -7,22 +7,25 @@
|
||||
toolchain file, especially the ABI-impacting ones
|
||||
#]=========================================================]
|
||||
add_library (common INTERFACE)
|
||||
add_library (Ripple::common ALIAS common)
|
||||
add_library (Xrpl::common ALIAS common)
|
||||
# add a single global dependency on this interface lib
|
||||
link_libraries (Ripple::common)
|
||||
link_libraries (Xrpl::common)
|
||||
set_target_properties (common
|
||||
PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ON)
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
target_compile_definitions (common
|
||||
INTERFACE
|
||||
$<$<CONFIG:Debug>:DEBUG _DEBUG>
|
||||
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>)
|
||||
# ^^^^ NOTE: CMAKE release builds already have NDEBUG
|
||||
# defined, so no need to add it explicitly except for
|
||||
# this special case of (profile ON) and (assert OFF)
|
||||
# -- presumably this is because we don't want profile
|
||||
# builds asserting unless asserts were specifically
|
||||
# requested
|
||||
#[===[
|
||||
NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it
|
||||
explicitly except for the special case of (profile ON) and (assert OFF).
|
||||
Presumably this is because we don't want profile builds asserting unless
|
||||
asserts were specifically requested.
|
||||
]===]
|
||||
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
|
||||
# TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
|
||||
OPENSSL_SUPPRESS_DEPRECATED
|
||||
)
|
||||
|
||||
if (MSVC)
|
||||
# remove existing exception flag since we set it to -EHa
|
||||
@@ -146,7 +149,7 @@ elseif (use_gold AND is_gcc)
|
||||
ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
|
||||
#[=========================================================[
|
||||
NOTE: THE gold linker inserts -rpath as DT_RUNPATH by
|
||||
default intead of DT_RPATH, so you might have slightly
|
||||
default instead of DT_RPATH, so you might have slightly
|
||||
unexpected runtime ld behavior if you were expecting
|
||||
DT_RPATH. Specify --disable-new-dtags to gold if you do
|
||||
not want the default DT_RUNPATH behavior. This rpath
|
||||
@@ -12,7 +12,7 @@ if (static OR MSVC)
|
||||
else ()
|
||||
set (Boost_USE_STATIC_RUNTIME OFF)
|
||||
endif ()
|
||||
find_dependency (Boost 1.70
|
||||
find_dependency (Boost
|
||||
COMPONENTS
|
||||
chrono
|
||||
container
|
||||
@@ -45,12 +45,10 @@ if (static OR APPLE OR MSVC)
|
||||
set (OPENSSL_USE_STATIC_LIBS ON)
|
||||
endif ()
|
||||
set (OPENSSL_MSVC_STATIC_RT ON)
|
||||
find_dependency (OpenSSL 1.1.1 REQUIRED)
|
||||
find_dependency (OpenSSL REQUIRED)
|
||||
find_dependency (ZLIB)
|
||||
find_dependency (date)
|
||||
if (TARGET ZLIB::ZLIB)
|
||||
set_target_properties(OpenSSL::Crypto PROPERTIES
|
||||
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
|
||||
endif ()
|
||||
|
||||
include ("${CMAKE_CURRENT_LIST_DIR}/RippleTargets.cmake")
|
||||
@@ -13,7 +13,7 @@ set_target_properties(xrpl.libpb PROPERTIES UNITY_BUILD OFF)
|
||||
target_protobuf_sources(xrpl.libpb xrpl/proto
|
||||
LANGUAGE cpp
|
||||
IMPORT_DIRS include/xrpl/proto
|
||||
PROTOS include/xrpl/proto/ripple.proto
|
||||
PROTOS include/xrpl/proto/xrpl.proto
|
||||
)
|
||||
|
||||
file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
|
||||
@@ -53,15 +53,17 @@ add_library(xrpl.imports.main INTERFACE)
|
||||
|
||||
target_link_libraries(xrpl.imports.main
|
||||
INTERFACE
|
||||
LibArchive::LibArchive
|
||||
OpenSSL::Crypto
|
||||
Ripple::boost
|
||||
Ripple::opts
|
||||
Ripple::syslibs
|
||||
absl::random_random
|
||||
date::date
|
||||
ed25519::ed25519
|
||||
LibArchive::LibArchive
|
||||
OpenSSL::Crypto
|
||||
Xrpl::boost
|
||||
Xrpl::libs
|
||||
Xrpl::opts
|
||||
Xrpl::syslibs
|
||||
secp256k1::secp256k1
|
||||
wasmi::wasmi
|
||||
xrpl.libpb
|
||||
xxHash::xxhash
|
||||
$<$<BOOL:${voidstar}>:antithesis-sdk-cpp>
|
||||
@@ -72,10 +74,7 @@ include(target_link_modules)
|
||||
|
||||
# Level 01
|
||||
add_module(xrpl beast)
|
||||
target_link_libraries(xrpl.libxrpl.beast PUBLIC
|
||||
xrpl.imports.main
|
||||
xrpl.libpb
|
||||
)
|
||||
target_link_libraries(xrpl.libxrpl.beast PUBLIC xrpl.imports.main)
|
||||
|
||||
# Level 02
|
||||
add_module(xrpl basics)
|
||||
@@ -96,12 +95,50 @@ target_link_libraries(xrpl.libxrpl.protocol PUBLIC
|
||||
)
|
||||
|
||||
# Level 05
|
||||
add_module(xrpl core)
|
||||
target_link_libraries(xrpl.libxrpl.core PUBLIC
|
||||
xrpl.libxrpl.basics
|
||||
xrpl.libxrpl.json
|
||||
xrpl.libxrpl.protocol
|
||||
)
|
||||
|
||||
# Level 06
|
||||
add_module(xrpl resource)
|
||||
target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)
|
||||
|
||||
# Level 07
|
||||
add_module(xrpl net)
|
||||
target_link_libraries(xrpl.libxrpl.net PUBLIC
|
||||
xrpl.libxrpl.basics
|
||||
xrpl.libxrpl.json
|
||||
xrpl.libxrpl.protocol
|
||||
xrpl.libxrpl.resource
|
||||
)
|
||||
|
||||
add_module(xrpl server)
|
||||
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)
|
||||
|
||||
add_module(xrpl nodestore)
|
||||
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC
|
||||
xrpl.libxrpl.basics
|
||||
xrpl.libxrpl.json
|
||||
xrpl.libxrpl.protocol
|
||||
)
|
||||
|
||||
add_module(xrpl shamap)
|
||||
target_link_libraries(xrpl.libxrpl.shamap PUBLIC
|
||||
xrpl.libxrpl.basics
|
||||
xrpl.libxrpl.crypto
|
||||
xrpl.libxrpl.protocol
|
||||
xrpl.libxrpl.nodestore
|
||||
)
|
||||
|
||||
add_module(xrpl ledger)
|
||||
target_link_libraries(xrpl.libxrpl.ledger PUBLIC
|
||||
xrpl.libxrpl.basics
|
||||
xrpl.libxrpl.json
|
||||
xrpl.libxrpl.protocol
|
||||
)
|
||||
|
||||
add_library(xrpl.libxrpl)
|
||||
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)
|
||||
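To make the layering above concrete, a hedged sketch of how a hypothetical new module would be slotted in, linking only against modules from lower levels (the module name is invented for illustration):

add_module(xrpl example)
target_link_libraries(xrpl.libxrpl.example PUBLIC
    xrpl.libxrpl.basics
    xrpl.libxrpl.protocol
)
# The new module would also need to be added to the target_link_modules(xrpl PUBLIC ...) list below.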
@@ -116,11 +153,16 @@ target_sources(xrpl.libxrpl PRIVATE ${sources})
|
||||
target_link_modules(xrpl PUBLIC
|
||||
basics
|
||||
beast
|
||||
core
|
||||
crypto
|
||||
json
|
||||
protocol
|
||||
resource
|
||||
server
|
||||
nodestore
|
||||
shamap
|
||||
net
|
||||
ledger
|
||||
)
|
||||
|
||||
# All headers in libxrpl are in modules.
|
||||
@@ -133,14 +175,14 @@ target_link_modules(xrpl PUBLIC
|
||||
# $<INSTALL_INTERFACE:include>)
|
||||
|
||||
if(xrpld)
|
||||
add_executable(rippled)
|
||||
add_executable(xrpld)
|
||||
if(tests)
|
||||
target_compile_definitions(rippled PUBLIC ENABLE_TESTS)
|
||||
target_compile_definitions(rippled PRIVATE
|
||||
target_compile_definitions(xrpld PUBLIC ENABLE_TESTS)
|
||||
target_compile_definitions(xrpld PRIVATE
|
||||
UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}
|
||||
)
|
||||
endif()
|
||||
target_include_directories(rippled
|
||||
target_include_directories(xrpld
|
||||
PRIVATE
|
||||
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
|
||||
)
|
||||
@@ -148,36 +190,36 @@ if(xrpld)
|
||||
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp"
|
||||
)
|
||||
target_sources(rippled PRIVATE ${sources})
|
||||
target_sources(xrpld PRIVATE ${sources})
|
||||
|
||||
if(tests)
|
||||
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp"
|
||||
)
|
||||
target_sources(rippled PRIVATE ${sources})
|
||||
target_sources(xrpld PRIVATE ${sources})
|
||||
endif()
|
||||
|
||||
target_link_libraries(rippled
|
||||
Ripple::boost
|
||||
Ripple::opts
|
||||
Ripple::libs
|
||||
target_link_libraries(xrpld
|
||||
Xrpl::boost
|
||||
Xrpl::opts
|
||||
Xrpl::libs
|
||||
xrpl.libxrpl
|
||||
)
|
||||
exclude_if_included(rippled)
|
||||
exclude_if_included(xrpld)
|
||||
# define a macro for tests that might need to
|
||||
# be exluded or run differently in CI environment
|
||||
# be excluded or run differently in CI environment
|
||||
if(is_ci)
|
||||
target_compile_definitions(rippled PRIVATE RIPPLED_RUNNING_IN_CI)
|
||||
target_compile_definitions(xrpld PRIVATE XRPL_RUNNING_IN_CI)
|
||||
endif ()
|
||||
|
||||
if(voidstar)
|
||||
target_compile_options(rippled
|
||||
target_compile_options(xrpld
|
||||
PRIVATE
|
||||
-fsanitize-coverage=trace-pc-guard
|
||||
)
|
||||
# rippled requires access to antithesis-sdk-cpp implementation file
|
||||
# xrpld requires access to antithesis-sdk-cpp implementation file
|
||||
# antithesis_instrumentation.h, which is not exported as INTERFACE
|
||||
target_include_directories(rippled
|
||||
target_include_directories(xrpld
|
||||
PRIVATE
|
||||
${CMAKE_SOURCE_DIR}/external/antithesis-sdk
|
||||
)
|
||||
@@ -11,6 +11,9 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
|
||||
return()
|
||||
endif()
|
||||
|
||||
include(ProcessorCount)
|
||||
ProcessorCount(PROCESSOR_COUNT)
|
||||
|
||||
include(CodeCoverage)
|
||||
|
||||
# The instructions for these commands come from the `CodeCoverage` module,
|
||||
@@ -26,13 +29,13 @@ list(APPEND GCOVR_ADDITIONAL_ARGS
|
||||
--exclude-throw-branches
|
||||
--exclude-noncode-lines
|
||||
--exclude-unreachable-branches -s
|
||||
-j ${coverage_test_parallelism})
|
||||
-j ${PROCESSOR_COUNT})
|
||||
|
||||
setup_target_for_coverage_gcovr(
|
||||
NAME coverage
|
||||
FORMAT ${coverage_format}
|
||||
EXECUTABLE rippled
|
||||
EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
|
||||
EXCLUDE "src/test" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
|
||||
DEPENDENCIES rippled
|
||||
EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
|
||||
DEPENDENCIES xrpld xrpl.tests
|
||||
)
|
||||
|
||||
add_code_coverage_to_target(opts INTERFACE)
|
||||
@@ -2,9 +2,7 @@
|
||||
docs target (optional)
|
||||
#]===================================================================]
|
||||
|
||||
option(with_docs "Include the docs target?" FALSE)
|
||||
|
||||
if(NOT (with_docs OR only_docs))
|
||||
if(NOT only_docs)
|
||||
return()
|
||||
endif()
|
||||
|
||||
@@ -8,20 +8,26 @@ install (
|
||||
TARGETS
|
||||
common
|
||||
opts
|
||||
ripple_syslibs
|
||||
ripple_boost
|
||||
xrpl_boost
|
||||
xrpl_libs
|
||||
xrpl_syslibs
|
||||
xrpl.imports.main
|
||||
xrpl.libpb
|
||||
xrpl.libxrpl
|
||||
xrpl.libxrpl.basics
|
||||
xrpl.libxrpl.beast
|
||||
xrpl.libxrpl.core
|
||||
xrpl.libxrpl.crypto
|
||||
xrpl.libxrpl.json
|
||||
xrpl.libxrpl.ledger
|
||||
xrpl.libxrpl.net
|
||||
xrpl.libxrpl.nodestore
|
||||
xrpl.libxrpl.protocol
|
||||
xrpl.libxrpl.resource
|
||||
xrpl.libxrpl.server
|
||||
xrpl.libxrpl
|
||||
xrpl.libxrpl.shamap
|
||||
antithesis-sdk-cpp
|
||||
EXPORT RippleExports
|
||||
EXPORT XrplExports
|
||||
LIBRARY DESTINATION lib
|
||||
ARCHIVE DESTINATION lib
|
||||
RUNTIME DESTINATION bin
|
||||
@@ -32,26 +38,19 @@ install(
|
||||
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
|
||||
)
|
||||
|
||||
install(CODE "
|
||||
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
|
||||
include(create_symbolic_link)
|
||||
create_symbolic_link(xrpl \
|
||||
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
|
||||
")
|
||||
|
||||
install (EXPORT RippleExports
|
||||
FILE RippleTargets.cmake
|
||||
NAMESPACE Ripple::
|
||||
DESTINATION lib/cmake/ripple)
|
||||
install (EXPORT XrplExports
|
||||
FILE XrplTargets.cmake
|
||||
NAMESPACE Xrpl::
|
||||
DESTINATION lib/cmake/xrpl)
|
||||
include (CMakePackageConfigHelpers)
|
||||
write_basic_package_version_file (
|
||||
RippleConfigVersion.cmake
|
||||
VERSION ${rippled_version}
|
||||
XrplConfigVersion.cmake
|
||||
VERSION ${xrpld_version}
|
||||
COMPATIBILITY SameMajorVersion)
|
||||
|
||||
if (is_root_project AND TARGET rippled)
|
||||
install (TARGETS rippled RUNTIME DESTINATION bin)
|
||||
set_target_properties(rippled PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
|
||||
if (is_root_project AND TARGET xrpld)
|
||||
install (TARGETS xrpld RUNTIME DESTINATION bin)
|
||||
set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
|
||||
# sample configs should not overwrite existing files
|
||||
# install if-not-exists workaround as suggested by
|
||||
# https://cmake.org/Bug/view.php?id=12646
|
||||
@@ -63,19 +62,19 @@ if (is_root_project AND TARGET rippled)
|
||||
message (\"-- Skipping : \$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\")
|
||||
endif ()
|
||||
endmacro()
|
||||
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/rippled-example.cfg\" etc rippled.cfg)
|
||||
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/xrpld-example.cfg\" etc xrpld.cfg)
|
||||
copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt)
|
||||
")
|
||||
install(CODE "
|
||||
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
|
||||
include(create_symbolic_link)
|
||||
create_symbolic_link(rippled${suffix} \
|
||||
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
|
||||
create_symbolic_link(xrpld${suffix} \
|
||||
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/rippled${suffix})
|
||||
")
|
||||
endif ()
|
||||
|
||||
install (
|
||||
FILES
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/cmake/RippleConfig.cmake
|
||||
${CMAKE_CURRENT_BINARY_DIR}/RippleConfigVersion.cmake
|
||||
DESTINATION lib/cmake/ripple)
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake
|
||||
${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
|
||||
DESTINATION lib/cmake/xrpl)
|
||||
@@ -1,9 +1,9 @@
|
||||
#[===================================================================[
|
||||
rippled compile options/settings via an interface library
|
||||
xrpld compile options/settings via an interface library
|
||||
#]===================================================================]
|
||||
|
||||
add_library (opts INTERFACE)
|
||||
add_library (Ripple::opts ALIAS opts)
|
||||
add_library (Xrpl::opts ALIAS opts)
|
||||
target_compile_definitions (opts
|
||||
INTERFACE
|
||||
BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
|
||||
@@ -21,22 +21,18 @@ target_compile_definitions (opts
|
||||
>
|
||||
$<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
|
||||
$<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
|
||||
$<$<BOOL:${single_io_service_thread}>:RIPPLE_SINGLE_IO_SERVICE_THREAD=1>
|
||||
$<$<BOOL:${single_io_service_thread}>:XRPL_SINGLE_IO_SERVICE_THREAD=1>
|
||||
$<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
|
||||
target_compile_options (opts
|
||||
INTERFACE
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
|
||||
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
|
||||
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
|
||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
|
||||
$<$<BOOL:${profile}>:-pg>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
||||
|
||||
target_link_libraries (opts
|
||||
INTERFACE
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
|
||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
|
||||
$<$<BOOL:${profile}>:-pg>
|
||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
||||
|
||||
@@ -63,12 +59,12 @@ if (san)
|
||||
endif ()
|
||||
|
||||
#[===================================================================[
|
||||
rippled transitive library deps via an interface library
|
||||
xrpld transitive library deps via an interface library
|
||||
#]===================================================================]
|
||||
|
||||
add_library (ripple_syslibs INTERFACE)
|
||||
add_library (Ripple::syslibs ALIAS ripple_syslibs)
|
||||
target_link_libraries (ripple_syslibs
|
||||
add_library (xrpl_syslibs INTERFACE)
|
||||
add_library (Xrpl::syslibs ALIAS xrpl_syslibs)
|
||||
target_link_libraries (xrpl_syslibs
|
||||
INTERFACE
|
||||
$<$<BOOL:${MSVC}>:
|
||||
legacy_stdio_definitions.lib
|
||||
@@ -93,9 +89,9 @@ target_link_libraries (ripple_syslibs
|
||||
if (NOT MSVC)
|
||||
set (THREADS_PREFER_PTHREAD_FLAG ON)
|
||||
find_package (Threads)
|
||||
target_link_libraries (ripple_syslibs INTERFACE Threads::Threads)
|
||||
target_link_libraries (xrpl_syslibs INTERFACE Threads::Threads)
|
||||
endif ()
|
||||
|
||||
add_library (ripple_libs INTERFACE)
|
||||
add_library (Ripple::libs ALIAS ripple_libs)
|
||||
target_link_libraries (ripple_libs INTERFACE Ripple::syslibs)
|
||||
add_library (xrpl_libs INTERFACE)
|
||||
add_library (Xrpl::libs ALIAS xrpl_libs)
|
||||
target_link_libraries (xrpl_libs INTERFACE Xrpl::syslibs)
|
||||
@@ -1,5 +1,5 @@
|
||||
#[===================================================================[
|
||||
convenience variables and sanity checks
|
||||
sanity checks
|
||||
#]===================================================================]
|
||||
|
||||
get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
|
||||
@@ -16,39 +16,19 @@ if (NOT is_multiconfig)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
get_directory_property(has_parent PARENT_DIRECTORY)
|
||||
if (has_parent)
|
||||
set (is_root_project OFF)
|
||||
else ()
|
||||
set (is_root_project ON)
|
||||
endif ()
|
||||
|
||||
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang") # both Clang and AppleClang
|
||||
set (is_clang TRUE)
|
||||
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND
|
||||
CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
|
||||
message (FATAL_ERROR "This project requires clang 8 or later")
|
||||
CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16.0)
|
||||
message (FATAL_ERROR "This project requires clang 16 or later")
|
||||
endif ()
|
||||
# TODO min AppleClang version check ?
|
||||
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
|
||||
set (is_gcc TRUE)
|
||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
|
||||
message (FATAL_ERROR "This project requires GCC 8 or later")
|
||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 12.0)
|
||||
message (FATAL_ERROR "This project requires GCC 12 or later")
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
|
||||
set (is_linux TRUE)
|
||||
else ()
|
||||
set (is_linux FALSE)
|
||||
endif ()
|
||||
|
||||
if ("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
|
||||
set (is_ci TRUE)
|
||||
else ()
|
||||
set (is_ci FALSE)
|
||||
endif ()
|
||||
|
||||
# check for in-source build and fail
|
||||
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
|
||||
message (FATAL_ERROR "Builds (in-source) are not allowed in "
|
||||
@@ -61,8 +41,8 @@ if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
|
||||
endif ()
|
||||
|
||||
if (NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
|
||||
message (FATAL_ERROR "Rippled requires a 64 bit target architecture.\n"
|
||||
"The most likely cause of this warning is trying to build rippled with a 32-bit OS.")
|
||||
message (FATAL_ERROR "Xrpld requires a 64 bit target architecture.\n"
|
||||
"The most likely cause of this warning is trying to build xrpld with a 32-bit OS.")
|
||||
endif ()
|
||||
|
||||
if (APPLE AND NOT HOMEBREW)
|
||||
@@ -1,10 +1,25 @@
|
||||
#[===================================================================[
|
||||
declare user options/settings
|
||||
declare options and variables
|
||||
#]===================================================================]
|
||||
|
||||
include(ProcessorCount)
|
||||
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
|
||||
set (is_linux TRUE)
|
||||
else()
|
||||
set(is_linux FALSE)
|
||||
endif()
|
||||
|
||||
ProcessorCount(PROCESSOR_COUNT)
|
||||
if("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
|
||||
set(is_ci TRUE)
|
||||
else()
|
||||
set(is_ci FALSE)
|
||||
endif()
|
||||
|
||||
get_directory_property(has_parent PARENT_DIRECTORY)
|
||||
if(has_parent)
|
||||
set(is_root_project OFF)
|
||||
else()
|
||||
set(is_root_project ON)
|
||||
endif()
|
||||
|
||||
option(assert "Enables asserts, even in release builds" OFF)
|
||||
|
||||
@@ -25,31 +40,30 @@ if(unity)
|
||||
endif()
|
||||
set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
|
||||
endif()
|
||||
|
||||
if(is_clang AND is_linux)
|
||||
option(voidstar "Enable Antithesis instrumentation." OFF)
|
||||
endif()
|
||||
|
||||
if(is_gcc OR is_clang)
|
||||
include(ProcessorCount)
|
||||
ProcessorCount(PROCESSOR_COUNT)
|
||||
|
||||
option(coverage "Generates coverage info." OFF)
|
||||
option(profile "Add profiling flags" OFF)
|
||||
set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
|
||||
"Unit tests parallelism for the purpose of coverage report.")
|
||||
set(coverage_format "html-details" CACHE STRING
|
||||
"Output format of the coverage report.")
|
||||
set(coverage_extra_args "" CACHE STRING
|
||||
"Additional arguments to pass to gcovr.")
|
||||
set(coverage_test "" CACHE STRING
|
||||
"On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
|
||||
if(coverage_test AND NOT coverage)
|
||||
set(coverage ON CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
option(wextra "compile with extra gcc/clang warnings enabled" ON)
|
||||
else()
|
||||
set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||
endif()
|
||||
|
||||
if(is_linux)
|
||||
option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
|
||||
option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF)
|
||||
option(static "link protobuf, openssl, libc++, and boost statically" ON)
|
||||
option(perf "Enables flags that assist with perf recording" OFF)
|
||||
option(use_gold "enables detection of gold (binutils) linker" ON)
|
||||
@@ -58,17 +72,19 @@ else()
|
||||
# we are not ready to allow shared-libs on windows because it would require
|
||||
# export declarations. On macos it's more feasible, but static openssl
|
||||
# produces odd linker errors, thus we disable shared lib builds for now.
|
||||
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
|
||||
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared xrpl libraries - OFF for win/macos" FORCE)
|
||||
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
|
||||
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
|
||||
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
|
||||
set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
|
||||
endif()
|
||||
|
||||
if(is_clang)
|
||||
option(use_lld "enables detection of lld linker" ON)
|
||||
else()
|
||||
set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
|
||||
endif()
|
||||
|
||||
option(jemalloc "Enables jemalloc for heap profiling" OFF)
|
||||
option(werr "treat warnings as errors" OFF)
|
||||
option(local_protobuf
|
||||
@@ -102,38 +118,26 @@ if(san)
|
||||
message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
|
||||
endif()
|
||||
endif()
|
||||
set(container_label "" CACHE STRING "tag to use for package building containers")
|
||||
option(packages_only
|
||||
"ONLY generate package building targets. This is special use-case and almost \
|
||||
certainly not what you want. Use with caution as you won't be able to build \
|
||||
any compiled targets locally." OFF)
|
||||
option(have_package_container
|
||||
"Sometimes you already have the tagged container you want to use for package \
|
||||
building and you don't want docker to rebuild it. This flag will detach the \
|
||||
dependency of the package build from the container build. It's an advanced \
|
||||
use case and most likely you should not be touching this flag." OFF)
|
||||
|
||||
# the remaining options are obscure and rarely used
|
||||
option(beast_no_unit_test_inline
|
||||
"Prevents unit test definitions from being inserted into global table"
|
||||
OFF)
|
||||
option(single_io_service_thread
|
||||
"Restricts the number of threads calling io_service::run to one. \
|
||||
"Restricts the number of threads calling io_context::run to one. \
|
||||
This can be useful when debugging."
|
||||
OFF)
|
||||
option(boost_show_deprecated
|
||||
"Allow boost to fail on deprecated usage. Only useful if you're trying\
|
||||
to find deprecated calls."
|
||||
OFF)
|
||||
option(beast_hashers
|
||||
"Use local implementations for sha/ripemd hashes (experimental, not recommended)"
|
||||
OFF)
|
||||
|
||||
if(WIN32)
|
||||
option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
|
||||
else()
|
||||
set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
|
||||
endif()
|
||||
|
||||
if(coverage)
|
||||
message(STATUS "coverage build requested - forcing Debug build")
|
||||
set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
|
||||
cmake/XrplValidatorKeys.cmake (new file, 20 lines)
@@ -0,0 +1,20 @@
option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)

if (validator_keys)
  git_branch (current_branch)
  # default to tracking VK master branch unless we are on release
  if (NOT (current_branch STREQUAL "release"))
    set (current_branch "master")
  endif ()
  message (STATUS "Tracking ValidatorKeys branch: ${current_branch}")

  FetchContent_Declare (
    validator_keys
    GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
    GIT_TAG "${current_branch}"
  )
  FetchContent_MakeAvailable(validator_keys)
  set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
  install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})

endif ()
@@ -5,11 +5,11 @@
|
||||
file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO)
|
||||
foreach(line_ ${BUILD_INFO})
|
||||
if(line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
|
||||
set(rippled_version ${CMAKE_MATCH_1})
|
||||
set(xrpld_version ${CMAKE_MATCH_1})
|
||||
endif()
|
||||
endforeach()
|
||||
if(rippled_version)
|
||||
message(STATUS "rippled version: ${rippled_version}")
|
||||
if(xrpld_version)
|
||||
message(STATUS "xrpld version: ${xrpld_version}")
|
||||
else()
|
||||
message(FATAL_ERROR "unable to determine rippled version")
|
||||
message(FATAL_ERROR "unable to determine xrpld version")
|
||||
endif()
|
||||
@@ -12,16 +12,10 @@ find_package(Boost 1.82 REQUIRED
|
||||
thread
|
||||
)
|
||||
|
||||
add_library(ripple_boost INTERFACE)
|
||||
add_library(Ripple::boost ALIAS ripple_boost)
|
||||
if(XCODE)
|
||||
target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
|
||||
target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
|
||||
else()
|
||||
target_include_directories(ripple_boost SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
|
||||
endif()
|
||||
add_library(xrpl_boost INTERFACE)
|
||||
add_library(Xrpl::boost ALIAS xrpl_boost)
|
||||
|
||||
target_link_libraries(ripple_boost
|
||||
target_link_libraries(xrpl_boost
|
||||
INTERFACE
|
||||
Boost::headers
|
||||
Boost::chrono
|
||||
@@ -30,12 +24,13 @@ target_link_libraries(ripple_boost
|
||||
Boost::date_time
|
||||
Boost::filesystem
|
||||
Boost::json
|
||||
Boost::process
|
||||
Boost::program_options
|
||||
Boost::regex
|
||||
Boost::system
|
||||
Boost::thread)
|
||||
if(Boost_COMPILER)
|
||||
target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
|
||||
target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking)
|
||||
endif()
|
||||
if(san AND is_clang)
|
||||
# TODO: gcc does not support -fsanitize-blacklist...can we do something else
|
||||
|
||||
conan.lock (new file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
{
|
||||
"version": "0.5",
|
||||
"requires": [
|
||||
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
|
||||
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
|
||||
"wasmi/0.42.1#2a96357d4e6bf40dfe201106d849c24f%1764802092.014",
|
||||
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
|
||||
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
|
||||
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246",
|
||||
"secp256k1/0.7.0#9c4ab67bdc3860c16ea5b36aed8f74ea%1765202256.763",
|
||||
"rocksdb/10.5.1#4a197eca381a3e5ae8adf8cffa5aacd0%1762797952.535",
|
||||
"re2/20230301#ca3b241baec15bd31ea9187150e0b333%1764175362.029",
|
||||
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1764863245.83",
|
||||
"openssl/3.5.4#a1d5835cc6ed5c5b8f3cd5b9b5d24205%1760106486.594",
|
||||
"nudb/2.0.9#fb8dfd1a5557f5e0528114c2da17721e%1763150366.909",
|
||||
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
|
||||
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
|
||||
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1756230911.03",
|
||||
"libarchive/3.8.1#ffee18995c706e02bf96e7a2f7042e0d%1764175360.142",
|
||||
"jemalloc/5.3.0#e951da9cf599e956cebc117880d2d9f8%1729241615.244",
|
||||
"grpc/1.72.0#f244a57bff01e708c55a1100b12e1589%1763158050.628",
|
||||
"ed25519/2015.03#ae761bdc52730a843f0809bdf6c1b1f6%1764270189.893",
|
||||
"doctest/2.4.12#eb9fb352fb2fdfc8abb17ec270945165%1762797941.757",
|
||||
"date/3.0.4#862e11e80030356b53c2c38599ceb32b%1763584497.32",
|
||||
"c-ares/1.34.5#5581c2b62a608b40bb85d965ab3ec7c8%1764175359.429",
|
||||
"bzip2/1.0.8#c470882369c2d95c5c77e970c0c7e321%1764175359.429",
|
||||
"boost/1.88.0#8852c0b72ce8271fb8ff7c53456d4983%1756223752.326",
|
||||
"abseil/20250127.0#9e8e8cfc89a1324139fc0ee3bd4d8c8c%1753819045.301"
|
||||
],
|
||||
"build_requires": [
|
||||
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
|
||||
"strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1756234281.733",
|
||||
"protobuf/6.32.1#f481fd276fc23a33b85a3ed1e898b693%1764863245.83",
|
||||
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1756234232.901",
|
||||
"msys2/cci.latest#1996656c3c98e5765b25b60ff5cf77b4%1764840888.758",
|
||||
"m4/1.4.19#70dc8bbb33e981d119d2acc0175cf381%1763158052.846",
|
||||
"cmake/4.2.0#ae0a44f44a1ef9ab68fd4b3e9a1f8671%1764175359.44",
|
||||
"cmake/3.31.10#313d16a1aa16bbdb2ca0792467214b76%1764175359.429",
|
||||
"b2/5.3.3#107c15377719889654eb9a162a673975%1756234226.28",
|
||||
"automake/1.16.5#b91b7c384c3deaa9d535be02da14d04f%1755524470.56",
|
||||
"autoconf/2.71#51077f068e61700d65bb05541ea1e4b0%1731054366.86",
|
||||
"abseil/20250127.0#9e8e8cfc89a1324139fc0ee3bd4d8c8c%1753819045.301"
|
||||
],
|
||||
"python_requires": [],
|
||||
"overrides": {
|
||||
"protobuf/5.27.0": [
|
||||
"protobuf/6.32.1"
|
||||
],
|
||||
"lz4/1.9.4": [
|
||||
"lz4/1.10.0"
|
||||
],
|
||||
"boost/1.83.0": [
|
||||
"boost/1.88.0"
|
||||
],
|
||||
"sqlite3/3.44.2": [
|
||||
"sqlite3/3.49.1"
|
||||
],
|
||||
"lz4/[>=1.9.4 <2]": [
|
||||
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504"
|
||||
]
|
||||
},
|
||||
"config_requires": []
|
||||
}
|
||||
conan/global.conf (new file, 5 lines)
@@ -0,0 +1,5 @@
# Global configuration for Conan. This is used to set the number of parallel
# downloads and uploads.
core:non_interactive=True
core.download:parallel={{ os.cpu_count() }}
core.upload:parallel={{ os.cpu_count() }}
conan/lockfile/README.md (new file, 12 lines)
@@ -0,0 +1,12 @@
# Conan lockfile

To achieve reproducible dependencies, we use a [Conan lockfile](https://docs.conan.io/2/tutorial/versioning/lockfiles.html).

The `conan.lock` file in the repository contains a "snapshot" of the current
dependencies. It is implicitly used when running `conan` commands, so you don't
need to specify it.

You have to update this file every time you add a new dependency or change a
revision or version of an existing dependency.

To update a lockfile, run from the repository root: `./conan/lockfile/regenerate.sh`
conan/lockfile/linux.profile (new file, 8 lines)
@@ -0,0 +1,8 @@
[settings]
arch=x86_64
build_type=Release
compiler=gcc
compiler.cppstd=20
compiler.libcxx=libstdc++11
compiler.version=13
os=Linux
conan/lockfile/macos.profile (new file, 8 lines)
@@ -0,0 +1,8 @@
[settings]
arch=armv8
build_type=Release
compiler=apple-clang
compiler.cppstd=20
compiler.libcxx=libc++
compiler.version=17.0
os=Macos
conan/lockfile/regenerate.sh (new executable file, 36 lines)
@@ -0,0 +1,36 @@
#!/usr/bin/env bash

set -ex

TEMP_DIR=$(mktemp -d)
trap "rm -rf $TEMP_DIR" EXIT

echo "Using temporary CONAN_HOME: $TEMP_DIR"

# We use a temporary Conan home to avoid polluting the user's existing Conan
# configuration and to avoid using the local cache (which leads to non-reproducible lockfiles).
export CONAN_HOME="$TEMP_DIR"

# Ensure that the xrplf remote is the first to be consulted, so any recipes we
# patched are used. We also add it here to avoid creating a huge diff when the
# official Conan Center Index is updated.
conan remote add --force --index 0 xrplf https://conan.ripplex.io

# Delete any existing lockfile.
rm -f conan.lock

# Create a new lockfile that is compatible with Linux, macOS, and Windows. The
# first create command will create a new lockfile, while the subsequent create
# commands will merge any additional dependencies into the created lockfile.
conan lock create . \
    --options '&:jemalloc=True' \
    --options '&:rocksdb=True' \
    --profile:all=conan/lockfile/linux.profile
conan lock create . \
    --options '&:jemalloc=True' \
    --options '&:rocksdb=True' \
    --profile:all=conan/lockfile/macos.profile
conan lock create . \
    --options '&:jemalloc=True' \
    --options '&:rocksdb=True' \
    --profile:all=conan/lockfile/windows.profile
conan/lockfile/windows.profile (new file, 9 lines)
@@ -0,0 +1,9 @@
[settings]
arch=x86_64
build_type=Release
compiler=msvc
compiler.cppstd=20
compiler.runtime=dynamic
compiler.runtime_type=Release
compiler.version=194
os=Windows
@@ -20,18 +20,6 @@ compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}
|
||||
{% endif %}
|
||||
|
||||
[conf]
|
||||
{% if compiler == "clang" and compiler_version >= 19 %}
|
||||
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
{% endif %}
|
||||
{% if compiler == "apple-clang" and compiler_version >= 17 %}
|
||||
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
{% endif %}
|
||||
{% if compiler == "clang" and compiler_version == 16 %}
|
||||
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
|
||||
{% endif %}
|
||||
{% if compiler == "gcc" and compiler_version < 13 %}
|
||||
tools.build:cxxflags=['-Wno-restrict']
|
||||
tools.build:cxxflags+=['-Wno-restrict']
|
||||
{% endif %}
|
||||
|
||||
[tool_requires]
|
||||
!cmake/*: cmake/[>=3 <4]
|
||||
|
||||
conanfile.py (291 lines changed)
@@ -1,147 +1,170 @@
|
||||
from conan import ConanFile, __version__ as conan_version
|
||||
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
|
||||
import re
|
||||
|
||||
class Xrpl(ConanFile):
|
||||
name = 'xrpl'
|
||||
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
|
||||
|
||||
license = 'ISC'
|
||||
author = 'John Freeman <jfreeman@ripple.com>'
|
||||
url = 'https://github.com/xrplf/rippled'
|
||||
description = 'The XRP Ledger'
|
||||
settings = 'os', 'compiler', 'build_type', 'arch'
|
||||
from conan import ConanFile
|
||||
from conan import __version__ as conan_version
|
||||
|
||||
|
||||
class Xrpl(ConanFile):
|
||||
name = "xrpl"
|
||||
|
||||
license = "ISC"
|
||||
author = "John Freeman <jfreeman@ripple.com>"
|
||||
url = "https://github.com/xrplf/rippled"
|
||||
description = "The XRP Ledger"
|
||||
settings = "os", "compiler", "build_type", "arch"
|
||||
options = {
|
||||
'assertions': [True, False],
|
||||
'coverage': [True, False],
|
||||
'fPIC': [True, False],
|
||||
'jemalloc': [True, False],
|
||||
'rocksdb': [True, False],
|
||||
'shared': [True, False],
|
||||
'static': [True, False],
|
||||
'tests': [True, False],
|
||||
'unity': [True, False],
|
||||
'xrpld': [True, False],
|
||||
"assertions": [True, False],
|
||||
"coverage": [True, False],
|
||||
"fPIC": [True, False],
|
||||
"jemalloc": [True, False],
|
||||
"rocksdb": [True, False],
|
||||
"shared": [True, False],
|
||||
"static": [True, False],
|
||||
"tests": [True, False],
|
||||
"unity": [True, False],
|
||||
"xrpld": [True, False],
|
||||
}
|
||||
|
||||
requires = [
|
||||
'grpc/1.50.1',
|
||||
'libarchive/3.8.1',
|
||||
'nudb/2.0.9',
|
||||
'openssl/1.1.1w',
|
||||
'soci/4.0.3',
|
||||
'zlib/1.3.1',
|
||||
"ed25519/2015.03",
|
||||
"grpc/1.72.0",
|
||||
"libarchive/3.8.1",
|
||||
"nudb/2.0.9",
|
||||
"openssl/3.5.4",
|
||||
"secp256k1/0.7.0",
|
||||
"soci/4.0.3",
|
||||
"wasmi/0.42.1",
|
||||
"zlib/1.3.1",
|
||||
]
|
||||
|
||||
test_requires = [
|
||||
'doctest/2.4.11',
|
||||
"doctest/2.4.12",
|
||||
]
|
||||
|
||||
tool_requires = [
|
||||
'protobuf/3.21.12',
|
||||
"protobuf/6.32.1",
|
||||
]
|
||||
|
||||
default_options = {
|
||||
'assertions': False,
|
||||
'coverage': False,
|
||||
'fPIC': True,
|
||||
'jemalloc': False,
|
||||
'rocksdb': True,
|
||||
'shared': False,
|
||||
'static': True,
|
||||
'tests': False,
|
||||
'unity': False,
|
||||
'xrpld': False,
|
||||
|
||||
'date/*:header_only': True,
|
||||
'grpc/*:shared': False,
|
||||
'grpc/*:secure': True,
|
||||
'libarchive/*:shared': False,
|
||||
'libarchive/*:with_acl': False,
|
||||
'libarchive/*:with_bzip2': False,
|
||||
'libarchive/*:with_cng': False,
|
||||
'libarchive/*:with_expat': False,
|
||||
'libarchive/*:with_iconv': False,
|
||||
'libarchive/*:with_libxml2': False,
|
||||
'libarchive/*:with_lz4': True,
|
||||
'libarchive/*:with_lzma': False,
|
||||
'libarchive/*:with_lzo': False,
|
||||
'libarchive/*:with_nettle': False,
|
||||
'libarchive/*:with_openssl': False,
|
||||
'libarchive/*:with_pcreposix': False,
|
||||
'libarchive/*:with_xattr': False,
|
||||
'libarchive/*:with_zlib': False,
|
||||
'lz4/*:shared': False,
|
||||
'openssl/*:shared': False,
|
||||
'protobuf/*:shared': False,
|
||||
'protobuf/*:with_zlib': True,
|
||||
'rocksdb/*:enable_sse': False,
|
||||
'rocksdb/*:lite': False,
|
||||
'rocksdb/*:shared': False,
|
||||
'rocksdb/*:use_rtti': True,
|
||||
'rocksdb/*:with_jemalloc': False,
|
||||
'rocksdb/*:with_lz4': True,
|
||||
'rocksdb/*:with_snappy': True,
|
||||
'snappy/*:shared': False,
|
||||
'soci/*:shared': False,
|
||||
'soci/*:with_sqlite3': True,
|
||||
'soci/*:with_boost': True,
|
||||
'xxhash/*:shared': False,
|
||||
"assertions": False,
|
||||
"coverage": False,
|
||||
"fPIC": True,
|
||||
"jemalloc": False,
|
||||
"rocksdb": True,
|
||||
"shared": False,
|
||||
"static": True,
|
||||
"tests": False,
|
||||
"unity": False,
|
||||
"xrpld": False,
|
||||
"date/*:header_only": True,
|
||||
"ed25519/*:shared": False,
|
||||
"grpc/*:shared": False,
|
||||
"grpc/*:secure": True,
|
||||
"grpc/*:codegen": True,
|
||||
"grpc/*:cpp_plugin": True,
|
||||
"grpc/*:csharp_ext": False,
|
||||
"grpc/*:csharp_plugin": False,
|
||||
"grpc/*:node_plugin": False,
|
||||
"grpc/*:objective_c_plugin": False,
|
||||
"grpc/*:php_plugin": False,
|
||||
"grpc/*:python_plugin": False,
|
||||
"grpc/*:ruby_plugin": False,
|
||||
"grpc/*:otel_plugin": False,
|
||||
"libarchive/*:shared": False,
|
||||
"libarchive/*:with_acl": False,
|
||||
"libarchive/*:with_bzip2": False,
|
||||
"libarchive/*:with_cng": False,
|
||||
"libarchive/*:with_expat": False,
|
||||
"libarchive/*:with_iconv": False,
|
||||
"libarchive/*:with_libxml2": False,
|
||||
"libarchive/*:with_lz4": True,
|
||||
"libarchive/*:with_lzma": False,
|
||||
"libarchive/*:with_lzo": False,
|
||||
"libarchive/*:with_nettle": False,
|
||||
"libarchive/*:with_openssl": False,
|
||||
"libarchive/*:with_pcreposix": False,
|
||||
"libarchive/*:with_xattr": False,
|
||||
"libarchive/*:with_zlib": False,
|
||||
"lz4/*:shared": False,
|
||||
"openssl/*:shared": False,
|
||||
"protobuf/*:shared": False,
|
||||
"protobuf/*:with_zlib": True,
|
||||
"rocksdb/*:enable_sse": False,
|
||||
"rocksdb/*:lite": False,
|
||||
"rocksdb/*:shared": False,
|
||||
"rocksdb/*:use_rtti": True,
|
||||
"rocksdb/*:with_jemalloc": False,
|
||||
"rocksdb/*:with_lz4": True,
|
||||
"rocksdb/*:with_snappy": True,
|
||||
"secp256k1/*:shared": False,
|
||||
"snappy/*:shared": False,
|
||||
"soci/*:shared": False,
|
||||
"soci/*:with_sqlite3": True,
|
||||
"soci/*:with_boost": True,
|
||||
"xxhash/*:shared": False,
|
||||
}
|
||||
|
||||
def set_version(self):
|
||||
if self.version is None:
|
||||
path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
|
||||
regex = r'versionString\s?=\s?\"(.*)\"'
|
||||
with open(path, encoding='utf-8') as file:
|
||||
path = f"{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp"
|
||||
regex = r"versionString\s?=\s?\"(.*)\""
|
||||
with open(path, encoding="utf-8") as file:
|
||||
matches = (re.search(regex, line) for line in file)
|
||||
match = next(m for m in matches if m)
|
||||
self.version = match.group(1)
|
||||
|
||||
def configure(self):
|
||||
if self.settings.compiler == 'apple-clang':
|
||||
self.options['boost'].visibility = 'global'
if self.settings.compiler == "apple-clang":
self.options["boost"].visibility = "global"
if self.settings.compiler in ["clang", "gcc"]:
self.options["boost"].without_cobalt = True

def requirements(self):
# Conan 2 requires transitive headers to be specified
transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
self.requires('boost/1.83.0', force=True, **transitive_headers_opt)
self.requires('date/3.0.4', **transitive_headers_opt)
self.requires('lz4/1.10.0', force=True)
self.requires('protobuf/3.21.12', force=True)
self.requires('sqlite3/3.49.1', force=True)
transitive_headers_opt = (
{"transitive_headers": True} if conan_version.split(".")[0] == "2" else {}
)
self.requires("boost/1.88.0", force=True, **transitive_headers_opt)
self.requires("date/3.0.4", **transitive_headers_opt)
self.requires("lz4/1.10.0", force=True)
self.requires("protobuf/6.32.1", force=True)
self.requires("sqlite3/3.49.1", force=True)
if self.options.jemalloc:
self.requires('jemalloc/5.3.0')
self.requires("jemalloc/5.3.0")
if self.options.rocksdb:
self.requires('rocksdb/10.0.1')
self.requires('xxhash/0.8.3', **transitive_headers_opt)
self.requires("rocksdb/10.5.1")
self.requires("xxhash/0.8.3", **transitive_headers_opt)

exports_sources = (
'CMakeLists.txt',
'cfg/*',
'cmake/*',
'external/*',
'include/*',
'src/*',
"CMakeLists.txt",
"cfg/*",
"cmake/*",
"external/*",
"include/*",
"src/*",
)

def layout(self):
cmake_layout(self)
# Fix this setting to follow the default introduced in Conan 1.48
# to align with our build instructions.
self.folders.generators = 'build/generators'
self.folders.generators = "build/generators"

generators = "CMakeDeps"

generators = 'CMakeDeps'
def generate(self):
tc = CMakeToolchain(self)
tc.variables['tests'] = self.options.tests
tc.variables['assert'] = self.options.assertions
tc.variables['coverage'] = self.options.coverage
tc.variables['jemalloc'] = self.options.jemalloc
tc.variables['rocksdb'] = self.options.rocksdb
tc.variables['BUILD_SHARED_LIBS'] = self.options.shared
tc.variables['static'] = self.options.static
tc.variables['unity'] = self.options.unity
tc.variables['xrpld'] = self.options.xrpld
tc.variables["tests"] = self.options.tests
tc.variables["assert"] = self.options.assertions
tc.variables["coverage"] = self.options.coverage
tc.variables["jemalloc"] = self.options.jemalloc
tc.variables["rocksdb"] = self.options.rocksdb
tc.variables["BUILD_SHARED_LIBS"] = self.options.shared
tc.variables["static"] = self.options.static
tc.variables["unity"] = self.options.unity
tc.variables["xrpld"] = self.options.xrpld
tc.generate()

def build(self):
@@ -156,39 +179,41 @@ class Xrpl(ConanFile):
cmake.install()

def package_info(self):
libxrpl = self.cpp_info.components['libxrpl']
libxrpl = self.cpp_info.components["libxrpl"]
libxrpl.libs = [
'xrpl',
'xrpl.libpb',
'ed25519',
'secp256k1',
"xrpl",
"xrpl.libpb",
]
# TODO: Fix the protobufs to include each other relative to
# `include/`, not `include/ripple/proto/`.
libxrpl.includedirs = ['include', 'include/ripple/proto']
# `include/`, not `include/xrpl/proto/`.
libxrpl.includedirs = ["include", "include/xrpl/proto"]
libxrpl.requires = [
'boost::headers',
'boost::chrono',
'boost::container',
'boost::coroutine',
'boost::date_time',
'boost::filesystem',
'boost::json',
'boost::program_options',
'boost::regex',
'boost::system',
'boost::thread',
'date::date',
'grpc::grpc++',
'libarchive::libarchive',
'lz4::lz4',
'nudb::nudb',
'openssl::crypto',
'protobuf::libprotobuf',
'soci::soci',
'sqlite3::sqlite',
'xxhash::xxhash',
'zlib::zlib',
"boost::headers",
"boost::chrono",
"boost::container",
"boost::coroutine",
"boost::date_time",
"boost::filesystem",
"boost::json",
"boost::program_options",
"boost::process",
"boost::regex",
"boost::system",
"boost::thread",
"date::date",
"ed25519::ed25519",
"grpc::grpc++",
"libarchive::libarchive",
"lz4::lz4",
"nudb::nudb",
"openssl::crypto",
"protobuf::libprotobuf",
"soci::soci",
"secp256k1::secp256k1",
"sqlite3::sqlite",
"wasmi::wasmi",
"xxhash::xxhash",
"zlib::zlib",
]
if self.options.rocksdb:
libxrpl.requires.append('rocksdb::librocksdb')
libxrpl.requires.append("rocksdb::librocksdb")

docs/.gitignore (vendored, 3 lines removed)
@@ -1,3 +0,0 @@
html
temp
out.txt
@@ -134,7 +134,7 @@ validation messages (_PAV_) received from each validator on the node's UNL. Note
that the node will only count the validation messages that agree with its own
validations.

We define the **PAV** as the **P**ercentage of **A**greed **V**alidation
We define the **PAV** as the Percentage of Agreed Validation
messages received for the last N ledgers, where N = 256 by default.

When the PAV drops below the **_low-water mark_**, the validator is considered
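To make the PAV definition in this hunk concrete, here is a minimal, self-contained sketch (illustrative only, not rippled's code; `computePav` and the `agreed` vector are made-up names) that computes the percentage of agreed validation messages over the last N = 256 ledgers:

```{.cpp}
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <vector>

// agreed[i] is true if the validator's message for ledger i agreed with our
// own validation. Only the most recent `window` ledgers are counted.
double computePav(std::vector<bool> const& agreed, std::size_t window = 256)
{
    std::size_t const n = std::min(window, agreed.size());
    if (n == 0)
        return 0.0;
    std::size_t hits = 0;
    for (std::size_t i = agreed.size() - n; i < agreed.size(); ++i)
        hits += agreed[i] ? 1 : 0;
    return 100.0 * static_cast<double>(hits) / static_cast<double>(n);
}

int main()
{
    std::vector<bool> agreed(256, true);
    agreed[0] = agreed[1] = false;  // two ledgers where the validator disagreed
    std::cout << computePav(agreed) << "%\n";  // prints 99.21875%
}
```

The resulting percentage is what gets compared against the low-water and high-water marks described in the surrounding document.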
@@ -5,8 +5,8 @@ skinparam roundcorner 20
skinparam maxmessagesize 160

actor "Rippled Start" as RS
participant "Timer" as T
participant "NetworkOPs" as NOP
participant "Timer" as T
participant "NetworkOPs" as NOP
participant "ValidatorList" as VL #lightgreen
participant "Consensus" as GC
participant "ConsensusAdaptor" as CA #lightgreen
@@ -20,7 +20,7 @@ VL -> NOP
NOP -> VL: update trusted validators
activate VL
VL -> VL: re-calculate quorum
hnote over VL#lightgreen: ignore negative listed validators\nwhen calculate quorum
hnote over VL#lightgreen: ignore negative listed validators\nwhen calculate quorum
VL -> NOP
deactivate VL
NOP -> GC: start round
@@ -36,21 +36,21 @@ activate GC
end

alt phase == OPEN
alt should close ledger
alt should close ledger
GC -> GC: phase = ESTABLISH
GC -> CA: onClose
activate CA
alt sqn%256==0
alt sqn%256==0
CA -[#green]> RM: <font color=green>getValidations
CA -[#green]> CA: <font color=green>create UNLModify Tx
hnote over CA#lightgreen: use validatations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
CA -[#green]> CA: <font color=green>create UNLModify Tx
hnote over CA#lightgreen: use validations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
end
CA -> GC
GC -> CA: propose
deactivate CA
end
else phase == ESTABLISH
hnote over GC: receive peer postions
hnote over GC: receive peer positions
GC -> GC : update our position
GC -> CA : propose \n(if position changed)
GC -> GC : check if have consensus
@@ -61,14 +61,14 @@ else phase == ESTABLISH
CA -> CA : build LCL
hnote over CA #lightgreen: copy negative UNL from parent ledger
alt sqn%256==0
CA -[#green]> CA: <font color=green>Adjust negative UNL
CA -[#green]> CA: <font color=green>Adjust negative UNL
CA -[#green]> CA: <font color=green>apply UNLModify Tx
end
CA -> CA : validate and send validation message
activate NOP
CA -> NOP : end consensus and\n<b>begin next consensus round
deactivate NOP
deactivate CA
deactivate CA
hnote over RM: receive validations
end
else phase == ACCEPTED
@@ -76,4 +76,4 @@ else phase == ACCEPTED
end
deactivate GC

@enduml
@enduml

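The green notes in the diagram above describe two behaviours: the quorum recalculation skips validators that are on the negative UNL, and the UNLModify work only runs on flag ledgers (sequence divisible by 256). The following hedged sketch shows those two checks in isolation, with illustrative names and an assumed 80% quorum factor; the exact constant, rounding, and minimum-quorum rules live in rippled's ValidatorList code, not here:

```{.cpp}
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <set>
#include <string>

// Hypothetical helper mirroring "ignore negative listed validators when
// calculating quorum": count only trusted validators that are not on the
// negative UNL, then take an assumed 80% of that effective size.
std::size_t
effectiveQuorum(
    std::set<std::string> const& trusted,
    std::set<std::string> const& negativeUnl)
{
    std::size_t effective = 0;
    for (auto const& v : trusted)
        if (negativeUnl.count(v) == 0)  // skip negative-UNL validators
            ++effective;
    return static_cast<std::size_t>(std::ceil(0.8 * effective));
}

// Flag-ledger gate from the diagram: negative-UNL adjustments and UNLModify
// transactions are only considered when sqn % 256 == 0.
bool isFlagLedger(std::uint32_t seq)
{
    return seq % 256 == 0;
}
```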
@@ -4,7 +4,7 @@ class TimeoutCounter {
#app_ : Application&
}

TimeoutCounter o-- "1" Application
TimeoutCounter o-- "1" Application
': app_

Stoppable <.. Application
@@ -14,13 +14,13 @@ class Application {
-m_inboundLedgers : uptr<InboundLedgers>
}

Application *-- "1" LedgerReplayer
Application *-- "1" LedgerReplayer
': m_ledgerReplayer
Application *-- "1" InboundLedgers
Application *-- "1" InboundLedgers
': m_inboundLedgers

Stoppable <.. InboundLedgers
Application "1" --o InboundLedgers
Application "1" --o InboundLedgers
': app_

class InboundLedgers {
@@ -28,9 +28,9 @@ class InboundLedgers {
}

Stoppable <.. LedgerReplayer
InboundLedgers "1" --o LedgerReplayer
InboundLedgers "1" --o LedgerReplayer
': inboundLedgers_
Application "1" --o LedgerReplayer
Application "1" --o LedgerReplayer
': app_

class LedgerReplayer {
@@ -42,17 +42,17 @@ class LedgerReplayer {
-skipLists_ : hash_map<u256, wptr<SkipListAcquire>>
}

LedgerReplayer *-- LedgerReplayTask
LedgerReplayer *-- LedgerReplayTask
': tasks_
LedgerReplayer o-- LedgerDeltaAcquire
LedgerReplayer o-- LedgerDeltaAcquire
': deltas_
LedgerReplayer o-- SkipListAcquire
LedgerReplayer o-- SkipListAcquire
': skipLists_

TimeoutCounter <.. LedgerReplayTask
InboundLedgers "1" --o LedgerReplayTask
InboundLedgers "1" --o LedgerReplayTask
': inboundLedgers_
LedgerReplayer "1" --o LedgerReplayTask
LedgerReplayer "1" --o LedgerReplayTask
': replayer_

class LedgerReplayTask {
@@ -63,15 +63,15 @@ class LedgerReplayTask {
+addDelta(sptr<LedgerDeltaAcquire>)
}

LedgerReplayTask *-- "1" SkipListAcquire
LedgerReplayTask *-- "1" SkipListAcquire
': skipListAcquirer_
LedgerReplayTask *-- LedgerDeltaAcquire
LedgerReplayTask *-- LedgerDeltaAcquire
': deltas_

TimeoutCounter <.. SkipListAcquire
InboundLedgers "1" --o SkipListAcquire
InboundLedgers "1" --o SkipListAcquire
': inboundLedgers_
LedgerReplayer "1" --o SkipListAcquire
LedgerReplayer "1" --o SkipListAcquire
': replayer_
LedgerReplayTask --o SkipListAcquire : implicit via callback

@@ -83,9 +83,9 @@ class SkipListAcquire {
}

TimeoutCounter <.. LedgerDeltaAcquire
InboundLedgers "1" --o LedgerDeltaAcquire
InboundLedgers "1" --o LedgerDeltaAcquire
': inboundLedgers_
LedgerReplayer "1" --o LedgerDeltaAcquire
LedgerReplayer "1" --o LedgerDeltaAcquire
': replayer_
LedgerReplayTask --o LedgerDeltaAcquire : implicit via callback

@@ -95,4 +95,4 @@ class LedgerDeltaAcquire {
-replayer_ : LedgerReplayer&
-dataReadyCallbacks_ : vector<callback>
}
@enduml
@enduml

@@ -38,7 +38,7 @@ deactivate lr
loop
lr -> lda : make_shared(ledgerId, ledgerSeq)
return delta
lr -> lrt : addDelta(delta)
lr -> lrt : addDelta(delta)
lrt -> lda : addDataCallback(callback)
return
return
@@ -62,7 +62,7 @@ deactivate peer
lr -> lda : processData(ledgerHeader, txns)
lda -> lda : notify()
note over lda: call the callbacks added by\naddDataCallback(callback).
lda -> lrt : callback(ledgerId)
lda -> lrt : callback(ledgerId)
lrt -> lrt : deltaReady(ledgerId)
lrt -> lrt : tryAdvance()
loop as long as child can be built
@@ -82,4 +82,4 @@ deactivate peer
deactivate peer


@enduml
@enduml

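The sequence above hinges on a simple callback contract: the task registers a callback via `addDataCallback`, and once `processData` has the ledger header and transactions, `notify` invokes every registered callback, which in turn drives `deltaReady` and `tryAdvance`. The stripped-down sketch below (illustrative class and method names, not the real rippled types) shows that contract in isolation:

```{.cpp}
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Illustrative only: a minimal version of the "addDataCallback / notify"
// pattern from the diagram, not the actual LedgerDeltaAcquire class.
class DeltaAcquireSketch
{
public:
    using Callback = std::function<void(std::string const& ledgerId)>;

    void addDataCallback(Callback cb)
    {
        callbacks_.push_back(std::move(cb));
    }

    void processData(std::string const& ledgerId)
    {
        dataReady_ = true;
        notify(ledgerId);  // fire every callback registered so far
    }

private:
    void notify(std::string const& ledgerId)
    {
        for (auto const& cb : callbacks_)
            cb(ledgerId);
    }

    bool dataReady_ = false;
    std::vector<Callback> callbacks_;
};

int main()
{
    DeltaAcquireSketch delta;
    delta.addDataCallback([](std::string const& id) {
        std::cout << "deltaReady(" << id << "), then tryAdvance()\n";
    });
    delta.processData("ABCD1234");
}
```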
@@ -54,8 +54,8 @@ There is a `docs` target in the CMake configuration.
```
mkdir build
cd build
cmake ..
cmake --build . --target docs
cmake -Donly_docs=ON ..
cmake --build . --target docs --parallel
```

The output will be in `build/docs/html`.

@@ -189,7 +189,7 @@ validations. It checks this on every call to `timerEntry`.
- _Wrong Ledger_ indicates the node is not working on the correct prior ledger
and does not have it available. It requests that ledger from the network, but
continues to work towards consensus this round while waiting. If it had been
_proposing_, it will send a special "bowout" proposal to its peers to indicate
_proposing_, it will send a special "bow-out" proposal to its peers to indicate
its change in mode for the rest of this round. For the duration of the round,
it defers to peer positions for determining the consensus outcome as if it
were just _observing_.
@@ -515,7 +515,7 @@ are excerpts of the generic consensus implementation and of helper types that wi
interact with the concrete implementing class.

```{.cpp}
// Represents a transction under dispute this round
// Represents a transaction under dispute this round
template <class Tx_t, class NodeID_t> class DisputedTx;

// Represents how the node participates in Consensus this round

@@ -1,3 +1,3 @@
---
DisableFormat: true
SortIncludes: false
SortIncludes: Never
external/README.md (vendored, 5 changed lines)
@@ -1,10 +1,7 @@
# External Conan recipes

The subdirectories in this directory contain copies of external libraries used
by rippled.
The subdirectories in this directory contain external libraries used by rippled.

| Folder | Upstream | Description |
| :--------------- | :------------------------------------------------------------- | :------------------------------------------------------------------------------------------- |
| `antithesis-sdk` | [Project](https://github.com/antithesishq/antithesis-sdk-cpp/) | [Antithesis](https://antithesis.com/docs/using_antithesis/sdk/cpp/overview.html) SDK for C++ |
| `ed25519-donna` | [Project](https://github.com/floodyberry/ed25519-donna) | [Ed25519](http://ed25519.cr.yp.to/) digital signatures |
| `secp256k1` | [Project](https://github.com/bitcoin-core/secp256k1) | ECDSA digital signatures using the **secp256k1** curve |

external/ed25519-donna/CMakeLists.txt (vendored, 51 lines removed)
@@ -1,51 +0,0 @@
cmake_minimum_required(VERSION 3.11)

project(ed25519
LANGUAGES C
)

if(PROJECT_NAME STREQUAL CMAKE_PROJECT_NAME)
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/output/$<CONFIG>/lib")
endif()

if(NOT TARGET OpenSSL::SSL)
find_package(OpenSSL)
endif()

add_library(ed25519 STATIC
ed25519.c
)
add_library(ed25519::ed25519 ALIAS ed25519)
target_link_libraries(ed25519 PUBLIC OpenSSL::SSL)
if(NOT MSVC)
target_compile_options(ed25519 PRIVATE -Wno-implicit-fallthrough)
endif()

include(GNUInstallDirs)

#[=========================================================[
NOTE for macos:
https://github.com/floodyberry/ed25519-donna/issues/29
our source for ed25519-donna-portable.h has been
patched to workaround this.
#]=========================================================]
target_include_directories(ed25519 PUBLIC
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>
)

install(
TARGETS ed25519
EXPORT ${PROJECT_NAME}-exports
ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
)
install(
EXPORT ${PROJECT_NAME}-exports
DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}"
FILE ${PROJECT_NAME}-targets.cmake
NAMESPACE ${PROJECT_NAME}::
)
install(
FILES ed25519.h
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
)
external/ed25519-donna/README.md (vendored, 182 lines removed)
@@ -1,182 +0,0 @@
|
||||
[ed25519](http://ed25519.cr.yp.to/) is an
|
||||
[Elliptic Curve Digital Signature Algorithm](http://en.wikipedia.org/wiki/Elliptic_Curve_DSA),
|
||||
developed by [Dan Bernstein](http://cr.yp.to/djb.html),
|
||||
[Niels Duif](http://www.nielsduif.nl/),
|
||||
[Tanja Lange](http://hyperelliptic.org/tanja),
|
||||
[Peter Schwabe](http://www.cryptojedi.org/users/peter/),
|
||||
and [Bo-Yin Yang](http://www.iis.sinica.edu.tw/pages/byyang/).
|
||||
|
||||
This project provides performant, portable 32-bit & 64-bit implementations. All implementations are
|
||||
of course constant time in regard to secret data.
|
||||
|
||||
#### Performance
|
||||
|
||||
SSE2 code and benches have not been updated yet. I will do those next.
|
||||
|
||||
Compilers versions are gcc 4.6.3, icc 13.1.1, clang 3.4-1~exp1.
|
||||
|
||||
Batch verification time (in parentheses) is the average time per 1 verification in a batch of 64 signatures. Counts are in thousands of cycles.
|
||||
|
||||
Note that SSE2 performance may be less impressive on AMD & older CPUs with slower SSE ops!
|
||||
|
||||
Visual Studio performance for `ge25519_scalarmult_base_niels` will lag behind a bit until optimized assembler versions of `ge25519_scalarmult_base_choose_niels`
|
||||
are made.
|
||||
|
||||
##### E5200 @ 2.5ghz, march=core2
|
||||
|
||||
<table>
|
||||
<thead><tr><th>Implementation</th><th>Sign</th><th>gcc</th><th>icc</th><th>clang</th><th>Verify</th><th>gcc</th><th>icc</th><th>clang</th></tr></thead>
|
||||
<tbody>
|
||||
<tr><td>ed25519-donna 64bit </td><td></td><td>100k</td><td>110k</td><td>137k</td><td></td><td>327k (144k) </td><td>342k (163k) </td><td>422k (194k) </td></tr>
|
||||
<tr><td>amd64-64-24k </td><td></td><td>102k</td><td> </td><td> </td><td></td><td>355k (158k) </td><td> </td><td> </td></tr>
|
||||
<tr><td>ed25519-donna-sse2 64bit</td><td></td><td>108k</td><td>111k</td><td>116k</td><td></td><td>353k (155k) </td><td>345k (154k) </td><td>360k (161k) </td></tr>
|
||||
<tr><td>amd64-51-32k </td><td></td><td>116k</td><td> </td><td> </td><td></td><td>380k (175k) </td><td> </td><td> </td></tr>
|
||||
<tr><td>ed25519-donna-sse2 32bit</td><td></td><td>147k</td><td>147k</td><td>156k</td><td></td><td>380k (178k) </td><td>381k (173k) </td><td>430k (192k) </td></tr>
|
||||
<tr><td>ed25519-donna 32bit </td><td></td><td>597k</td><td>335k</td><td>380k</td><td></td><td>1693k (720k)</td><td>1052k (453k)</td><td>1141k (493k)</td></tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
##### E3-1270 @ 3.4ghz, march=corei7-avx
|
||||
|
||||
<table>
|
||||
<thead><tr><th>Implementation</th><th>Sign</th><th>gcc</th><th>icc</th><th>clang</th><th>Verify</th><th>gcc</th><th>icc</th><th>clang</th></tr></thead>
|
||||
<tbody>
|
||||
<tr><td>amd64-64-24k </td><td></td><td> 68k</td><td> </td><td> </td><td></td><td>225k (104k) </td><td> </td><td> </td></tr>
|
||||
<tr><td>ed25519-donna 64bit </td><td></td><td> 71k</td><td> 75k</td><td> 90k</td><td></td><td>226k (105k) </td><td>226k (112k) </td><td>277k (125k) </td></tr>
|
||||
<tr><td>amd64-51-32k </td><td></td><td> 72k</td><td> </td><td> </td><td></td><td>218k (107k) </td><td> </td><td> </td></tr>
|
||||
<tr><td>ed25519-donna-sse2 64bit</td><td></td><td> 79k</td><td> 82k</td><td> 92k</td><td></td><td>252k (122k) </td><td>259k (124k) </td><td>282k (131k) </td></tr>
|
||||
<tr><td>ed25519-donna-sse2 32bit</td><td></td><td> 94k</td><td> 95k</td><td>103k</td><td></td><td>296k (146k) </td><td>294k (137k) </td><td>306k (147k) </td></tr>
|
||||
<tr><td>ed25519-donna 32bit </td><td></td><td>525k</td><td>299k</td><td>316k</td><td></td><td>1502k (645k)</td><td>959k (418k) </td><td>954k (416k) </td></tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
#### Compilation
|
||||
|
||||
No configuration is needed **if you are compiling against OpenSSL**.
|
||||
|
||||
##### Hash Options
|
||||
|
||||
If you are not compiling against OpenSSL, you will need a hash function.
|
||||
|
||||
To use a simple/**slow** implementation of SHA-512, use `-DED25519_REFHASH` when compiling `ed25519.c`.
|
||||
This should never be used except to verify the code works when OpenSSL is not available.
|
||||
|
||||
To use a custom hash function, use `-DED25519_CUSTOMHASH` when compiling `ed25519.c` and put your
|
||||
custom hash implementation in ed25519-hash-custom.h. The hash must have a 512bit digest and implement
|
||||
|
||||
struct ed25519_hash_context;
|
||||
|
||||
void ed25519_hash_init(ed25519_hash_context *ctx);
|
||||
void ed25519_hash_update(ed25519_hash_context *ctx, const uint8_t *in, size_t inlen);
|
||||
void ed25519_hash_final(ed25519_hash_context *ctx, uint8_t *hash);
|
||||
void ed25519_hash(uint8_t *hash, const uint8_t *in, size_t inlen);
|
||||
|
||||
##### Random Options
|
||||
|
||||
If you are not compiling against OpenSSL, you will need a random function for batch verification.
|
||||
|
||||
To use a custom random function, use `-DED25519_CUSTOMRANDOM` when compiling `ed25519.c` and put your
|
||||
custom random implementation in ed25519-randombytes-custom.h. The random function must implement:
|
||||
|
||||
void ED25519_FN(ed25519_randombytes_unsafe) (void *p, size_t len);
|
||||
|
||||
Use `-DED25519_TEST` when compiling `ed25519.c` to use a deterministically seeded, non-thread safe CSPRNG
|
||||
variant of Bob Jenkins [ISAAC](http://en.wikipedia.org/wiki/ISAAC_%28cipher%29)
|
||||
|
||||
##### Minor options
|
||||
|
||||
Use `-DED25519_INLINE_ASM` to disable the use of custom assembler routines and instead rely on portable C.
|
||||
|
||||
Use `-DED25519_FORCE_32BIT` to force the use of 32 bit routines even when compiling for 64 bit.
|
||||
|
||||
##### 32-bit
|
||||
|
||||
gcc ed25519.c -m32 -O3 -c
|
||||
|
||||
##### 64-bit
|
||||
|
||||
gcc ed25519.c -m64 -O3 -c
|
||||
|
||||
##### SSE2
|
||||
|
||||
gcc ed25519.c -m32 -O3 -c -DED25519_SSE2 -msse2
|
||||
gcc ed25519.c -m64 -O3 -c -DED25519_SSE2
|
||||
|
||||
clang and icc are also supported
|
||||
|
||||
#### Usage
|
||||
|
||||
To use the code, link against `ed25519.o -mbits` and:
|
||||
|
||||
#include "ed25519.h"
|
||||
|
||||
Add `-lssl -lcrypto` when using OpenSSL (Some systems don't need -lcrypto? It might be trial and error).
|
||||
|
||||
To generate a private key, simply generate 32 bytes from a secure
|
||||
cryptographic source:
|
||||
|
||||
ed25519_secret_key sk;
|
||||
randombytes(sk, sizeof(ed25519_secret_key));
|
||||
|
||||
To generate a public key:
|
||||
|
||||
ed25519_public_key pk;
|
||||
ed25519_publickey(sk, pk);
|
||||
|
||||
To sign a message:
|
||||
|
||||
ed25519_signature sig;
|
||||
ed25519_sign(message, message_len, sk, pk, signature);
|
||||
|
||||
To verify a signature:
|
||||
|
||||
int valid = ed25519_sign_open(message, message_len, pk, signature) == 0;
|
||||
|
||||
To batch verify signatures:
|
||||
|
||||
const unsigned char *mp[num] = {message1, message2..}
|
||||
size_t ml[num] = {message_len1, message_len2..}
|
||||
const unsigned char *pkp[num] = {pk1, pk2..}
|
||||
const unsigned char *sigp[num] = {signature1, signature2..}
|
||||
int valid[num]
|
||||
|
||||
/* valid[i] will be set to 1 if the individual signature was valid, 0 otherwise */
|
||||
int all_valid = ed25519_sign_open_batch(mp, ml, pkp, sigp, num, valid) == 0;
|
||||
|
||||
**Note**: Batch verification uses `ed25519_randombytes_unsafe`, implemented in
|
||||
`ed25519-randombytes.h`, to generate random scalars for the verification code.
|
||||
The default implementation now uses OpenSSLs `RAND_bytes`.
|
||||
|
||||
Unlike the [SUPERCOP](http://bench.cr.yp.to/supercop.html) version, signatures are
|
||||
not appended to messages, and there is no need for padding in front of messages.
|
||||
Additionally, the secret key does not contain a copy of the public key, so it is
|
||||
32 bytes instead of 64 bytes, and the public key must be provided to the signing
|
||||
function.
|
||||
|
||||
##### Curve25519
|
||||
|
||||
Curve25519 public keys can be generated thanks to
|
||||
[Adam Langley](http://www.imperialviolet.org/2013/05/10/fastercurve25519.html)
|
||||
leveraging Ed25519's precomputed basepoint scalar multiplication.
|
||||
|
||||
curved25519_key sk, pk;
|
||||
randombytes(sk, sizeof(curved25519_key));
|
||||
curved25519_scalarmult_basepoint(pk, sk);
|
||||
|
||||
Note the name is curved25519, a combination of curve and ed25519, to prevent
|
||||
name clashes. Performance is slightly faster than short message ed25519
|
||||
signing due to both using the same code for the scalar multiply.
|
||||
|
||||
#### Testing
|
||||
|
||||
Fuzzing against reference implementations is now available. See [fuzz/README](fuzz/README.md).
|
||||
|
||||
Building `ed25519.c` with `-DED25519_TEST` and linking with `test.c` will run basic sanity tests
|
||||
and benchmark each function. `test-batch.c` has been incorporated in to `test.c`.
|
||||
|
||||
`test-internals.c` is standalone and built the same way as `ed25519.c`. It tests the math primitives
|
||||
with extreme values to ensure they function correctly. SSE2 is now supported.
|
||||
|
||||
#### Papers
|
||||
|
||||
[Available on the Ed25519 website](http://ed25519.cr.yp.to/papers.html)
|
||||
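Putting the README's usage snippets above together, the following sketch signs and verifies a message with the API shown there (`ed25519_publickey`, `ed25519_sign`, `ed25519_sign_open`). OpenSSL's `RAND_bytes` is used as a stand-in for the "secure cryptographic source" of key bytes; that choice is this example's assumption, not part of ed25519-donna:

```{.cpp}
// Assumes ed25519-donna is built against OpenSSL and linked as described
// above (ed25519.o plus -lssl -lcrypto).
#include <cstdio>

#include <openssl/rand.h>

#include "ed25519.h"

int main()
{
    ed25519_secret_key sk;
    ed25519_public_key pk;
    ed25519_signature sig;

    // RAND_bytes is our stand-in for "32 bytes from a secure source".
    if (RAND_bytes(sk, sizeof(sk)) != 1)
        return 1;

    ed25519_publickey(sk, pk);  // derive the public key from the secret key

    unsigned char const msg[] = "hello, rippled";
    ed25519_sign(msg, sizeof(msg), sk, pk, sig);

    bool const valid = ed25519_sign_open(msg, sizeof(msg), pk, sig) == 0;
    std::printf("signature %s\n", valid ? "valid" : "invalid");
    return valid ? 0 : 1;
}
```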
external/ed25519-donna/curve25519-donna-32bit.h (vendored, 579 lines removed)
@@ -1,579 +0,0 @@
|
||||
/*
|
||||
Public domain by Andrew M. <liquidsun@gmail.com>
|
||||
See: https://github.com/floodyberry/curve25519-donna
|
||||
|
||||
32 bit integer curve25519 implementation
|
||||
*/
|
||||
|
||||
typedef uint32_t bignum25519[10];
|
||||
typedef uint32_t bignum25519align16[12];
|
||||
|
||||
static const uint32_t reduce_mask_25 = (1 << 25) - 1;
|
||||
static const uint32_t reduce_mask_26 = (1 << 26) - 1;
|
||||
|
||||
|
||||
/* out = in */
|
||||
DONNA_INLINE static void
|
||||
curve25519_copy(bignum25519 out, const bignum25519 in) {
|
||||
out[0] = in[0];
|
||||
out[1] = in[1];
|
||||
out[2] = in[2];
|
||||
out[3] = in[3];
|
||||
out[4] = in[4];
|
||||
out[5] = in[5];
|
||||
out[6] = in[6];
|
||||
out[7] = in[7];
|
||||
out[8] = in[8];
|
||||
out[9] = in[9];
|
||||
}
|
||||
|
||||
/* out = a + b */
|
||||
DONNA_INLINE static void
|
||||
curve25519_add(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
out[0] = a[0] + b[0];
|
||||
out[1] = a[1] + b[1];
|
||||
out[2] = a[2] + b[2];
|
||||
out[3] = a[3] + b[3];
|
||||
out[4] = a[4] + b[4];
|
||||
out[5] = a[5] + b[5];
|
||||
out[6] = a[6] + b[6];
|
||||
out[7] = a[7] + b[7];
|
||||
out[8] = a[8] + b[8];
|
||||
out[9] = a[9] + b[9];
|
||||
}
|
||||
|
||||
DONNA_INLINE static void
|
||||
curve25519_add_after_basic(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
uint32_t c;
|
||||
out[0] = a[0] + b[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
|
||||
out[1] = a[1] + b[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
|
||||
out[2] = a[2] + b[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
|
||||
out[3] = a[3] + b[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
|
||||
out[4] = a[4] + b[4] + c; c = (out[4] >> 26); out[4] &= reduce_mask_26;
|
||||
out[5] = a[5] + b[5] + c; c = (out[5] >> 25); out[5] &= reduce_mask_25;
|
||||
out[6] = a[6] + b[6] + c; c = (out[6] >> 26); out[6] &= reduce_mask_26;
|
||||
out[7] = a[7] + b[7] + c; c = (out[7] >> 25); out[7] &= reduce_mask_25;
|
||||
out[8] = a[8] + b[8] + c; c = (out[8] >> 26); out[8] &= reduce_mask_26;
|
||||
out[9] = a[9] + b[9] + c; c = (out[9] >> 25); out[9] &= reduce_mask_25;
|
||||
out[0] += 19 * c;
|
||||
}
|
||||
|
||||
DONNA_INLINE static void
|
||||
curve25519_add_reduce(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
uint32_t c;
|
||||
out[0] = a[0] + b[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
|
||||
out[1] = a[1] + b[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
|
||||
out[2] = a[2] + b[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
|
||||
out[3] = a[3] + b[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
|
||||
out[4] = a[4] + b[4] + c; c = (out[4] >> 26); out[4] &= reduce_mask_26;
|
||||
out[5] = a[5] + b[5] + c; c = (out[5] >> 25); out[5] &= reduce_mask_25;
|
||||
out[6] = a[6] + b[6] + c; c = (out[6] >> 26); out[6] &= reduce_mask_26;
|
||||
out[7] = a[7] + b[7] + c; c = (out[7] >> 25); out[7] &= reduce_mask_25;
|
||||
out[8] = a[8] + b[8] + c; c = (out[8] >> 26); out[8] &= reduce_mask_26;
|
||||
out[9] = a[9] + b[9] + c; c = (out[9] >> 25); out[9] &= reduce_mask_25;
|
||||
out[0] += 19 * c;
|
||||
}
|
||||
|
||||
/* multiples of p */
|
||||
static const uint32_t twoP0 = 0x07ffffda;
|
||||
static const uint32_t twoP13579 = 0x03fffffe;
|
||||
static const uint32_t twoP2468 = 0x07fffffe;
|
||||
static const uint32_t fourP0 = 0x0fffffb4;
|
||||
static const uint32_t fourP13579 = 0x07fffffc;
|
||||
static const uint32_t fourP2468 = 0x0ffffffc;
|
||||
|
||||
/* out = a - b */
|
||||
DONNA_INLINE static void
|
||||
curve25519_sub(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
uint32_t c;
|
||||
out[0] = twoP0 + a[0] - b[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
|
||||
out[1] = twoP13579 + a[1] - b[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
|
||||
out[2] = twoP2468 + a[2] - b[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
|
||||
out[3] = twoP13579 + a[3] - b[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
|
||||
out[4] = twoP2468 + a[4] - b[4] + c;
|
||||
out[5] = twoP13579 + a[5] - b[5] ;
|
||||
out[6] = twoP2468 + a[6] - b[6] ;
|
||||
out[7] = twoP13579 + a[7] - b[7] ;
|
||||
out[8] = twoP2468 + a[8] - b[8] ;
|
||||
out[9] = twoP13579 + a[9] - b[9] ;
|
||||
}
|
||||
|
||||
/* out = a - b, where a is the result of a basic op (add,sub) */
|
||||
DONNA_INLINE static void
|
||||
curve25519_sub_after_basic(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
uint32_t c;
|
||||
out[0] = fourP0 + a[0] - b[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
|
||||
out[1] = fourP13579 + a[1] - b[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
|
||||
out[2] = fourP2468 + a[2] - b[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
|
||||
out[3] = fourP13579 + a[3] - b[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
|
||||
out[4] = fourP2468 + a[4] - b[4] + c; c = (out[4] >> 26); out[4] &= reduce_mask_26;
|
||||
out[5] = fourP13579 + a[5] - b[5] + c; c = (out[5] >> 25); out[5] &= reduce_mask_25;
|
||||
out[6] = fourP2468 + a[6] - b[6] + c; c = (out[6] >> 26); out[6] &= reduce_mask_26;
|
||||
out[7] = fourP13579 + a[7] - b[7] + c; c = (out[7] >> 25); out[7] &= reduce_mask_25;
|
||||
out[8] = fourP2468 + a[8] - b[8] + c; c = (out[8] >> 26); out[8] &= reduce_mask_26;
|
||||
out[9] = fourP13579 + a[9] - b[9] + c; c = (out[9] >> 25); out[9] &= reduce_mask_25;
|
||||
out[0] += 19 * c;
|
||||
}
|
||||
|
||||
DONNA_INLINE static void
|
||||
curve25519_sub_reduce(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
uint32_t c;
|
||||
out[0] = fourP0 + a[0] - b[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
|
||||
out[1] = fourP13579 + a[1] - b[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
|
||||
out[2] = fourP2468 + a[2] - b[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
|
||||
out[3] = fourP13579 + a[3] - b[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
|
||||
out[4] = fourP2468 + a[4] - b[4] + c; c = (out[4] >> 26); out[4] &= reduce_mask_26;
|
||||
out[5] = fourP13579 + a[5] - b[5] + c; c = (out[5] >> 25); out[5] &= reduce_mask_25;
|
||||
out[6] = fourP2468 + a[6] - b[6] + c; c = (out[6] >> 26); out[6] &= reduce_mask_26;
|
||||
out[7] = fourP13579 + a[7] - b[7] + c; c = (out[7] >> 25); out[7] &= reduce_mask_25;
|
||||
out[8] = fourP2468 + a[8] - b[8] + c; c = (out[8] >> 26); out[8] &= reduce_mask_26;
|
||||
out[9] = fourP13579 + a[9] - b[9] + c; c = (out[9] >> 25); out[9] &= reduce_mask_25;
|
||||
out[0] += 19 * c;
|
||||
}
|
||||
|
||||
/* out = -a */
|
||||
DONNA_INLINE static void
|
||||
curve25519_neg(bignum25519 out, const bignum25519 a) {
|
||||
uint32_t c;
|
||||
out[0] = twoP0 - a[0] ; c = (out[0] >> 26); out[0] &= reduce_mask_26;
|
||||
out[1] = twoP13579 - a[1] + c; c = (out[1] >> 25); out[1] &= reduce_mask_25;
|
||||
out[2] = twoP2468 - a[2] + c; c = (out[2] >> 26); out[2] &= reduce_mask_26;
|
||||
out[3] = twoP13579 - a[3] + c; c = (out[3] >> 25); out[3] &= reduce_mask_25;
|
||||
out[4] = twoP2468 - a[4] + c; c = (out[4] >> 26); out[4] &= reduce_mask_26;
|
||||
out[5] = twoP13579 - a[5] + c; c = (out[5] >> 25); out[5] &= reduce_mask_25;
|
||||
out[6] = twoP2468 - a[6] + c; c = (out[6] >> 26); out[6] &= reduce_mask_26;
|
||||
out[7] = twoP13579 - a[7] + c; c = (out[7] >> 25); out[7] &= reduce_mask_25;
|
||||
out[8] = twoP2468 - a[8] + c; c = (out[8] >> 26); out[8] &= reduce_mask_26;
|
||||
out[9] = twoP13579 - a[9] + c; c = (out[9] >> 25); out[9] &= reduce_mask_25;
|
||||
out[0] += 19 * c;
|
||||
}
|
||||
|
||||
/* out = a * b */
|
||||
#define curve25519_mul_noinline curve25519_mul
|
||||
static void
|
||||
curve25519_mul(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
uint32_t r0,r1,r2,r3,r4,r5,r6,r7,r8,r9;
|
||||
uint32_t s0,s1,s2,s3,s4,s5,s6,s7,s8,s9;
|
||||
uint64_t m0,m1,m2,m3,m4,m5,m6,m7,m8,m9,c;
|
||||
uint32_t p;
|
||||
|
||||
r0 = b[0];
|
||||
r1 = b[1];
|
||||
r2 = b[2];
|
||||
r3 = b[3];
|
||||
r4 = b[4];
|
||||
r5 = b[5];
|
||||
r6 = b[6];
|
||||
r7 = b[7];
|
||||
r8 = b[8];
|
||||
r9 = b[9];
|
||||
|
||||
s0 = a[0];
|
||||
s1 = a[1];
|
||||
s2 = a[2];
|
||||
s3 = a[3];
|
||||
s4 = a[4];
|
||||
s5 = a[5];
|
||||
s6 = a[6];
|
||||
s7 = a[7];
|
||||
s8 = a[8];
|
||||
s9 = a[9];
|
||||
|
||||
m1 = mul32x32_64(r0, s1) + mul32x32_64(r1, s0);
|
||||
m3 = mul32x32_64(r0, s3) + mul32x32_64(r1, s2) + mul32x32_64(r2, s1) + mul32x32_64(r3, s0);
|
||||
m5 = mul32x32_64(r0, s5) + mul32x32_64(r1, s4) + mul32x32_64(r2, s3) + mul32x32_64(r3, s2) + mul32x32_64(r4, s1) + mul32x32_64(r5, s0);
|
||||
m7 = mul32x32_64(r0, s7) + mul32x32_64(r1, s6) + mul32x32_64(r2, s5) + mul32x32_64(r3, s4) + mul32x32_64(r4, s3) + mul32x32_64(r5, s2) + mul32x32_64(r6, s1) + mul32x32_64(r7, s0);
|
||||
m9 = mul32x32_64(r0, s9) + mul32x32_64(r1, s8) + mul32x32_64(r2, s7) + mul32x32_64(r3, s6) + mul32x32_64(r4, s5) + mul32x32_64(r5, s4) + mul32x32_64(r6, s3) + mul32x32_64(r7, s2) + mul32x32_64(r8, s1) + mul32x32_64(r9, s0);
|
||||
|
||||
r1 *= 2;
|
||||
r3 *= 2;
|
||||
r5 *= 2;
|
||||
r7 *= 2;
|
||||
|
||||
m0 = mul32x32_64(r0, s0);
|
||||
m2 = mul32x32_64(r0, s2) + mul32x32_64(r1, s1) + mul32x32_64(r2, s0);
|
||||
m4 = mul32x32_64(r0, s4) + mul32x32_64(r1, s3) + mul32x32_64(r2, s2) + mul32x32_64(r3, s1) + mul32x32_64(r4, s0);
|
||||
m6 = mul32x32_64(r0, s6) + mul32x32_64(r1, s5) + mul32x32_64(r2, s4) + mul32x32_64(r3, s3) + mul32x32_64(r4, s2) + mul32x32_64(r5, s1) + mul32x32_64(r6, s0);
|
||||
m8 = mul32x32_64(r0, s8) + mul32x32_64(r1, s7) + mul32x32_64(r2, s6) + mul32x32_64(r3, s5) + mul32x32_64(r4, s4) + mul32x32_64(r5, s3) + mul32x32_64(r6, s2) + mul32x32_64(r7, s1) + mul32x32_64(r8, s0);
|
||||
|
||||
r1 *= 19;
|
||||
r2 *= 19;
|
||||
r3 = (r3 / 2) * 19;
|
||||
r4 *= 19;
|
||||
r5 = (r5 / 2) * 19;
|
||||
r6 *= 19;
|
||||
r7 = (r7 / 2) * 19;
|
||||
r8 *= 19;
|
||||
r9 *= 19;
|
||||
|
||||
m1 += (mul32x32_64(r9, s2) + mul32x32_64(r8, s3) + mul32x32_64(r7, s4) + mul32x32_64(r6, s5) + mul32x32_64(r5, s6) + mul32x32_64(r4, s7) + mul32x32_64(r3, s8) + mul32x32_64(r2, s9));
|
||||
m3 += (mul32x32_64(r9, s4) + mul32x32_64(r8, s5) + mul32x32_64(r7, s6) + mul32x32_64(r6, s7) + mul32x32_64(r5, s8) + mul32x32_64(r4, s9));
|
||||
m5 += (mul32x32_64(r9, s6) + mul32x32_64(r8, s7) + mul32x32_64(r7, s8) + mul32x32_64(r6, s9));
|
||||
m7 += (mul32x32_64(r9, s8) + mul32x32_64(r8, s9));
|
||||
|
||||
r3 *= 2;
|
||||
r5 *= 2;
|
||||
r7 *= 2;
|
||||
r9 *= 2;
|
||||
|
||||
m0 += (mul32x32_64(r9, s1) + mul32x32_64(r8, s2) + mul32x32_64(r7, s3) + mul32x32_64(r6, s4) + mul32x32_64(r5, s5) + mul32x32_64(r4, s6) + mul32x32_64(r3, s7) + mul32x32_64(r2, s8) + mul32x32_64(r1, s9));
|
||||
m2 += (mul32x32_64(r9, s3) + mul32x32_64(r8, s4) + mul32x32_64(r7, s5) + mul32x32_64(r6, s6) + mul32x32_64(r5, s7) + mul32x32_64(r4, s8) + mul32x32_64(r3, s9));
|
||||
m4 += (mul32x32_64(r9, s5) + mul32x32_64(r8, s6) + mul32x32_64(r7, s7) + mul32x32_64(r6, s8) + mul32x32_64(r5, s9));
|
||||
m6 += (mul32x32_64(r9, s7) + mul32x32_64(r8, s8) + mul32x32_64(r7, s9));
|
||||
m8 += (mul32x32_64(r9, s9));
|
||||
|
||||
r0 = (uint32_t)m0 & reduce_mask_26; c = (m0 >> 26);
|
||||
m1 += c; r1 = (uint32_t)m1 & reduce_mask_25; c = (m1 >> 25);
|
||||
m2 += c; r2 = (uint32_t)m2 & reduce_mask_26; c = (m2 >> 26);
|
||||
m3 += c; r3 = (uint32_t)m3 & reduce_mask_25; c = (m3 >> 25);
|
||||
m4 += c; r4 = (uint32_t)m4 & reduce_mask_26; c = (m4 >> 26);
|
||||
m5 += c; r5 = (uint32_t)m5 & reduce_mask_25; c = (m5 >> 25);
|
||||
m6 += c; r6 = (uint32_t)m6 & reduce_mask_26; c = (m6 >> 26);
|
||||
m7 += c; r7 = (uint32_t)m7 & reduce_mask_25; c = (m7 >> 25);
|
||||
m8 += c; r8 = (uint32_t)m8 & reduce_mask_26; c = (m8 >> 26);
|
||||
m9 += c; r9 = (uint32_t)m9 & reduce_mask_25; p = (uint32_t)(m9 >> 25);
|
||||
m0 = r0 + mul32x32_64(p,19); r0 = (uint32_t)m0 & reduce_mask_26; p = (uint32_t)(m0 >> 26);
|
||||
r1 += p;
|
||||
|
||||
out[0] = r0;
|
||||
out[1] = r1;
|
||||
out[2] = r2;
|
||||
out[3] = r3;
|
||||
out[4] = r4;
|
||||
out[5] = r5;
|
||||
out[6] = r6;
|
||||
out[7] = r7;
|
||||
out[8] = r8;
|
||||
out[9] = r9;
|
||||
}
|
||||
|
||||
/* out = in*in */
|
||||
static void
|
||||
curve25519_square(bignum25519 out, const bignum25519 in) {
|
||||
uint32_t r0,r1,r2,r3,r4,r5,r6,r7,r8,r9;
|
||||
uint32_t d6,d7,d8,d9;
|
||||
uint64_t m0,m1,m2,m3,m4,m5,m6,m7,m8,m9,c;
|
||||
uint32_t p;
|
||||
|
||||
r0 = in[0];
|
||||
r1 = in[1];
|
||||
r2 = in[2];
|
||||
r3 = in[3];
|
||||
r4 = in[4];
|
||||
r5 = in[5];
|
||||
r6 = in[6];
|
||||
r7 = in[7];
|
||||
r8 = in[8];
|
||||
r9 = in[9];
|
||||
|
||||
m0 = mul32x32_64(r0, r0);
|
||||
r0 *= 2;
|
||||
m1 = mul32x32_64(r0, r1);
|
||||
m2 = mul32x32_64(r0, r2) + mul32x32_64(r1, r1 * 2);
|
||||
r1 *= 2;
|
||||
m3 = mul32x32_64(r0, r3) + mul32x32_64(r1, r2 );
|
||||
m4 = mul32x32_64(r0, r4) + mul32x32_64(r1, r3 * 2) + mul32x32_64(r2, r2);
|
||||
r2 *= 2;
|
||||
m5 = mul32x32_64(r0, r5) + mul32x32_64(r1, r4 ) + mul32x32_64(r2, r3);
|
||||
m6 = mul32x32_64(r0, r6) + mul32x32_64(r1, r5 * 2) + mul32x32_64(r2, r4) + mul32x32_64(r3, r3 * 2);
|
||||
r3 *= 2;
|
||||
m7 = mul32x32_64(r0, r7) + mul32x32_64(r1, r6 ) + mul32x32_64(r2, r5) + mul32x32_64(r3, r4 );
|
||||
m8 = mul32x32_64(r0, r8) + mul32x32_64(r1, r7 * 2) + mul32x32_64(r2, r6) + mul32x32_64(r3, r5 * 2) + mul32x32_64(r4, r4 );
|
||||
m9 = mul32x32_64(r0, r9) + mul32x32_64(r1, r8 ) + mul32x32_64(r2, r7) + mul32x32_64(r3, r6 ) + mul32x32_64(r4, r5 * 2);
|
||||
|
||||
d6 = r6 * 19;
|
||||
d7 = r7 * 2 * 19;
|
||||
d8 = r8 * 19;
|
||||
d9 = r9 * 2 * 19;
|
||||
|
||||
m0 += (mul32x32_64(d9, r1 ) + mul32x32_64(d8, r2 ) + mul32x32_64(d7, r3 ) + mul32x32_64(d6, r4 * 2) + mul32x32_64(r5, r5 * 2 * 19));
|
||||
m1 += (mul32x32_64(d9, r2 / 2) + mul32x32_64(d8, r3 ) + mul32x32_64(d7, r4 ) + mul32x32_64(d6, r5 * 2));
|
||||
m2 += (mul32x32_64(d9, r3 ) + mul32x32_64(d8, r4 * 2) + mul32x32_64(d7, r5 * 2) + mul32x32_64(d6, r6 ));
|
||||
m3 += (mul32x32_64(d9, r4 ) + mul32x32_64(d8, r5 * 2) + mul32x32_64(d7, r6 ));
|
||||
m4 += (mul32x32_64(d9, r5 * 2) + mul32x32_64(d8, r6 * 2) + mul32x32_64(d7, r7 ));
|
||||
m5 += (mul32x32_64(d9, r6 ) + mul32x32_64(d8, r7 * 2));
|
||||
m6 += (mul32x32_64(d9, r7 * 2) + mul32x32_64(d8, r8 ));
|
||||
m7 += (mul32x32_64(d9, r8 ));
|
||||
m8 += (mul32x32_64(d9, r9 ));
|
||||
|
||||
r0 = (uint32_t)m0 & reduce_mask_26; c = (m0 >> 26);
|
||||
m1 += c; r1 = (uint32_t)m1 & reduce_mask_25; c = (m1 >> 25);
|
||||
m2 += c; r2 = (uint32_t)m2 & reduce_mask_26; c = (m2 >> 26);
|
||||
m3 += c; r3 = (uint32_t)m3 & reduce_mask_25; c = (m3 >> 25);
|
||||
m4 += c; r4 = (uint32_t)m4 & reduce_mask_26; c = (m4 >> 26);
|
||||
m5 += c; r5 = (uint32_t)m5 & reduce_mask_25; c = (m5 >> 25);
|
||||
m6 += c; r6 = (uint32_t)m6 & reduce_mask_26; c = (m6 >> 26);
|
||||
m7 += c; r7 = (uint32_t)m7 & reduce_mask_25; c = (m7 >> 25);
|
||||
m8 += c; r8 = (uint32_t)m8 & reduce_mask_26; c = (m8 >> 26);
|
||||
m9 += c; r9 = (uint32_t)m9 & reduce_mask_25; p = (uint32_t)(m9 >> 25);
|
||||
m0 = r0 + mul32x32_64(p,19); r0 = (uint32_t)m0 & reduce_mask_26; p = (uint32_t)(m0 >> 26);
|
||||
r1 += p;
|
||||
|
||||
out[0] = r0;
|
||||
out[1] = r1;
|
||||
out[2] = r2;
|
||||
out[3] = r3;
|
||||
out[4] = r4;
|
||||
out[5] = r5;
|
||||
out[6] = r6;
|
||||
out[7] = r7;
|
||||
out[8] = r8;
|
||||
out[9] = r9;
|
||||
}
|
||||
|
||||
|
||||
/* out = in ^ (2 * count) */
|
||||
static void
|
||||
curve25519_square_times(bignum25519 out, const bignum25519 in, int count) {
|
||||
uint32_t r0,r1,r2,r3,r4,r5,r6,r7,r8,r9;
|
||||
uint32_t d6,d7,d8,d9;
|
||||
uint64_t m0,m1,m2,m3,m4,m5,m6,m7,m8,m9,c;
|
||||
uint32_t p;
|
||||
|
||||
r0 = in[0];
|
||||
r1 = in[1];
|
||||
r2 = in[2];
|
||||
r3 = in[3];
|
||||
r4 = in[4];
|
||||
r5 = in[5];
|
||||
r6 = in[6];
|
||||
r7 = in[7];
|
||||
r8 = in[8];
|
||||
r9 = in[9];
|
||||
|
||||
do {
|
||||
m0 = mul32x32_64(r0, r0);
|
||||
r0 *= 2;
|
||||
m1 = mul32x32_64(r0, r1);
|
||||
m2 = mul32x32_64(r0, r2) + mul32x32_64(r1, r1 * 2);
|
||||
r1 *= 2;
|
||||
m3 = mul32x32_64(r0, r3) + mul32x32_64(r1, r2 );
|
||||
m4 = mul32x32_64(r0, r4) + mul32x32_64(r1, r3 * 2) + mul32x32_64(r2, r2);
|
||||
r2 *= 2;
|
||||
m5 = mul32x32_64(r0, r5) + mul32x32_64(r1, r4 ) + mul32x32_64(r2, r3);
|
||||
m6 = mul32x32_64(r0, r6) + mul32x32_64(r1, r5 * 2) + mul32x32_64(r2, r4) + mul32x32_64(r3, r3 * 2);
|
||||
r3 *= 2;
|
||||
m7 = mul32x32_64(r0, r7) + mul32x32_64(r1, r6 ) + mul32x32_64(r2, r5) + mul32x32_64(r3, r4 );
|
||||
m8 = mul32x32_64(r0, r8) + mul32x32_64(r1, r7 * 2) + mul32x32_64(r2, r6) + mul32x32_64(r3, r5 * 2) + mul32x32_64(r4, r4 );
|
||||
m9 = mul32x32_64(r0, r9) + mul32x32_64(r1, r8 ) + mul32x32_64(r2, r7) + mul32x32_64(r3, r6 ) + mul32x32_64(r4, r5 * 2);
|
||||
|
||||
d6 = r6 * 19;
|
||||
d7 = r7 * 2 * 19;
|
||||
d8 = r8 * 19;
|
||||
d9 = r9 * 2 * 19;
|
||||
|
||||
m0 += (mul32x32_64(d9, r1 ) + mul32x32_64(d8, r2 ) + mul32x32_64(d7, r3 ) + mul32x32_64(d6, r4 * 2) + mul32x32_64(r5, r5 * 2 * 19));
|
||||
m1 += (mul32x32_64(d9, r2 / 2) + mul32x32_64(d8, r3 ) + mul32x32_64(d7, r4 ) + mul32x32_64(d6, r5 * 2));
|
||||
m2 += (mul32x32_64(d9, r3 ) + mul32x32_64(d8, r4 * 2) + mul32x32_64(d7, r5 * 2) + mul32x32_64(d6, r6 ));
|
||||
m3 += (mul32x32_64(d9, r4 ) + mul32x32_64(d8, r5 * 2) + mul32x32_64(d7, r6 ));
|
||||
m4 += (mul32x32_64(d9, r5 * 2) + mul32x32_64(d8, r6 * 2) + mul32x32_64(d7, r7 ));
|
||||
m5 += (mul32x32_64(d9, r6 ) + mul32x32_64(d8, r7 * 2));
|
||||
m6 += (mul32x32_64(d9, r7 * 2) + mul32x32_64(d8, r8 ));
|
||||
m7 += (mul32x32_64(d9, r8 ));
|
||||
m8 += (mul32x32_64(d9, r9 ));
|
||||
|
||||
r0 = (uint32_t)m0 & reduce_mask_26; c = (m0 >> 26);
|
||||
m1 += c; r1 = (uint32_t)m1 & reduce_mask_25; c = (m1 >> 25);
|
||||
m2 += c; r2 = (uint32_t)m2 & reduce_mask_26; c = (m2 >> 26);
|
||||
m3 += c; r3 = (uint32_t)m3 & reduce_mask_25; c = (m3 >> 25);
|
||||
m4 += c; r4 = (uint32_t)m4 & reduce_mask_26; c = (m4 >> 26);
|
||||
m5 += c; r5 = (uint32_t)m5 & reduce_mask_25; c = (m5 >> 25);
|
||||
m6 += c; r6 = (uint32_t)m6 & reduce_mask_26; c = (m6 >> 26);
|
||||
m7 += c; r7 = (uint32_t)m7 & reduce_mask_25; c = (m7 >> 25);
|
||||
m8 += c; r8 = (uint32_t)m8 & reduce_mask_26; c = (m8 >> 26);
|
||||
m9 += c; r9 = (uint32_t)m9 & reduce_mask_25; p = (uint32_t)(m9 >> 25);
|
||||
m0 = r0 + mul32x32_64(p,19); r0 = (uint32_t)m0 & reduce_mask_26; p = (uint32_t)(m0 >> 26);
|
||||
r1 += p;
|
||||
} while (--count);
|
||||
|
||||
out[0] = r0;
|
||||
out[1] = r1;
|
||||
out[2] = r2;
|
||||
out[3] = r3;
|
||||
out[4] = r4;
|
||||
out[5] = r5;
|
||||
out[6] = r6;
|
||||
out[7] = r7;
|
||||
out[8] = r8;
|
||||
out[9] = r9;
|
||||
}
|
||||
|
||||
/* Take a little-endian, 32-byte number and expand it into polynomial form */
|
||||
static void
|
||||
curve25519_expand(bignum25519 out, const unsigned char in[32]) {
|
||||
static const union { uint8_t b[2]; uint16_t s; } endian_check = {{1,0}};
|
||||
uint32_t x0,x1,x2,x3,x4,x5,x6,x7;
|
||||
|
||||
if (endian_check.s == 1) {
|
||||
x0 = *(uint32_t *)(in + 0);
|
||||
x1 = *(uint32_t *)(in + 4);
|
||||
x2 = *(uint32_t *)(in + 8);
|
||||
x3 = *(uint32_t *)(in + 12);
|
||||
x4 = *(uint32_t *)(in + 16);
|
||||
x5 = *(uint32_t *)(in + 20);
|
||||
x6 = *(uint32_t *)(in + 24);
|
||||
x7 = *(uint32_t *)(in + 28);
|
||||
} else {
|
||||
#define F(s) \
|
||||
((((uint32_t)in[s + 0]) ) | \
|
||||
(((uint32_t)in[s + 1]) << 8) | \
|
||||
(((uint32_t)in[s + 2]) << 16) | \
|
||||
(((uint32_t)in[s + 3]) << 24))
|
||||
x0 = F(0);
|
||||
x1 = F(4);
|
||||
x2 = F(8);
|
||||
x3 = F(12);
|
||||
x4 = F(16);
|
||||
x5 = F(20);
|
||||
x6 = F(24);
|
||||
x7 = F(28);
|
||||
#undef F
|
||||
}
|
||||
|
||||
out[0] = ( x0 ) & 0x3ffffff;
|
||||
out[1] = ((((uint64_t)x1 << 32) | x0) >> 26) & 0x1ffffff;
|
||||
out[2] = ((((uint64_t)x2 << 32) | x1) >> 19) & 0x3ffffff;
|
||||
out[3] = ((((uint64_t)x3 << 32) | x2) >> 13) & 0x1ffffff;
|
||||
out[4] = (( x3) >> 6) & 0x3ffffff;
|
||||
out[5] = ( x4 ) & 0x1ffffff;
|
||||
out[6] = ((((uint64_t)x5 << 32) | x4) >> 25) & 0x3ffffff;
|
||||
out[7] = ((((uint64_t)x6 << 32) | x5) >> 19) & 0x1ffffff;
|
||||
out[8] = ((((uint64_t)x7 << 32) | x6) >> 12) & 0x3ffffff;
|
||||
out[9] = (( x7) >> 6) & 0x1ffffff;
|
||||
}
|
||||
|
||||
/* Take a fully reduced polynomial form number and contract it into a
|
||||
* little-endian, 32-byte array
|
||||
*/
|
||||
static void
|
||||
curve25519_contract(unsigned char out[32], const bignum25519 in) {
|
||||
bignum25519 f;
|
||||
curve25519_copy(f, in);
|
||||
|
||||
#define carry_pass() \
|
||||
f[1] += f[0] >> 26; f[0] &= reduce_mask_26; \
|
||||
f[2] += f[1] >> 25; f[1] &= reduce_mask_25; \
|
||||
f[3] += f[2] >> 26; f[2] &= reduce_mask_26; \
|
||||
f[4] += f[3] >> 25; f[3] &= reduce_mask_25; \
|
||||
f[5] += f[4] >> 26; f[4] &= reduce_mask_26; \
|
||||
f[6] += f[5] >> 25; f[5] &= reduce_mask_25; \
|
||||
f[7] += f[6] >> 26; f[6] &= reduce_mask_26; \
|
||||
f[8] += f[7] >> 25; f[7] &= reduce_mask_25; \
|
||||
f[9] += f[8] >> 26; f[8] &= reduce_mask_26;
|
||||
|
||||
#define carry_pass_full() \
|
||||
carry_pass() \
|
||||
f[0] += 19 * (f[9] >> 25); f[9] &= reduce_mask_25;
|
||||
|
||||
#define carry_pass_final() \
|
||||
carry_pass() \
|
||||
f[9] &= reduce_mask_25;
|
||||
|
||||
carry_pass_full()
|
||||
carry_pass_full()
|
||||
|
||||
/* now t is between 0 and 2^255-1, properly carried. */
|
||||
/* case 1: between 0 and 2^255-20. case 2: between 2^255-19 and 2^255-1. */
|
||||
f[0] += 19;
|
||||
carry_pass_full()
|
||||
|
||||
/* now between 19 and 2^255-1 in both cases, and offset by 19. */
|
||||
f[0] += (reduce_mask_26 + 1) - 19;
|
||||
f[1] += (reduce_mask_25 + 1) - 1;
|
||||
f[2] += (reduce_mask_26 + 1) - 1;
|
||||
f[3] += (reduce_mask_25 + 1) - 1;
|
||||
f[4] += (reduce_mask_26 + 1) - 1;
|
||||
f[5] += (reduce_mask_25 + 1) - 1;
|
||||
f[6] += (reduce_mask_26 + 1) - 1;
|
||||
f[7] += (reduce_mask_25 + 1) - 1;
|
||||
f[8] += (reduce_mask_26 + 1) - 1;
|
||||
f[9] += (reduce_mask_25 + 1) - 1;
|
||||
|
||||
/* now between 2^255 and 2^256-20, and offset by 2^255. */
|
||||
carry_pass_final()
|
||||
|
||||
#undef carry_pass
|
||||
#undef carry_full
|
||||
#undef carry_final
|
||||
|
||||
f[1] <<= 2;
|
||||
f[2] <<= 3;
|
||||
f[3] <<= 5;
|
||||
f[4] <<= 6;
|
||||
f[6] <<= 1;
|
||||
f[7] <<= 3;
|
||||
f[8] <<= 4;
|
||||
f[9] <<= 6;
|
||||
|
||||
#define F(i, s) \
|
||||
out[s+0] |= (unsigned char )(f[i] & 0xff); \
|
||||
out[s+1] = (unsigned char )((f[i] >> 8) & 0xff); \
|
||||
out[s+2] = (unsigned char )((f[i] >> 16) & 0xff); \
|
||||
out[s+3] = (unsigned char )((f[i] >> 24) & 0xff);
|
||||
|
||||
out[0] = 0;
|
||||
out[16] = 0;
|
||||
F(0,0);
|
||||
F(1,3);
|
||||
F(2,6);
|
||||
F(3,9);
|
||||
F(4,12);
|
||||
F(5,16);
|
||||
F(6,19);
|
||||
F(7,22);
|
||||
F(8,25);
|
||||
F(9,28);
|
||||
#undef F
|
||||
}
|
||||
|
||||
|
||||
/* out = (flag) ? in : out */
|
||||
DONNA_INLINE static void
|
||||
curve25519_move_conditional_bytes(uint8_t out[96], const uint8_t in[96], uint32_t flag) {
|
||||
const uint32_t nb = flag - 1, b = ~nb;
|
||||
const uint32_t *inl = (const uint32_t *)in;
|
||||
uint32_t *outl = (uint32_t *)out;
|
||||
outl[0] = (outl[0] & nb) | (inl[0] & b);
|
||||
outl[1] = (outl[1] & nb) | (inl[1] & b);
|
||||
outl[2] = (outl[2] & nb) | (inl[2] & b);
|
||||
outl[3] = (outl[3] & nb) | (inl[3] & b);
|
||||
outl[4] = (outl[4] & nb) | (inl[4] & b);
|
||||
outl[5] = (outl[5] & nb) | (inl[5] & b);
|
||||
outl[6] = (outl[6] & nb) | (inl[6] & b);
|
||||
outl[7] = (outl[7] & nb) | (inl[7] & b);
|
||||
outl[8] = (outl[8] & nb) | (inl[8] & b);
|
||||
outl[9] = (outl[9] & nb) | (inl[9] & b);
|
||||
outl[10] = (outl[10] & nb) | (inl[10] & b);
|
||||
outl[11] = (outl[11] & nb) | (inl[11] & b);
|
||||
outl[12] = (outl[12] & nb) | (inl[12] & b);
|
||||
outl[13] = (outl[13] & nb) | (inl[13] & b);
|
||||
outl[14] = (outl[14] & nb) | (inl[14] & b);
|
||||
outl[15] = (outl[15] & nb) | (inl[15] & b);
|
||||
outl[16] = (outl[16] & nb) | (inl[16] & b);
|
||||
outl[17] = (outl[17] & nb) | (inl[17] & b);
|
||||
outl[18] = (outl[18] & nb) | (inl[18] & b);
|
||||
outl[19] = (outl[19] & nb) | (inl[19] & b);
|
||||
outl[20] = (outl[20] & nb) | (inl[20] & b);
|
||||
outl[21] = (outl[21] & nb) | (inl[21] & b);
|
||||
outl[22] = (outl[22] & nb) | (inl[22] & b);
|
||||
outl[23] = (outl[23] & nb) | (inl[23] & b);
|
||||
|
||||
}
|
||||
|
||||
/* if (iswap) swap(a, b) */
|
||||
DONNA_INLINE static void
|
||||
curve25519_swap_conditional(bignum25519 a, bignum25519 b, uint32_t iswap) {
|
||||
const uint32_t swap = (uint32_t)(-(int32_t)iswap);
|
||||
uint32_t x0,x1,x2,x3,x4,x5,x6,x7,x8,x9;
|
||||
|
||||
x0 = swap & (a[0] ^ b[0]); a[0] ^= x0; b[0] ^= x0;
|
||||
x1 = swap & (a[1] ^ b[1]); a[1] ^= x1; b[1] ^= x1;
|
||||
x2 = swap & (a[2] ^ b[2]); a[2] ^= x2; b[2] ^= x2;
|
||||
x3 = swap & (a[3] ^ b[3]); a[3] ^= x3; b[3] ^= x3;
|
||||
x4 = swap & (a[4] ^ b[4]); a[4] ^= x4; b[4] ^= x4;
|
||||
x5 = swap & (a[5] ^ b[5]); a[5] ^= x5; b[5] ^= x5;
|
||||
x6 = swap & (a[6] ^ b[6]); a[6] ^= x6; b[6] ^= x6;
|
||||
x7 = swap & (a[7] ^ b[7]); a[7] ^= x7; b[7] ^= x7;
|
||||
x8 = swap & (a[8] ^ b[8]); a[8] ^= x8; b[8] ^= x8;
|
||||
x9 = swap & (a[9] ^ b[9]); a[9] ^= x9; b[9] ^= x9;
|
||||
}
|
||||
external/ed25519-donna/curve25519-donna-64bit.h (vendored, 413 lines removed)
@@ -1,413 +0,0 @@
|
||||
/*
|
||||
Public domain by Adam Langley <agl@imperialviolet.org> &
|
||||
Andrew M. <liquidsun@gmail.com>
|
||||
See: https://github.com/floodyberry/curve25519-donna
|
||||
|
||||
64bit integer curve25519 implementation
|
||||
*/
|
||||
|
||||
typedef uint64_t bignum25519[5];
|
||||
|
||||
static const uint64_t reduce_mask_40 = ((uint64_t)1 << 40) - 1;
|
||||
static const uint64_t reduce_mask_51 = ((uint64_t)1 << 51) - 1;
|
||||
static const uint64_t reduce_mask_56 = ((uint64_t)1 << 56) - 1;
|
||||
|
||||
/* out = in */
|
||||
DONNA_INLINE static void
|
||||
curve25519_copy(bignum25519 out, const bignum25519 in) {
|
||||
out[0] = in[0];
|
||||
out[1] = in[1];
|
||||
out[2] = in[2];
|
||||
out[3] = in[3];
|
||||
out[4] = in[4];
|
||||
}
|
||||
|
||||
/* out = a + b */
|
||||
DONNA_INLINE static void
|
||||
curve25519_add(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
out[0] = a[0] + b[0];
|
||||
out[1] = a[1] + b[1];
|
||||
out[2] = a[2] + b[2];
|
||||
out[3] = a[3] + b[3];
|
||||
out[4] = a[4] + b[4];
|
||||
}
|
||||
|
||||
/* out = a + b, where a and/or b are the result of a basic op (add,sub) */
|
||||
DONNA_INLINE static void
|
||||
curve25519_add_after_basic(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
out[0] = a[0] + b[0];
|
||||
out[1] = a[1] + b[1];
|
||||
out[2] = a[2] + b[2];
|
||||
out[3] = a[3] + b[3];
|
||||
out[4] = a[4] + b[4];
|
||||
}
|
||||
|
||||
DONNA_INLINE static void
|
||||
curve25519_add_reduce(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
uint64_t c;
|
||||
out[0] = a[0] + b[0] ; c = (out[0] >> 51); out[0] &= reduce_mask_51;
|
||||
out[1] = a[1] + b[1] + c; c = (out[1] >> 51); out[1] &= reduce_mask_51;
|
||||
out[2] = a[2] + b[2] + c; c = (out[2] >> 51); out[2] &= reduce_mask_51;
|
||||
out[3] = a[3] + b[3] + c; c = (out[3] >> 51); out[3] &= reduce_mask_51;
|
||||
out[4] = a[4] + b[4] + c; c = (out[4] >> 51); out[4] &= reduce_mask_51;
|
||||
out[0] += c * 19;
|
||||
}
|
||||
|
||||
/* multiples of p */
|
||||
static const uint64_t twoP0 = 0x0fffffffffffda;
|
||||
static const uint64_t twoP1234 = 0x0ffffffffffffe;
|
||||
static const uint64_t fourP0 = 0x1fffffffffffb4;
|
||||
static const uint64_t fourP1234 = 0x1ffffffffffffc;
|
||||
|
||||
/* out = a - b */
|
||||
DONNA_INLINE static void
|
||||
curve25519_sub(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
out[0] = a[0] + twoP0 - b[0];
|
||||
out[1] = a[1] + twoP1234 - b[1];
|
||||
out[2] = a[2] + twoP1234 - b[2];
|
||||
out[3] = a[3] + twoP1234 - b[3];
|
||||
out[4] = a[4] + twoP1234 - b[4];
|
||||
}
|
||||
|
||||
/* out = a - b, where a and/or b are the result of a basic op (add,sub) */
|
||||
DONNA_INLINE static void
|
||||
curve25519_sub_after_basic(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
out[0] = a[0] + fourP0 - b[0];
|
||||
out[1] = a[1] + fourP1234 - b[1];
|
||||
out[2] = a[2] + fourP1234 - b[2];
|
||||
out[3] = a[3] + fourP1234 - b[3];
|
||||
out[4] = a[4] + fourP1234 - b[4];
|
||||
}
|
||||
|
||||
DONNA_INLINE static void
|
||||
curve25519_sub_reduce(bignum25519 out, const bignum25519 a, const bignum25519 b) {
|
||||
uint64_t c;
|
||||
out[0] = a[0] + fourP0 - b[0] ; c = (out[0] >> 51); out[0] &= reduce_mask_51;
|
||||
out[1] = a[1] + fourP1234 - b[1] + c; c = (out[1] >> 51); out[1] &= reduce_mask_51;
|
||||
out[2] = a[2] + fourP1234 - b[2] + c; c = (out[2] >> 51); out[2] &= reduce_mask_51;
|
||||
out[3] = a[3] + fourP1234 - b[3] + c; c = (out[3] >> 51); out[3] &= reduce_mask_51;
|
||||
out[4] = a[4] + fourP1234 - b[4] + c; c = (out[4] >> 51); out[4] &= reduce_mask_51;
|
||||
out[0] += c * 19;
|
||||
}
|
||||
|
||||
/* out = -a */
|
||||
DONNA_INLINE static void
|
||||
curve25519_neg(bignum25519 out, const bignum25519 a) {
|
||||
uint64_t c;
|
||||
out[0] = twoP0 - a[0] ; c = (out[0] >> 51); out[0] &= reduce_mask_51;
|
||||
out[1] = twoP1234 - a[1] + c; c = (out[1] >> 51); out[1] &= reduce_mask_51;
|
||||
out[2] = twoP1234 - a[2] + c; c = (out[2] >> 51); out[2] &= reduce_mask_51;
|
||||
out[3] = twoP1234 - a[3] + c; c = (out[3] >> 51); out[3] &= reduce_mask_51;
|
||||
out[4] = twoP1234 - a[4] + c; c = (out[4] >> 51); out[4] &= reduce_mask_51;
|
||||
out[0] += c * 19;
|
||||
}
|
||||
|
||||
/* out = a * b */
|
||||
DONNA_INLINE static void
|
||||
curve25519_mul(bignum25519 out, const bignum25519 in2, const bignum25519 in) {
|
||||
#if !defined(HAVE_NATIVE_UINT128)
|
||||
uint128_t mul;
|
||||
#endif
|
||||
uint128_t t[5];
|
||||
uint64_t r0,r1,r2,r3,r4,s0,s1,s2,s3,s4,c;
|
||||
|
||||
r0 = in[0];
|
||||
r1 = in[1];
|
||||
r2 = in[2];
|
||||
r3 = in[3];
|
||||
r4 = in[4];
|
||||
|
||||
s0 = in2[0];
|
||||
s1 = in2[1];
|
||||
s2 = in2[2];
|
||||
s3 = in2[3];
|
||||
s4 = in2[4];
|
||||
|
||||
#if defined(HAVE_NATIVE_UINT128)
|
||||
t[0] = ((uint128_t) r0) * s0;
|
||||
t[1] = ((uint128_t) r0) * s1 + ((uint128_t) r1) * s0;
|
||||
t[2] = ((uint128_t) r0) * s2 + ((uint128_t) r2) * s0 + ((uint128_t) r1) * s1;
|
||||
t[3] = ((uint128_t) r0) * s3 + ((uint128_t) r3) * s0 + ((uint128_t) r1) * s2 + ((uint128_t) r2) * s1;
|
||||
t[4] = ((uint128_t) r0) * s4 + ((uint128_t) r4) * s0 + ((uint128_t) r3) * s1 + ((uint128_t) r1) * s3 + ((uint128_t) r2) * s2;
|
||||
#else
|
||||
mul64x64_128(t[0], r0, s0)
|
||||
mul64x64_128(t[1], r0, s1) mul64x64_128(mul, r1, s0) add128(t[1], mul)
|
||||
mul64x64_128(t[2], r0, s2) mul64x64_128(mul, r2, s0) add128(t[2], mul) mul64x64_128(mul, r1, s1) add128(t[2], mul)
|
||||
mul64x64_128(t[3], r0, s3) mul64x64_128(mul, r3, s0) add128(t[3], mul) mul64x64_128(mul, r1, s2) add128(t[3], mul) mul64x64_128(mul, r2, s1) add128(t[3], mul)
|
||||
mul64x64_128(t[4], r0, s4) mul64x64_128(mul, r4, s0) add128(t[4], mul) mul64x64_128(mul, r3, s1) add128(t[4], mul) mul64x64_128(mul, r1, s3) add128(t[4], mul) mul64x64_128(mul, r2, s2) add128(t[4], mul)
|
||||
#endif
|
||||
|
||||
r1 *= 19;
|
||||
r2 *= 19;
|
||||
r3 *= 19;
|
||||
r4 *= 19;
|
||||
|
||||
#if defined(HAVE_NATIVE_UINT128)
|
||||
t[0] += ((uint128_t) r4) * s1 + ((uint128_t) r1) * s4 + ((uint128_t) r2) * s3 + ((uint128_t) r3) * s2;
|
||||
t[1] += ((uint128_t) r4) * s2 + ((uint128_t) r2) * s4 + ((uint128_t) r3) * s3;
|
||||
t[2] += ((uint128_t) r4) * s3 + ((uint128_t) r3) * s4;
|
||||
t[3] += ((uint128_t) r4) * s4;
|
||||
#else
|
||||
mul64x64_128(mul, r4, s1) add128(t[0], mul) mul64x64_128(mul, r1, s4) add128(t[0], mul) mul64x64_128(mul, r2, s3) add128(t[0], mul) mul64x64_128(mul, r3, s2) add128(t[0], mul)
|
||||
mul64x64_128(mul, r4, s2) add128(t[1], mul) mul64x64_128(mul, r2, s4) add128(t[1], mul) mul64x64_128(mul, r3, s3) add128(t[1], mul)
|
||||
mul64x64_128(mul, r4, s3) add128(t[2], mul) mul64x64_128(mul, r3, s4) add128(t[2], mul)
|
||||
mul64x64_128(mul, r4, s4) add128(t[3], mul)
|
||||
#endif
|
||||
|
||||
|
||||
r0 = lo128(t[0]) & reduce_mask_51; shr128(c, t[0], 51);
|
||||
add128_64(t[1], c) r1 = lo128(t[1]) & reduce_mask_51; shr128(c, t[1], 51);
|
||||
add128_64(t[2], c) r2 = lo128(t[2]) & reduce_mask_51; shr128(c, t[2], 51);
|
||||
add128_64(t[3], c) r3 = lo128(t[3]) & reduce_mask_51; shr128(c, t[3], 51);
|
||||
add128_64(t[4], c) r4 = lo128(t[4]) & reduce_mask_51; shr128(c, t[4], 51);
|
||||
r0 += c * 19; c = r0 >> 51; r0 = r0 & reduce_mask_51;
|
||||
r1 += c;
|
||||
|
||||
out[0] = r0;
|
||||
out[1] = r1;
|
||||
out[2] = r2;
|
||||
out[3] = r3;
|
||||
out[4] = r4;
|
||||
}
|
||||
|
||||
DONNA_NOINLINE static void
|
||||
curve25519_mul_noinline(bignum25519 out, const bignum25519 in2, const bignum25519 in) {
|
||||
curve25519_mul(out, in2, in);
|
||||
}
|
||||
|
||||
/* out = in^(2 * count) */
|
||||
DONNA_NOINLINE static void
curve25519_square_times(bignum25519 out, const bignum25519 in, uint64_t count) {
#if !defined(HAVE_NATIVE_UINT128)
    uint128_t mul;
#endif
    uint128_t t[5];
    uint64_t r0,r1,r2,r3,r4,c;
    uint64_t d0,d1,d2,d4,d419;

    r0 = in[0];
    r1 = in[1];
    r2 = in[2];
    r3 = in[3];
    r4 = in[4];

    do {
        d0 = r0 * 2;
        d1 = r1 * 2;
        d2 = r2 * 2 * 19;
        d419 = r4 * 19;
        d4 = d419 * 2;

#if defined(HAVE_NATIVE_UINT128)
        t[0] = ((uint128_t) r0) * r0 + ((uint128_t) d4) * r1 + (((uint128_t) d2) * (r3     ));
        t[1] = ((uint128_t) d0) * r1 + ((uint128_t) d4) * r2 + (((uint128_t) r3) * (r3 * 19));
        t[2] = ((uint128_t) d0) * r2 + ((uint128_t) r1) * r1 + (((uint128_t) d4) * (r3     ));
        t[3] = ((uint128_t) d0) * r3 + ((uint128_t) d1) * r2 + (((uint128_t) r4) * (d419   ));
        t[4] = ((uint128_t) d0) * r4 + ((uint128_t) d1) * r3 + (((uint128_t) r2) * (r2     ));
#else
        mul64x64_128(t[0], r0, r0) mul64x64_128(mul, d4, r1) add128(t[0], mul) mul64x64_128(mul, d2, r3) add128(t[0], mul)
        mul64x64_128(t[1], d0, r1) mul64x64_128(mul, d4, r2) add128(t[1], mul) mul64x64_128(mul, r3, r3 * 19) add128(t[1], mul)
        mul64x64_128(t[2], d0, r2) mul64x64_128(mul, r1, r1) add128(t[2], mul) mul64x64_128(mul, d4, r3) add128(t[2], mul)
        mul64x64_128(t[3], d0, r3) mul64x64_128(mul, d1, r2) add128(t[3], mul) mul64x64_128(mul, r4, d419) add128(t[3], mul)
        mul64x64_128(t[4], d0, r4) mul64x64_128(mul, d1, r3) add128(t[4], mul) mul64x64_128(mul, r2, r2) add128(t[4], mul)
#endif

        r0 = lo128(t[0]) & reduce_mask_51;
        r1 = lo128(t[1]) & reduce_mask_51; shl128(c, t[0], 13); r1 += c;
        r2 = lo128(t[2]) & reduce_mask_51; shl128(c, t[1], 13); r2 += c;
        r3 = lo128(t[3]) & reduce_mask_51; shl128(c, t[2], 13); r3 += c;
        r4 = lo128(t[4]) & reduce_mask_51; shl128(c, t[3], 13); r4 += c;
        shl128(c, t[4], 13); r0 += c * 19;
        c = r0 >> 51; r0 &= reduce_mask_51;
        r1 += c ; c = r1 >> 51; r1 &= reduce_mask_51;
        r2 += c ; c = r2 >> 51; r2 &= reduce_mask_51;
        r3 += c ; c = r3 >> 51; r3 &= reduce_mask_51;
        r4 += c ; c = r4 >> 51; r4 &= reduce_mask_51;
        r0 += c * 19;
    } while(--count);

    out[0] = r0;
    out[1] = r1;
    out[2] = r2;
    out[3] = r3;
    out[4] = r4;
}
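/*
 * Squaring `count` times doubles the exponent each round, so this routine
 * computes out = in^(2^count).  Illustrative check against the exponent
 * ladder further down in this diff: starting from b = z^(2^5 - 2^0),
 *   curve25519_square_times(t0, b, 5)  =>  t0 = z^((2^5 - 2^0) * 2^5)
 *                                             = z^(2^10 - 2^5)
 * which is the step labelled "2^10 - 2^5" in
 * curve25519_pow_two5mtwo0_two250mtwo0 below.
 */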

DONNA_INLINE static void
curve25519_square(bignum25519 out, const bignum25519 in) {
#if !defined(HAVE_NATIVE_UINT128)
    uint128_t mul;
#endif
    uint128_t t[5];
    uint64_t r0,r1,r2,r3,r4,c;
    uint64_t d0,d1,d2,d4,d419;

    r0 = in[0];
    r1 = in[1];
    r2 = in[2];
    r3 = in[3];
    r4 = in[4];

    d0 = r0 * 2;
    d1 = r1 * 2;
    d2 = r2 * 2 * 19;
    d419 = r4 * 19;
    d4 = d419 * 2;

#if defined(HAVE_NATIVE_UINT128)
    t[0] = ((uint128_t) r0) * r0 + ((uint128_t) d4) * r1 + (((uint128_t) d2) * (r3     ));
    t[1] = ((uint128_t) d0) * r1 + ((uint128_t) d4) * r2 + (((uint128_t) r3) * (r3 * 19));
    t[2] = ((uint128_t) d0) * r2 + ((uint128_t) r1) * r1 + (((uint128_t) d4) * (r3     ));
    t[3] = ((uint128_t) d0) * r3 + ((uint128_t) d1) * r2 + (((uint128_t) r4) * (d419   ));
    t[4] = ((uint128_t) d0) * r4 + ((uint128_t) d1) * r3 + (((uint128_t) r2) * (r2     ));
#else
    mul64x64_128(t[0], r0, r0) mul64x64_128(mul, d4, r1) add128(t[0], mul) mul64x64_128(mul, d2, r3) add128(t[0], mul)
    mul64x64_128(t[1], d0, r1) mul64x64_128(mul, d4, r2) add128(t[1], mul) mul64x64_128(mul, r3, r3 * 19) add128(t[1], mul)
    mul64x64_128(t[2], d0, r2) mul64x64_128(mul, r1, r1) add128(t[2], mul) mul64x64_128(mul, d4, r3) add128(t[2], mul)
    mul64x64_128(t[3], d0, r3) mul64x64_128(mul, d1, r2) add128(t[3], mul) mul64x64_128(mul, r4, d419) add128(t[3], mul)
    mul64x64_128(t[4], d0, r4) mul64x64_128(mul, d1, r3) add128(t[4], mul) mul64x64_128(mul, r2, r2) add128(t[4], mul)
#endif

    r0 = lo128(t[0]) & reduce_mask_51; shr128(c, t[0], 51);
    add128_64(t[1], c) r1 = lo128(t[1]) & reduce_mask_51; shr128(c, t[1], 51);
    add128_64(t[2], c) r2 = lo128(t[2]) & reduce_mask_51; shr128(c, t[2], 51);
    add128_64(t[3], c) r3 = lo128(t[3]) & reduce_mask_51; shr128(c, t[3], 51);
    add128_64(t[4], c) r4 = lo128(t[4]) & reduce_mask_51; shr128(c, t[4], 51);
    r0 += c * 19; c = r0 >> 51; r0 = r0 & reduce_mask_51;
    r1 += c;

    out[0] = r0;
    out[1] = r1;
    out[2] = r2;
    out[3] = r3;
    out[4] = r4;
}

/* Take a little-endian, 32-byte number and expand it into polynomial form */
DONNA_INLINE static void
curve25519_expand(bignum25519 out, const unsigned char *in) {
    static const union { uint8_t b[2]; uint16_t s; } endian_check = {{1,0}};
    uint64_t x0,x1,x2,x3;

    if (endian_check.s == 1) {
        x0 = *(uint64_t *)(in + 0);
        x1 = *(uint64_t *)(in + 8);
        x2 = *(uint64_t *)(in + 16);
        x3 = *(uint64_t *)(in + 24);
    } else {
        #define F(s)                         \
            ((((uint64_t)in[s + 0])      ) | \
             (((uint64_t)in[s + 1]) <<  8) | \
             (((uint64_t)in[s + 2]) << 16) | \
             (((uint64_t)in[s + 3]) << 24) | \
             (((uint64_t)in[s + 4]) << 32) | \
             (((uint64_t)in[s + 5]) << 40) | \
             (((uint64_t)in[s + 6]) << 48) | \
             (((uint64_t)in[s + 7]) << 56))

        x0 = F(0);
        x1 = F(8);
        x2 = F(16);
        x3 = F(24);
    }

    out[0] = x0 & reduce_mask_51; x0 = (x0 >> 51) | (x1 << 13);
    out[1] = x0 & reduce_mask_51; x1 = (x1 >> 38) | (x2 << 26);
    out[2] = x1 & reduce_mask_51; x2 = (x2 >> 25) | (x3 << 39);
    out[3] = x2 & reduce_mask_51; x3 = (x3 >> 12);
    out[4] = x3 & reduce_mask_51;
}
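/*
 * Limb layout: the expanded value is out[0] + out[1]*2^51 + out[2]*2^102 +
 * out[3]*2^153 + out[4]*2^204, i.e. five radix-2^51 limbs holding the low
 * 255 bits of the input.  The shift pairs above re-align the four 64-bit
 * input words to 51-bit boundaries; e.g. out[1] holds input bits 51..101,
 * built from the top 13 bits of x0 and the low 38 bits of x1, hence
 * (x0 >> 51) | (x1 << 13) followed by the mask with reduce_mask_51.
 */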

/* Take a fully reduced polynomial form number and contract it into a
 * little-endian, 32-byte array
 */
DONNA_INLINE static void
curve25519_contract(unsigned char *out, const bignum25519 input) {
    uint64_t t[5];
    uint64_t f, i;

    t[0] = input[0];
    t[1] = input[1];
    t[2] = input[2];
    t[3] = input[3];
    t[4] = input[4];

    #define curve25519_contract_carry() \
        t[1] += t[0] >> 51; t[0] &= reduce_mask_51; \
        t[2] += t[1] >> 51; t[1] &= reduce_mask_51; \
        t[3] += t[2] >> 51; t[2] &= reduce_mask_51; \
        t[4] += t[3] >> 51; t[3] &= reduce_mask_51;

    #define curve25519_contract_carry_full() curve25519_contract_carry() \
        t[0] += 19 * (t[4] >> 51); t[4] &= reduce_mask_51;

    #define curve25519_contract_carry_final() curve25519_contract_carry() \
        t[4] &= reduce_mask_51;

    curve25519_contract_carry_full()
    curve25519_contract_carry_full()

    /* now t is between 0 and 2^255-1, properly carried. */
    /* case 1: between 0 and 2^255-20. case 2: between 2^255-19 and 2^255-1. */
    t[0] += 19;
    curve25519_contract_carry_full()

    /* now between 19 and 2^255-1 in both cases, and offset by 19. */
    t[0] += (reduce_mask_51 + 1) - 19;
    t[1] += (reduce_mask_51 + 1) - 1;
    t[2] += (reduce_mask_51 + 1) - 1;
    t[3] += (reduce_mask_51 + 1) - 1;
    t[4] += (reduce_mask_51 + 1) - 1;

    /* now between 2^255 and 2^256-20, and offset by 2^255. */
    curve25519_contract_carry_final()

    #define write51full(n,shift) \
        f = ((t[n] >> shift) | (t[n+1] << (51 - shift))); \
        for (i = 0; i < 8; i++, f >>= 8) *out++ = (unsigned char)f;
    #define write51(n) write51full(n,13*n)
    write51(0)
    write51(1)
    write51(2)
    write51(3)
}
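/*
 * Worked check of the final reduction: after the first two carry passes,
 * t < 2^255 and t = input (mod p) with p = 2^255 - 19.  Adding 19 and
 * folding the 2^255 overflow back as 19 leaves t = (input mod p) + 19 in
 * both cases.  The per-limb offsets then add
 *   (2^51 - 19) + (2^51 - 1)*(2^51 + 2^102 + 2^153 + 2^204) = 2^255 - 19,
 * for a running total of exactly +2^255, and curve25519_contract_carry_final
 * masks that single high bit away, so the bytes written out are the unique
 * canonical representative of input mod p.
 */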

#if !defined(ED25519_GCC_64BIT_CHOOSE)

/* out = (flag) ? in : out */
DONNA_INLINE static void
curve25519_move_conditional_bytes(uint8_t out[96], const uint8_t in[96], uint64_t flag) {
    const uint64_t nb = flag - 1, b = ~nb;
    const uint64_t *inq = (const uint64_t *)in;
    uint64_t *outq = (uint64_t *)out;
    outq[0] = (outq[0] & nb) | (inq[0] & b);
    outq[1] = (outq[1] & nb) | (inq[1] & b);
    outq[2] = (outq[2] & nb) | (inq[2] & b);
    outq[3] = (outq[3] & nb) | (inq[3] & b);
    outq[4] = (outq[4] & nb) | (inq[4] & b);
    outq[5] = (outq[5] & nb) | (inq[5] & b);
    outq[6] = (outq[6] & nb) | (inq[6] & b);
    outq[7] = (outq[7] & nb) | (inq[7] & b);
    outq[8] = (outq[8] & nb) | (inq[8] & b);
    outq[9] = (outq[9] & nb) | (inq[9] & b);
    outq[10] = (outq[10] & nb) | (inq[10] & b);
    outq[11] = (outq[11] & nb) | (inq[11] & b);
}
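/*
 * Mask check (flag is expected to be 0 or 1): flag == 1 gives nb == 0 and
 * b == ~0, so every outq[i] becomes inq[i]; flag == 0 gives nb == ~0 and
 * b == 0, so out is left unchanged.  The 96-byte (12 x 8) copy therefore
 * selects without branching, i.e. in constant time with respect to the
 * secret flag.
 */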

/* if (iswap) swap(a, b) */
DONNA_INLINE static void
curve25519_swap_conditional(bignum25519 a, bignum25519 b, uint64_t iswap) {
    const uint64_t swap = (uint64_t)(-(int64_t)iswap);
    uint64_t x0,x1,x2,x3,x4;

    x0 = swap & (a[0] ^ b[0]); a[0] ^= x0; b[0] ^= x0;
    x1 = swap & (a[1] ^ b[1]); a[1] ^= x1; b[1] ^= x1;
    x2 = swap & (a[2] ^ b[2]); a[2] ^= x2; b[2] ^= x2;
    x3 = swap & (a[3] ^ b[3]); a[3] ^= x3; b[3] ^= x3;
    x4 = swap & (a[4] ^ b[4]); a[4] ^= x4; b[4] ^= x4;
}
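/*
 * Same constant-time idea with XOR: iswap of 1 expands to an all-ones mask,
 * so x_i = a[i] ^ b[i] and the two XOR assignments exchange the limbs
 * (a ^ x == b, b ^ x == a); iswap of 0 makes every x_i zero and both
 * operands are left untouched.
 */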

#endif /* ED25519_GCC_64BIT_CHOOSE */

#define ED25519_64BIT_TABLES

@@ -1,67 +0,0 @@
/*
    Public domain by Andrew M. <liquidsun@gmail.com>
    See: https://github.com/floodyberry/curve25519-donna

    Curve25519 implementation agnostic helpers
*/

/*
 * In:  b =   2^5 - 2^0
 * Out: b = 2^250 - 2^0
 */
static void
curve25519_pow_two5mtwo0_two250mtwo0(bignum25519 b) {
    bignum25519 ALIGN(16) t0,c;

    /* 2^5  - 2^0 */ /* b */
    /* 2^10 - 2^5 */ curve25519_square_times(t0, b, 5);
    /* 2^10 - 2^0 */ curve25519_mul_noinline(b, t0, b);
    /* 2^20 - 2^10 */ curve25519_square_times(t0, b, 10);
    /* 2^20 - 2^0 */ curve25519_mul_noinline(c, t0, b);
    /* 2^40 - 2^20 */ curve25519_square_times(t0, c, 20);
    /* 2^40 - 2^0 */ curve25519_mul_noinline(t0, t0, c);
    /* 2^50 - 2^10 */ curve25519_square_times(t0, t0, 10);
    /* 2^50 - 2^0 */ curve25519_mul_noinline(b, t0, b);
    /* 2^100 - 2^50 */ curve25519_square_times(t0, b, 50);
    /* 2^100 - 2^0 */ curve25519_mul_noinline(c, t0, b);
    /* 2^200 - 2^100 */ curve25519_square_times(t0, c, 100);
    /* 2^200 - 2^0 */ curve25519_mul_noinline(t0, t0, c);
    /* 2^250 - 2^50 */ curve25519_square_times(t0, t0, 50);
    /* 2^250 - 2^0 */ curve25519_mul_noinline(b, t0, b);
}
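/*
 * How the ladder composes: curve25519_square_times(x, y, k) maps an exponent
 * of the form 2^a - 2^b to 2^(a+k) - 2^(b+k), and curve25519_mul_noinline
 * adds exponents.  E.g. the pair of steps labelled "2^20 - 2^10" and
 * "2^20 - 2^0" above is
 *   (2^10 - 2^0) * 2^10 = 2^20 - 2^10,
 *   (2^20 - 2^10) + (2^10 - 2^0) = 2^20 - 2^0.
 */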

/*
 * z^(p - 2) = z^(2^255 - 21)
 */
static void
curve25519_recip(bignum25519 out, const bignum25519 z) {
    bignum25519 ALIGN(16) a,t0,b;

    /* 2 */ curve25519_square_times(a, z, 1); /* a = 2 */
    /* 8 */ curve25519_square_times(t0, a, 2);
    /* 9 */ curve25519_mul_noinline(b, t0, z); /* b = 9 */
    /* 11 */ curve25519_mul_noinline(a, b, a); /* a = 11 */
    /* 22 */ curve25519_square_times(t0, a, 1);
    /* 2^5 - 2^0 = 31 */ curve25519_mul_noinline(b, t0, b);
    /* 2^250 - 2^0 */ curve25519_pow_two5mtwo0_two250mtwo0(b);
    /* 2^255 - 2^5 */ curve25519_square_times(b, b, 5);
    /* 2^255 - 21 */ curve25519_mul_noinline(out, b, a);
}
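/*
 * Why this exponent works: p = 2^255 - 19, so p - 2 = 2^255 - 21, and by
 * Fermat's little theorem z^(p-2) = z^-1 (mod p) for any non-zero z.
 * A typical use (illustrative sketch only; X and Z are hypothetical
 * projective coordinates, not names defined in this file):
 *
 *   bignum25519 ALIGN(16) zinv, x_affine;
 *   curve25519_recip(zinv, Z);            // zinv = Z^-1 mod p
 *   curve25519_mul(x_affine, X, zinv);    // affine x = X / Z
 */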

/*
 * z^((p-5)/8) = z^(2^252 - 3)
 */
static void
curve25519_pow_two252m3(bignum25519 two252m3, const bignum25519 z) {
    bignum25519 ALIGN(16) b,c,t0;

    /* 2 */ curve25519_square_times(c, z, 1); /* c = 2 */
    /* 8 */ curve25519_square_times(t0, c, 2); /* t0 = 8 */
    /* 9 */ curve25519_mul_noinline(b, t0, z); /* b = 9 */
    /* 11 */ curve25519_mul_noinline(c, b, c); /* c = 11 */
    /* 22 */ curve25519_square_times(t0, c, 1);
    /* 2^5 - 2^0 = 31 */ curve25519_mul_noinline(b, t0, b);
    /* 2^250 - 2^0 */ curve25519_pow_two5mtwo0_two250mtwo0(b);
    /* 2^252 - 2^2 */ curve25519_square_times(b, b, 2);
    /* 2^252 - 3 */ curve25519_mul_noinline(two252m3, b, z);
}
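/*
 * Exponent check: (p - 5)/8 = (2^255 - 24)/8 = 2^252 - 3.  Because
 * p = 5 (mod 8), a candidate square root of x can be computed as
 * x^((p+3)/8) = x * x^((p-5)/8), so this helper is the building block for
 * square-root extraction (as used in ed25519 point decompression); when the
 * candidate squares to -x instead of x, it is corrected by a multiplication
 * with sqrt(-1) mod p.
 */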