Mirror of https://github.com/XRPLF/rippled.git (synced 2025-11-04 19:25:51 +00:00)

Compare commits: 1.1.2...ci/a1q1234 (1655 commits)
.clang-format
@@ -18,7 +18,7 @@ AlwaysBreakBeforeMultilineStrings: true
 AlwaysBreakTemplateDeclarations: true
 BinPackArguments: false
 BinPackParameters: false
 BraceWrapping:
   AfterClass: true
   AfterControlStatement: true
   AfterEnum: false
@@ -43,19 +43,27 @@ Cpp11BracedListStyle: true
 DerivePointerAlignment: false
 DisableFormat: false
 ExperimentalAutoDetectBinPacking: false
-ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ]
+ForEachMacros: [ Q_FOREACH, BOOST_FOREACH ]
+IncludeBlocks: Regroup
 IncludeCategories:
-  - Regex: '^<(BeastConfig)'
+  - Regex: '^<(test)/'
     Priority: 0
-  - Regex: '^<(ripple)/'
+  - Regex: '^<(xrpld)/'
     Priority: 1
+  - Regex: '^<(xrpl)/'
+    Priority: 2
   - Regex: '^<(boost)/'
     Priority: 3
-  - Regex: '.*'
+  - Regex: '^.*/'
     Priority: 4
+  - Regex: '^.*\.h'
+    Priority: 5
+  - Regex: '.*'
+    Priority: 6
 IncludeIsMainRegex: '$'
 IndentCaseLabels: true
 IndentFunctionDeclarationAfterType: false
+IndentRequiresClause: true
 IndentWidth: 4
 IndentWrappedFunctionNames: false
 KeepEmptyLinesAtTheStartOfBlocks: false
@@ -71,6 +79,7 @@ PenaltyExcessCharacter: 1000000
 PenaltyReturnTypeOnItsOwnLine: 200
 PointerAlignment: Left
 ReflowComments: true
+RequiresClausePosition: OwnLine
 SortIncludes: true
 SpaceAfterCStyleCast: false
 SpaceBeforeAssignmentOperators: true
@@ -84,4 +93,5 @@ SpacesInParentheses: false
 SpacesInSquareBrackets: false
 Standard: Cpp11
 TabWidth: 8
 UseTab: Never
+QualifierAlignment: Right
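
For context, a hedged usage sketch (not part of the diff): clang-format discovers the nearest .clang-format file automatically, so with this file at the repository root a source tree can be reformatted in place; the file path below is illustrative, not taken from the diff:

    # format a single file in place using the repo's .clang-format
    clang-format -i src/libxrpl/protocol/STAmount.cpp
    # format all C++ sources under src/ in one pass
    find src -type f \( -name '*.cpp' -o -name '*.h' \) -print0 | xargs -0 clang-format -i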
41  .codecov.yml
@@ -1,6 +1,37 @@
 codecov:
-  ci:
-    - ci.ops.ripple.com # add custom jenkins server
-    - !appveyor
-    - !travis
   require_ci_to_pass: true
+
+comment:
+  behavior: default
+  layout: reach,diff,flags,tree,reach
+  show_carryforward_flags: false
+
+coverage:
+  range: "70..85"
+  precision: 1
+  round: nearest
+  status:
+    project:
+      default:
+        target: 75%
+        threshold: 2%
+    patch:
+      default:
+        target: auto
+        threshold: 2%
+    changes: false
+
+github_checks:
+  annotations: true
+
+parsers:
+  cobertura:
+    partials_as_hits: true
+    handle_missing_conditions : true
+
+slack_app: false
+
+ignore:
+  - "src/test/"
+  - "include/xrpl/beast/test/"
+  - "include/xrpl/beast/unit_test/"
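
A hedged aside: Codecov exposes a public YAML validation endpoint (documented at the time of writing, and subject to change), so edits to this file can be sanity-checked before pushing:

    curl --data-binary @.codecov.yml https://codecov.io/validate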
13  .git-blame-ignore-revs  Normal file
@@ -0,0 +1,13 @@
# This feature requires Git >= 2.24
# To use it by default in git blame:
# git config blame.ignoreRevsFile .git-blame-ignore-revs
50760c693510894ca368e90369b0cc2dabfd07f3
e2384885f5f630c8f0ffe4bf21a169b433a16858
241b9ddde9e11beb7480600fd5ed90e1ef109b21
760f16f56835663d9286bd29294d074de26a7ba6
0eebe6a5f4246fced516d52b83ec4e7f47373edd
2189cc950c0cebb89e4e2fa3b2d8817205bf7cef
b9d007813378ad0ff45660dc07285b823c7e9855
fe9a5365b8a52d4acc42eb27369247e6f238a4f9
9a93577314e6a8d4b4a8368cc9d2b15a5d8303e8
552377c76f55b403a1c876df873a23d780fcc81c
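
As the file's own comments describe, this list feeds git blame; a short usage sketch (the blamed path is illustrative, not from the diff):

    # one-off: skip the listed formatting commits for a single blame
    git blame --ignore-revs-file .git-blame-ignore-revs src/xrpld/app/main/Main.cpp
    # persistent: apply the file to every blame in this clone (Git >= 2.24)
    git config blame.ignoreRevsFile .git-blame-ignore-revs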
8  .github/CODEOWNERS  vendored  Normal file
@@ -0,0 +1,8 @@
# Allow anyone to review any change by default.
*

# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers
31  .github/ISSUE_TEMPLATE/bug_report.md  vendored  Normal file
@@ -0,0 +1,31 @@
---
name: Bug Report
about: Create a report to help us improve rippled
title: "[Title with short description] (Version: [rippled version])"
labels: ''
assignees: ''

---
<!-- Please search existing issues to avoid creating duplicates.-->

## Issue Description
<!--Provide a summary for your issue/bug.-->

## Steps to Reproduce
<!--List in detail the exact steps to reproduce the unexpected behavior of the software.-->

## Expected Result
<!--Explain in detail what behavior you expected to happen.-->

## Actual Result
<!--Explain in detail what behavior actually happened.-->

## Environment
<!--Please describe your environment setup (such as Ubuntu 18.04 with Boost 1.70).-->
<!-- If you are using a formal release, please use the version returned by './rippled --version' as the version number-->
<!-- If you are working off of develop, please add the git hash via 'git rev-parse HEAD'-->

## Supporting Files
<!--If you have supporting files such as a log, feel free to post a link here using Github Gist.-->
<!--Consider adding configuration files with private information removed via Github Gist. -->
8  .github/ISSUE_TEMPLATE/config.yml  vendored  Normal file
@@ -0,0 +1,8 @@
blank_issues_enabled: false
contact_links:
  - name: XRP Ledger Documentation
    url: https://xrpl.org/
    about: All things about XRPL
  - name: Security bug bounty program
    url: https://ripple.com/bug-bounty/
    about: Please report security-relevant bugs in our software here.
21  .github/ISSUE_TEMPLATE/feature_request.md  vendored  Normal file
@@ -0,0 +1,21 @@
---
name: Feature Request
about: Suggest a new feature for the rippled project
title: "[Title with short description] (Version: [rippled version])"
labels: Feature Request
assignees: ''

---
<!-- Please search existing issues to avoid creating duplicates.-->

## Summary
<!-- Provide a summary to the feature request-->

## Motivation
<!-- Why do we need this feature?-->

## Solution
<!-- What is the solution?-->

## Paths Not Taken
<!-- What other alternatives have been considered?-->
76  .github/actions/build-test/action.yml  vendored  Normal file
@@ -0,0 +1,76 @@
name: Build and Test (Linux and MacOS)

inputs:
  build_dir:
    description: 'The directory where to build.'
    required: true
    type: string
  build_type:
    description: 'The build type to use.'
    required: true
    type: string
  cmake_args:
    description: 'Additional arguments to pass to CMake.'
    required: false
    type: string
  cmake_generator:
    description: 'The CMake generator to use for the build.'
    required: true
    type: string
  cmake_target:
    description: 'The CMake target to build.'
    required: true
    type: string
  os:
    description: 'A string representing which operating system is used.'
    required: true
    type: choice
    options:
      - Linux
      - MacOS
      - Windows

# Install the Conan profiles and log into the specified remote. We first remove
# the remote if it already exists, which can occur on self-hosted runners where
# the workspace is not cleaned up between runs.
runs:
  using: composite
  steps:
    - name: Configure CMake
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        cmake \
          ${{ inputs.cmake_generator && format('-G "{0}"', inputs.cmake_generator) || '' }} \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
          -Dtests=TRUE \
          -Dxrpld=TRUE \
          ${{ inputs.cmake_args }} \
          ..
    - name: Build the binary
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        cmake --build . \
          --config ${{ inputs.build_type }} \
          --parallel $(nproc) \
          --target ${{ inputs.cmake_target }}
    - name: Check linking
      if: inputs.os == 'Linux'
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        ldd ./rippled
        if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
          echo 'The binary is statically linked.'
        else
          echo 'The binary is dynamically linked.'
          exit 1
        fi
    - name: Test the binary
      shell: bash
      working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'Windows' && inputs.build_type || '' }}
      run: |
        ./rippled --unittest --unittest-jobs $(nproc)
        ctest -j $(nproc) --output-on-failure
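
Outside the workflow, these steps reduce to a plain CMake sequence; a minimal local sketch, assuming the Conan dependencies were already installed into .build (see the install-dependencies action below) and substituting Release, Ninja, and all for the action inputs:

    cd .build
    cmake -G Ninja \
      -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
      -DCMAKE_BUILD_TYPE=Release -Dtests=TRUE -Dxrpld=TRUE ..
    cmake --build . --config Release --parallel $(nproc) --target all
    ./rippled --unittest --unittest-jobs $(nproc)
    ctest -j $(nproc) --output-on-failure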
51  .github/actions/configure-conan/action.yml  vendored  Normal file
@@ -0,0 +1,51 @@
name: Configure Conan

inputs:
  conan_global_conf:
    description: 'The contents of the global Conan configuration file.'
    required: true
    type: string
  conan_remote_name:
    description: 'The name of the Conan remote to use.'
    required: true
    type: string
  conan_remote_url:
    description: 'The URL of the Conan remote to use.'
    required: true
    type: string
  conan_remote_username:
    description: 'The username for logging into the Conan remote.'
    required: true
    type: string
  conan_remote_password:
    description: 'The password for logging into the Conan remote.'
    required: true
    type: string

# Install the Conan profiles and log into the specified remote. We first remove
# the remote if it already exists, which can occur on self-hosted runners where
# the workspace is not cleaned up between runs.
runs:
  using: composite
  steps:
    - name: Install Conan profile
      shell: bash
      run: |
        echo "${{ inputs.conan_global_conf }}" >> $(conan config home)/global.conf
        conan config install conan/profiles/default -tf $(conan config home)/profiles/
        echo "Installed Conan profile:"
        conan profile show
    - name: Add Conan remote
      shell: bash
      run: |
        if conan remote list | grep -q '${{ inputs.conan_remote_name }}'; then
          conan remote remove ${{ inputs.conan_remote_name }}
          echo "Removed Conan remote '${{ inputs.conan_remote_name }}'."
        fi
        conan remote add --index 0 ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
        echo "Added new conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
    - name: Log into Conan remote
      shell: bash
      run: |
        conan remote login ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_username }} --password "${{ inputs.conan_remote_password }}"
        conan remote list-users
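
The same setup can be reproduced on a developer machine; a sketch with placeholder values substituted for the action inputs (the remote name, URL, and credential variables below are assumptions, not values from the diff):

    conan config install conan/profiles/default -tf $(conan config home)/profiles/
    conan remote add --index 0 xrplf https://conan.example.com/artifactory   # hypothetical remote
    conan remote login xrplf "$CONAN_USER" --password "$CONAN_PASSWORD"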
37  .github/actions/install-dependencies/action.yml  vendored  Normal file
@@ -0,0 +1,37 @@
name: Install-dependencies

inputs:
  build_dir:
    description: 'The directory where to build.'
    required: true
    type: string
  build_type:
    description: 'The build type to use.'
    required: true
    type: string
  conan_remote_name:
    description: 'The name of the Conan remote to use.'
    required: true
    type: string

# Install the Conan profiles and log into the specified remote. We first remove
# the remote if it already exists, which can occur on self-hosted runners where
# the workspace is not cleaned up between runs.
runs:
  using: composite
  steps:
    - name: Install Conan dependencies
      shell: bash
      run: |
        mkdir -p ${{ inputs.build_dir }}
        cd ${{ inputs.build_dir }}
        conan install \
          --output-folder . \
          --build * \
          --options:host "&:tests=True" \
          --options:host "&:xrpld=True" \
          --settings:all build_type=${{ inputs.build_type }} \
          ..
    - name: Upload Conan dependencies
      shell: bash
      run: conan upload '*' --confirm --remote ${{ inputs.conan_remote_name }}
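
Run by hand, the dependency step is a single conan install; a sketch with Release substituted for the build_type input (the '*' is quoted here to keep the shell from globbing):

    mkdir -p .build && cd .build
    conan install \
      --output-folder . \
      --build '*' \
      --options:host '&:tests=True' \
      --options:host '&:xrpld=True' \
      --settings:all build_type=Release \
      ..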
85  .github/pull_request_template.md  vendored  Normal file
@@ -0,0 +1,85 @@
<!--
This PR template helps you to write a good pull request description.
Please feel free to include additional useful information even beyond what is requested below.

If your branch is on a personal fork and has a name that allows it to
run CI build/test jobs (e.g. "ci/foo"), remember to rename it BEFORE
opening the PR. This avoids unnecessary redundant test runs. Renaming
the branch after opening the PR will close the PR.
https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/managing-branches-in-your-repository/renaming-a-branch
-->

## High Level Overview of Change

<!--
Please include a summary of the changes.
This may be a direct input to the release notes.
If too broad, please consider splitting into multiple PRs.
If a relevant task or issue, please link it here.
-->

### Context of Change

<!--
Please include the context of a change.
If a bug fix, when was the bug introduced? What was the behavior?
If a new feature, why was this architecture chosen? What were the alternatives?
If a refactor, how is this better than the previous implementation?

If there is a spec or design document for this feature, please link it here.
-->

### Type of Change

<!--
Please check [x] relevant options, delete irrelevant ones.
-->

- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Refactor (non-breaking change that only restructures code)
- [ ] Performance (increase or change in throughput and/or latency)
- [ ] Tests (you added tests for code that already exists, or your new feature included in this PR)
- [ ] Documentation update
- [ ] Chore (no impact to binary, e.g. `.gitignore`, formatting, dropping support for older tooling)
- [ ] Release

### API Impact

<!--
Please check [x] relevant options, delete irrelevant ones.

* If there is any impact to the public API methods (HTTP / WebSocket), please update https://github.com/xrplf/rippled/blob/develop/API-CHANGELOG.md
  * Update API-CHANGELOG.md and add the change directly in this PR by pushing to your PR branch.
* libxrpl: See https://github.com/XRPLF/rippled/blob/develop/docs/build/depend.md
* Peer Protocol: See https://xrpl.org/peer-protocol.html
-->

- [ ] Public API: New feature (new methods and/or new fields)
- [ ] Public API: Breaking change (in general, breaking changes should only impact the next api_version)
- [ ] `libxrpl` change (any change that may affect `libxrpl` or dependents of `libxrpl`)
- [ ] Peer protocol change (must be backward compatible or bump the peer protocol version)

<!--
## Before / After
If relevant, use this section for an English description of the change at a technical level.
If this change affects an API, examples should be included here.

For performance-impacting changes, please provide these details:
1. Is this a new feature, bug fix, or improvement to existing functionality?
2. What behavior/functionality does the change impact?
3. In what processing can the impact be measured? Be as specific as possible - e.g. RPC client call, payment transaction that involves LOB, AMM, caching, DB operations, etc.
4. Does this change affect concurrent processing - e.g. does it involve acquiring locks, multi-threaded processing, or async processing?
-->

<!--
## Test Plan
If helpful, please describe the tests that you ran to verify your changes and provide instructions so that others can reproduce.
This section may not be needed if your change includes thoroughly commented unit tests.
-->

<!--
## Future Tasks
For future tasks related to PR.
-->
225  .github/workflows/build-debian.yml  vendored  Normal file
@@ -0,0 +1,225 @@
# This workflow builds and tests the binary on various Debian configurations.
name: Debian

on:
  workflow_call:
    inputs:
      build_dir:
        description: 'The directory where to build.'
        required: false
        type: string
        default: '.build'
      conan_remote_name:
        description: 'The name of the Conan remote to use.'
        required: true
        type: string
      conan_remote_url:
        description: 'The URL of the Conan remote to use.'
        required: true
        type: string
    secrets:
      conan_remote_username:
        description: 'The username for logging into the Conan remote.'
        required: true
      conan_remote_password:
        description: 'The password for logging into the Conan remote.'
        required: true

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-debian
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  # Global configuration for Conan. This is used to set the number of parallel
  # downloads, uploads, and build jobs. The verbosity is set to verbose to
  # provide more information during the build process.
  CONAN_GLOBAL_CONF: |
    core.download:parallel={{ os.cpu_count() }}
    core.upload:parallel={{ os.cpu_count() }}
    tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
    tools.build:verbosity=verbose
    tools.compilation:verbosity=verbose
  # GitHub does not allow us to specify a reusable matrix strategy, so to avoid
  # duplication, we define it here using environment variables and create the
  # matrix in the first job. The matrix defined below should be kept in sync
  # with https://github.com/XRPLF/ci/blob/main/.github/workflows/debian.yml.
  STRATEGY_MATRIX_ARCHITECTURE: >-
    [
      {
        "platform": "linux/amd64",
        "runner": ["self-hosted", "Linux", "X64", "devbox"]
      },
      {
        "platform": "linux/arm64",
        "runner": ["self-hosted", "Linux", "ARM64", "devbox"]
      }
    ]
  STRATEGY_MATRIX_OS: >-
    [
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "gcc",
        "compiler_version": "12"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "gcc",
        "compiler_version": "13"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "gcc",
        "compiler_version": "14"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "clang",
        "compiler_version": "16"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "clang",
        "compiler_version": "17"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "clang",
        "compiler_version": "18"
      },
      {
        "distro": "debian",
        "release": "bookworm",
        "compiler_name": "clang",
        "compiler_version": "19"
      }
    ]
  STRATEGY_MATRIX_BUILD_TYPE: >-
    [
      "Debug",
      "Release"
    ]
  STRATEGY_MATRIX_CMAKE_ARGS: >-
    [
      "-DUnity=OFF",
      "-DUnity=ON"
    ]
  # STRATEGY_MATRIX_ARCHITECTURE: >-
  #   [
  #     {
  #       "platform": "linux/amd64",
  #       "runner": ["self-hosted", "Linux", "X64"]
  #     }
  #   ]
  # STRATEGY_MATRIX_OS: >-
  #   [
  #     {
  #       "distro": "debian",
  #       "release": "bookworm",
  #       "compiler_name": "gcc",
  #       "compiler_version": "12"
  #     }
  #   ]
  # STRATEGY_MATRIX_BUILD_TYPE: >-
  #   [
  #     "Release"
  #   ]
  # STRATEGY_MATRIX_CMAKE_ARGS: >-
  #   [
  #     "-DUnity=ON"
  #   ]

jobs:
  # Generate the strategy matrix and expose environment variables to be used by
  # following jobs. Exposing env vars this way is needed as they cannot be
  # directly passed as inputs to reusable workflows (although they can be passed
  # as inputs to actions).
  generate-outputs:
    runs-on: ubuntu-latest
    steps:
      - name: Generate outputs
        id: generate
        run: |
          echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
    outputs:
      conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
      strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
      strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
      strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
      strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}

  # Install and cache the dependencies, and then build and test the binary using
  # various configurations.
  build-test:
    needs:
      - generate-outputs
    runs-on: ${{ matrix.architecture.runner }}
    container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
        os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
        build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
        cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Check configuration
        shell: bash
        run: |
          echo "Checking path"
          echo ${PATH} | tr ':' '\n'

          echo "Checking environment variables."
          env | sort
      - name: Check versions
        shell: bash
        run: |
          echo "Checking CMake version."
          cmake --version

          echo "Checking compiler version."
          ${CC} --version

          echo "Checking Conan version."
          conan --version

          echo "Checking Ninja version."
          ninja --version
      - name: Configure Conan
        uses: ./.github/actions/configure-conan
        with:
          conan_global_conf: ${{ inputs.conan_global_conf }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
          conan_remote_url: ${{ inputs.conan_remote_url }}
          conan_remote_username: ${{ secrets.conan_remote_username }}
          conan_remote_password: ${{ secrets.conan_remote_password }}
      - name: Install dependencies
        uses: ./.github/actions/install-dependencies
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
      - name: Build and test the binary
        uses: ./.github/actions/build-test
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          cmake_args: ${{ matrix.cmake_args }}
          cmake_generator: 'Ninja'
          cmake_target: 'all'
          os: 'Linux'
160 .github/workflows/build-macos.yml vendored Normal file
@@ -0,0 +1,160 @@
# This workflow builds and tests the binary on various MacOS configurations.
name: MacOS

on:
  workflow_call:
    inputs:
      build_dir:
        description: 'The directory where to build.'
        required: false
        type: string
        default: '.build'
      conan_remote_name:
        description: 'The name of the Conan remote to use.'
        required: true
        type: string
      conan_remote_url:
        description: 'The URL of the Conan remote to use.'
        required: true
        type: string
    secrets:
      conan_remote_username:
        description: 'The username for logging into the Conan remote.'
        required: true
      conan_remote_password:
        description: 'The password for logging into the Conan remote.'
        required: true

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-macos
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  # Global configuration for Conan. This is used to set the number of parallel
  # downloads, uploads, and build jobs. The verbosity is set to verbose to
  # provide more information during the build process.
  CONAN_GLOBAL_CONF: |
    core.download:parallel={{ os.cpu_count() }}
    core.upload:parallel={{ os.cpu_count() }}
    tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
    tools.build:verbosity=verbose
    tools.compilation:verbosity=verbose
  # GitHub does not allow us to specify a reusable matrix strategy, so to avoid
  # duplication, we define it here using environment variables and create the
  # matrix in the first job.
  STRATEGY_MATRIX_ARCHITECTURE: >-
    [
      {
        "runner": ["self-hosted", "macOS", "ARM64", "devbox"]
      }
    ]
  STRATEGY_MATRIX_BUILD_TYPE: >-
    [
      "Debug",
      "Release"
    ]
  STRATEGY_MATRIX_CMAKE_ARGS: >-
    [
      "-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=OFF",
      "-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=ON"
    ]
  # STRATEGY_MATRIX_ARCHITECTURE: >-
  #   [
  #     {
  #       "runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"]
  #     }
  #   ]
  # STRATEGY_MATRIX_BUILD_TYPE: >-
  #   [
  #     "Release"
  #   ]
  # STRATEGY_MATRIX_CMAKE_ARGS: >-
  #   [
  #     "-DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DUnity=ON"
  #   ]

jobs:
  # Generate the strategy matrix and expose environment variables to be used by
  # following jobs. Exposing env vars this way is needed as they cannot be
  # directly passed as inputs to reusable workflows (although they can be passed
  # as inputs to actions).
  generate-outputs:
    runs-on: ubuntu-latest
    steps:
      - name: Generate outputs
        id: generate
        run: |
          echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
    outputs:
      conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
      strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
      strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
      strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}

  # Install and cache the dependencies, and then build and test the binary using
  # various configurations.
  build-test:
    needs:
      - generate-outputs
    runs-on: ${{ matrix.architecture.runner }}
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
        build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
        cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Check configuration
        shell: bash
        run: |
          echo "Checking path"
          echo ${PATH} | tr ':' '\n'

          echo "Checking environment variables."
          env | sort
      - name: Check versions
        shell: bash
        run: |
          echo "Checking CMake version."
          cmake --version

          echo "Checking compiler version."
          clang --version

          echo "Checking Conan version."
          conan --version

          echo "Checking Ninja version."
          ninja --version
      - name: Configure Conan
        uses: ./.github/actions/configure-conan
        with:
          conan_global_conf: ${{ inputs.conan_global_conf }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
          conan_remote_url: ${{ inputs.conan_remote_url }}
          conan_remote_username: ${{ secrets.conan_remote_username }}
          conan_remote_password: ${{ secrets.conan_remote_password }}
      - name: Install dependencies
        uses: ./.github/actions/install-dependencies
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
      - name: Build and test the binary
        uses: ./.github/actions/build-test
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          cmake_args: ${{ matrix.cmake_args }}
          cmake_generator: 'Ninja'
          cmake_target: 'all'
          os: 'MacOS'
202 .github/workflows/build-rhel.yml vendored Normal file
@@ -0,0 +1,202 @@
# This workflow builds and tests the binary on various Red Hat Enterprise Linux
# configurations.
name: RHEL

on:
  workflow_call:
    inputs:
      build_dir:
        description: 'The directory where to build.'
        required: false
        type: string
        default: '.build'
      conan_remote_name:
        description: 'The name of the Conan remote to use.'
        required: true
        type: string
      conan_remote_url:
        description: 'The URL of the Conan remote to use.'
        required: true
        type: string
    secrets:
      conan_remote_username:
        description: 'The username for logging into the Conan remote.'
        required: true
      conan_remote_password:
        description: 'The password for logging into the Conan remote.'
        required: true

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-rhel
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  # Global configuration for Conan. This is used to set the number of parallel
  # downloads, uploads, and build jobs. The verbosity is set to verbose to
  # provide more information during the build process.
  CONAN_GLOBAL_CONF: |
    core.download:parallel={{ os.cpu_count() }}
    core.upload:parallel={{ os.cpu_count() }}
    tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
    tools.build:verbosity=verbose
    tools.compilation:verbosity=verbose
  # GitHub does not allow us to specify a reusable matrix strategy, so to avoid
  # duplication, we define it here using environment variables and create the
  # matrix in the first job. The matrix defined below should be kept in sync
  # with https://github.com/XRPLF/ci/blob/main/.github/workflows/rhel.yml.
  STRATEGY_MATRIX_ARCHITECTURE: >-
    [
      {
        "platform": "linux/amd64",
        "runner": ["self-hosted", "Linux", "X64", "devbox"]
      },
      {
        "platform": "linux/arm64",
        "runner": ["self-hosted", "Linux", "ARM64", "devbox"]
      }
    ]
  STRATEGY_MATRIX_OS: >-
    [
      {
        "distro": "rhel",
        "release": "9.6",
        "compiler_name": "gcc",
        "compiler_version": "13"
      },
      {
        "distro": "rhel",
        "release": "9.6",
        "compiler_name": "gcc",
        "compiler_version": "14"
      },
      {
        "distro": "rhel",
        "release": "9.6",
        "compiler_name": "clang",
        "compiler_version": "any"
      }
    ]
  STRATEGY_MATRIX_BUILD_TYPE: >-
    [
      "Debug",
      "Release"
    ]
  STRATEGY_MATRIX_CMAKE_ARGS: >-
    [
      "-DUnity=OFF",
      "-DUnity=ON"
    ]
  # STRATEGY_MATRIX_ARCHITECTURE: >-
  #   [
  #     {
  #       "platform": "linux/amd64",
  #       "runner": ["self-hosted", "Linux", "X64"]
  #     }
  #   ]
  # STRATEGY_MATRIX_OS: >-
  #   [
  #     {
  #       "distro": "rhel",
  #       "release": "9.6",
  #       "compiler_name": "gcc",
  #       "compiler_version": "13"
  #     }
  #   ]
  # STRATEGY_MATRIX_BUILD_TYPE: >-
  #   [
  #     "Release"
  #   ]
  # STRATEGY_MATRIX_CMAKE_ARGS: >-
  #   [
  #     "-DUnity=ON"
  #   ]

jobs:
  # Generate the strategy matrix and expose environment variables to be used by
  # following jobs. Exposing env vars this way is needed as they cannot be
  # directly passed as inputs to reusable workflows (although they can be passed
  # as inputs to actions).
  generate-outputs:
    runs-on: ubuntu-latest
    steps:
      - name: Generate outputs
        id: generate
        run: |
          echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
    outputs:
      conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
      strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
      strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
      strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
      strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}

  # Install and cache the dependencies, and then build and test the binary using
  # various configurations.
  build-test:
    needs:
      - generate-outputs
    runs-on: ${{ matrix.architecture.runner }}
    container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
        os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
        build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
        cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Check configuration
        shell: bash
        run: |
          echo "Checking path"
          echo ${PATH} | tr ':' '\n'

          echo "Checking environment variables."
          env | sort
      - name: Check versions
        shell: bash
        run: |
          echo "Checking CMake version."
          cmake --version

          echo "Checking compiler version."
          ${CC} --version

          echo "Checking Conan version."
          conan --version

          echo "Checking Ninja version."
          ninja --version
      - name: Configure Conan
        uses: ./.github/actions/configure-conan
        with:
          conan_global_conf: ${{ inputs.conan_global_conf }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
          conan_remote_url: ${{ inputs.conan_remote_url }}
          conan_remote_username: ${{ secrets.conan_remote_username }}
          conan_remote_password: ${{ secrets.conan_remote_password }}
      - name: Install dependencies
        uses: ./.github/actions/install-dependencies
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
      - name: Build and test the binary
        uses: ./.github/actions/build-test
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          cmake_args: ${{ matrix.cmake_args }}
          cmake_generator: 'Ninja'
          cmake_target: 'all'
          os: 'Linux'
225 .github/workflows/build-ubuntu.yml vendored Normal file
@@ -0,0 +1,225 @@
# This workflow builds and tests the binary on various Ubuntu configurations.
name: Ubuntu

on:
  workflow_call:
    inputs:
      build_dir:
        description: 'The directory where to build.'
        required: false
        type: string
        default: '.build'
      conan_remote_name:
        description: 'The name of the Conan remote to use.'
        required: true
        type: string
      conan_remote_url:
        description: 'The URL of the Conan remote to use.'
        required: true
        type: string
    secrets:
      conan_remote_username:
        description: 'The username for logging into the Conan remote.'
        required: true
      conan_remote_password:
        description: 'The password for logging into the Conan remote.'
        required: true

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-ubuntu
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  # Global configuration for Conan. This is used to set the number of parallel
  # downloads, uploads, and build jobs. The verbosity is set to verbose to
  # provide more information during the build process.
  CONAN_GLOBAL_CONF: |
    core.download:parallel={{ os.cpu_count() }}
    core.upload:parallel={{ os.cpu_count() }}
    tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
    tools.build:verbosity=verbose
    tools.compilation:verbosity=verbose
  # GitHub does not allow us to specify a reusable matrix strategy, so to avoid
  # duplication, we define it here using environment variables and create the
  # matrix in the first job. The matrix defined below should be kept in sync
  # with https://github.com/XRPLF/ci/blob/main/.github/workflows/ubuntu.yml.
  STRATEGY_MATRIX_ARCHITECTURE: >-
    [
      {
        "platform": "linux/amd64",
        "runner": ["self-hosted", "Linux", "X64", "devbox"]
      },
      {
        "platform": "linux/arm64",
        "runner": ["self-hosted", "Linux", "ARM64", "devbox"]
      }
    ]
  STRATEGY_MATRIX_OS: >-
    [
      {
        "distro": "ubuntu",
        "release": "jammy",
        "compiler_name": "gcc",
        "compiler_version": "12"
      },
      {
        "distro": "ubuntu",
        "release": "noble",
        "compiler_name": "gcc",
        "compiler_version": "13"
      },
      {
        "distro": "ubuntu",
        "release": "noble",
        "compiler_name": "gcc",
        "compiler_version": "14"
      },
      {
        "distro": "ubuntu",
        "release": "noble",
        "compiler_name": "clang",
        "compiler_version": "16"
      },
      {
        "distro": "ubuntu",
        "release": "noble",
        "compiler_name": "clang",
        "compiler_version": "17"
      },
      {
        "distro": "ubuntu",
        "release": "noble",
        "compiler_name": "clang",
        "compiler_version": "18"
      },
      {
        "distro": "ubuntu",
        "release": "noble",
        "compiler_name": "clang",
        "compiler_version": "19"
      }
    ]
  STRATEGY_MATRIX_BUILD_TYPE: >-
    [
      "Debug",
      "Release"
    ]
  STRATEGY_MATRIX_CMAKE_ARGS: >-
    [
      "-DUnity=OFF",
      "-DUnity=ON"
    ]
  # STRATEGY_MATRIX_ARCHITECTURE: >-
  #   [
  #     {
  #       "platform": "linux/amd64",
  #       "runner": ["self-hosted", "Linux", "X64"]
  #     }
  #   ]
  # STRATEGY_MATRIX_OS: >-
  #   [
  #     {
  #       "distro": "ubuntu",
  #       "release": "jammy",
  #       "compiler_name": "gcc",
  #       "compiler_version": "12"
  #     }
  #   ]
  # STRATEGY_MATRIX_BUILD_TYPE: >-
  #   [
  #     "Release"
  #   ]
  # STRATEGY_MATRIX_CMAKE_ARGS: >-
  #   [
  #     "-DUnity=ON"
  #   ]

jobs:
  # Generate the strategy matrix and expose environment variables to be used by
  # following jobs. Exposing env vars this way is needed as they cannot be
  # directly passed as inputs to reusable workflows (although they can be passed
  # as inputs to actions).
  generate-outputs:
    runs-on: ubuntu-latest
    steps:
      - name: Generate outputs
        id: generate
        run: |
          echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_os=$(jq -c <<< '${{ env.STRATEGY_MATRIX_OS }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
    outputs:
      conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
      strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
      strategy_matrix_os: ${{ steps.generate.outputs.strategy_matrix_os }}
      strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
      strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}

  # Install and cache the dependencies, and then build and test the binary using
  # various configurations.
  build-test:
    needs:
      - generate-outputs
    runs-on: ${{ matrix.architecture.runner }}
    container: ghcr.io/xrplf/ci/${{ matrix.os.distro }}-${{ matrix.os.release }}:${{ matrix.os.compiler_name }}-${{ matrix.os.compiler_version }}
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
        os: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_os) }}
        build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
        cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Check configuration
        shell: bash
        run: |
          echo "Checking path"
          echo ${PATH} | tr ':' '\n'

          echo "Checking environment variables."
          env | sort
      - name: Check versions
        shell: bash
        run: |
          echo "Checking CMake version."
          cmake --version

          echo "Checking compiler version."
          ${CC} --version

          echo "Checking Conan version."
          conan --version

          echo "Checking Ninja version."
          ninja --version
      - name: Configure Conan
        uses: ./.github/actions/configure-conan
        with:
          conan_global_conf: ${{ inputs.conan_global_conf }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
          conan_remote_url: ${{ inputs.conan_remote_url }}
          conan_remote_username: ${{ secrets.conan_remote_username }}
          conan_remote_password: ${{ secrets.conan_remote_password }}
      - name: Install dependencies
        uses: ./.github/actions/install-dependencies
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
      - name: Build and test the binary
        uses: ./.github/actions/build-test
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          cmake_args: ${{ matrix.cmake_args }}
          cmake_generator: 'Ninja'
          cmake_target: 'all'
          os: 'Linux'
159 .github/workflows/build-windows.yml vendored Normal file
@@ -0,0 +1,159 @@
# This workflow builds and tests the binary on various Windows configurations.
name: Windows

on:
  workflow_call:
    inputs:
      build_dir:
        description: 'The directory where to build.'
        required: false
        type: string
        default: '.build'
      conan_remote_name:
        description: 'The name of the Conan remote to use.'
        required: true
        type: string
      conan_remote_url:
        description: 'The URL of the Conan remote to use.'
        required: true
        type: string
    secrets:
      conan_remote_username:
        description: 'The username for logging into the Conan remote.'
        required: true
      conan_remote_password:
        description: 'The password for logging into the Conan remote.'
        required: true

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-windows
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  # Global configuration for Conan. This is used to set the number of parallel
  # downloads, uploads, and build jobs. The verbosity is set to verbose to
  # provide more information during the build process.
  CONAN_GLOBAL_CONF: |
    core.download:parallel={{ os.cpu_count() }}
    core.upload:parallel={{ os.cpu_count() }}
    tools.build:jobs={{ (os.cpu_count() * 4/5) | int }}
    tools.build:verbosity=verbose
    tools.compilation:verbosity=verbose
  # GitHub does not allow us to specify a reusable matrix strategy, so to avoid
  # duplication, we define it here using environment variables and create the
  # matrix in the first job.
  STRATEGY_MATRIX_ARCHITECTURE: >-
    [
      {
        "runner": ["self-hosted", "Windows", "devbox"]
      }
    ]
  STRATEGY_MATRIX_BUILD_TYPE: >-
    [
      "Debug",
      "Release"
    ]
  STRATEGY_MATRIX_CMAKE_ARGS: >-
    [
      "-DUnity=OFF",
      "-DUnity=ON"
    ]
  # STRATEGY_MATRIX_ARCHITECTURE: >-
  #   [
  #     {
  #       "runner": "windows-latest"
  #     }
  #   ]
  # STRATEGY_MATRIX_BUILD_TYPE: >-
  #   [
  #     "Debug"
  #   ]
  # STRATEGY_MATRIX_CMAKE_ARGS: >-
  #   [
  #     "-DUnity=ON"
  #   ]

jobs:
  # Generate the strategy matrix and expose environment variables to be used by
  # following jobs. Exposing env vars this way is needed as they cannot be
  # directly passed as inputs to reusable workflows (although they can be passed
  # as inputs to actions).
  generate-outputs:
    runs-on: ubuntu-latest
    steps:
      - name: Generate outputs
        id: generate
        run: |
          echo "strategy_matrix_architecture=$(jq -c <<< '${{ env.STRATEGY_MATRIX_ARCHITECTURE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_build_type=$(jq -c <<< '${{ env.STRATEGY_MATRIX_BUILD_TYPE }}')" >> "$GITHUB_OUTPUT"
          echo "strategy_matrix_cmake_args=$(jq -c <<< '${{ env.STRATEGY_MATRIX_CMAKE_ARGS }}')" >> "$GITHUB_OUTPUT"
    outputs:
      conan_global_conf: ${{ env.CONAN_GLOBAL_CONF }}
      strategy_matrix_architecture: ${{ steps.generate.outputs.strategy_matrix_architecture }}
      strategy_matrix_build_type: ${{ steps.generate.outputs.strategy_matrix_build_type }}
      strategy_matrix_cmake_args: ${{ steps.generate.outputs.strategy_matrix_cmake_args }}

  # Install and cache the dependencies, and then build and test the binary using
  # various configurations.
  build-test:
    needs:
      - generate-outputs
    runs-on: ${{ matrix.architecture.runner }}
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        architecture: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_architecture) }}
        build_type: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_build_type) }}
        cmake_args: ${{ fromJson(needs.generate-outputs.outputs.strategy_matrix_cmake_args) }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Check configuration
        shell: pwsh
        run: |
          echo "Checking path"
          $env:PATH -split ';' | Sort-Object

          echo "Checking environment variables."
      - name: Choose Python
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065
        with:
          python-version: 3.13
      - name: Install Conan
        run: pip install wheel conan
      - name: Check versions
        shell: bash
        run: |
          echo "Checking CMake version."
          cmake --version

          echo "Checking Conan version."
          conan --version
      - name: Configure Conan
        uses: ./.github/actions/configure-conan
        with:
          conan_global_conf: ${{ inputs.conan_global_conf }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
          conan_remote_url: ${{ inputs.conan_remote_url }}
          conan_remote_username: ${{ secrets.conan_remote_username }}
          conan_remote_password: ${{ secrets.conan_remote_password }}
      - name: Install dependencies
        uses: ./.github/actions/install-dependencies
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
      - name: Build and test the binary
        uses: ./.github/actions/build-test
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          cmake_args: ${{ matrix.cmake_args }}
          cmake_generator: 'Visual Studio 17 2022'
          cmake_target: 'install'
          os: 'Windows'
57 .github/workflows/check-clang-format.yml vendored Normal file
@@ -0,0 +1,57 @@
# This workflow checks if the code is formatted according to the .clang-format
# rules.
name: Clang Format

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-clang-format
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  clang-format:
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/tools-rippled-clang-format
    steps:
      # The $GITHUB_WORKSPACE and ${{ github.workspace }} might not point to the
      # same directory for jobs running in containers. The actions/checkout step
      # is *supposed* to checkout into $GITHUB_WORKSPACE and then add it to
      # safe.directory (see instructions at https://github.com/actions/checkout)
      # but that is apparently not happening for some container images. We
      # therefore preemptively add both directories to safe.directory. See also
      # https://github.com/actions/runner/issues/2058 for more details.
      - name: Configure git safe.directory
        run: |
          git config --global --add safe.directory $GITHUB_WORKSPACE
          git config --global --add safe.directory ${{ github.workspace }}
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Check version
        run: clang-format --version
      - name: Format code
        run: find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format -i {} +
      - name: Check for differences
        env:
          MESSAGE: |
            One or more files did not conform to the formatting specified in
            .clang-format. Maybe you did not run 'git-clang-format' or
            'clang-format' before committing, or your version of clang-format
            has an incompatibility with the one used here (see the "Check
            version" step above).

            Run 'git-clang-format --extensions cpp,h,hpp,ipp develop' in your
            repo, and then commit and push the changes.
        run: |
          DIFF=$(git status --porcelain)
          if [ -n "${DIFF}" ]; then
            # Print the files that changed to give the contributor a hint about
            # what to expect when running git-clang-format on their own machine.
            git status
            echo "${MESSAGE}"
            exit 1
          fi
46 .github/workflows/check-levelization.yml vendored Normal file
@@ -0,0 +1,46 @@
# This workflow checks if the dependencies between the modules are correctly
# indexed.
name: Levelization

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-levelization
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  levelization:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Check levelization
        run: Builds/levelization/levelization.sh
      - name: Check for differences
        env:
          MESSAGE: |

            The dependency relationships between the modules in rippled have
            changed, which may be an improvement or a regression.

            A rule of thumb is that if your changes caused something to be
            removed from loops.txt, it's probably an improvement, while if
            something was added, it's probably a regression.

            Run './Builds/levelization/levelization.sh' in your repo, and then
            commit and push the changes. See Builds/levelization/README.md for
            more info.
        run: |
          DIFF=$(git status --porcelain)
          if [ -n "${DIFF}" ]; then
            # Print the differences to give the contributor a hint about what to
            # expect when running levelization on their own machine.
            git diff
            echo "${MESSAGE}"
            exit 1
          fi
84 .github/workflows/check-libxrpl.yml vendored Normal file
@@ -0,0 +1,84 @@
name: Check libXRPL compatibility with Clio
env:
  CONAN_URL: https://conan.ripplex.io
  CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
  CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
on:
  pull_request:
    paths:
      - 'src/libxrpl/protocol/BuildInfo.cpp'
      - 'check-libxrpl.yml'
    types: [opened, reopened, synchronize, ready_for_review]
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  publish:
    if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
    name: Publish libXRPL
    outputs:
      outcome: ${{ steps.upload.outputs.outcome }}
      version: ${{ steps.version.outputs.version }}
      channel: ${{ steps.channel.outputs.channel }}
    runs-on: [self-hosted, heavy]
    container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
    steps:
      - name: Wait for essential checks to succeed
        uses: lewagon/wait-on-check-action@v1.3.4
        with:
          ref: ${{ github.event.pull_request.head.sha || github.sha }}
          running-workflow-name: wait-for-check-regexp
          check-regexp: '(dependencies|test).*linux.*' # Ignore windows and mac tests but make sure linux passes
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          wait-interval: 10
      - name: Checkout
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Generate channel
        id: channel
        run: |
          echo channel="clio/pr_${{ github.event.pull_request.number }}" | tee ${GITHUB_OUTPUT}
      - name: Export new package
        run: |
          conan export . ${{ steps.channel.outputs.channel }}
      - name: Add Ripple Conan remote
        run: |
          conan remote list
          conan remote remove ripple || true
          # Do not quote the URL. An empty string will be accepted (with a non-fatal warning), but a missing argument will not.
          conan remote add ripple ${{ env.CONAN_URL }} --insert 0
      - name: Parse new version
        id: version
        run: |
          echo version="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" \
          | awk -F '"' '{print $2}')" | tee ${GITHUB_OUTPUT}
      - name: Try to authenticate to Ripple Conan remote
        id: remote
        run: |
          # `conan user` implicitly uses the environment variables CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
          # https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
          # https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
          # https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
          echo outcome=$(conan user --remote ripple --password >&2 \
            && echo success || echo failure) | tee ${GITHUB_OUTPUT}
      - name: Upload new package
        id: upload
        if: (steps.remote.outputs.outcome == 'success')
        run: |
          echo "conan upload version ${{ steps.version.outputs.version }} on channel ${{ steps.channel.outputs.channel }}"
          echo outcome=$(conan upload xrpl/${{ steps.version.outputs.version }}@${{ steps.channel.outputs.channel }} --remote ripple --confirm >&2 \
            && echo success || echo failure) | tee ${GITHUB_OUTPUT}
  notify_clio:
    name: Notify Clio
    runs-on: ubuntu-latest
    needs: publish
    env:
      GH_TOKEN: ${{ secrets.CLIO_NOTIFY_TOKEN }}
    steps:
      - name: Notify Clio about new version
        if: (needs.publish.outputs.outcome == 'success')
        run: |
          gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
            /repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
            -F "client_payload[version]=${{ needs.publish.outputs.version }}@${{ needs.publish.outputs.channel }}" \
            -F "client_payload[pr]=${{ github.event.pull_request.number }}"
53 .github/workflows/documentation.yml vendored Normal file
@@ -0,0 +1,53 @@
name: Documentation

# TODO: Use `workflow_run` to trigger this workflow after checks have completed.
# This can only be done if the `checks` workflow already exists on the default
# branch (i.e. `develop`), so we cannot do this yet.
# See https://docs.github.com/en/actions/reference/workflows-and-actions/events-that-trigger-workflows#workflow_run.
on:
  push:
    branches:
      - develop

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  doxygen:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Check configuration
        run: |
          echo "Checking path"
          echo ${PATH} | tr ':' '\n'

          echo "Checking environment variables."
          env | sort
      - name: Check versions
        run: |
          echo "Checking CMake version."
          cmake --version

          echo "Checking Doxygen version."
          doxygen --version
      - name: Build documentation
        run: |
          mkdir build
          cd build
          cmake -Donly_docs=TRUE ..
          cmake --build . --target docs --parallel $(nproc)
      - name: Publish documentation
        uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: build/docs/html
90 .github/workflows/main.yml vendored Normal file
@@ -0,0 +1,90 @@
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as MacOS and Windows.
name: Main

# This workflow is triggered on every push to the repository, including pull
# requests. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label. As GitHub Actions does not support such `if`
# conditions here, the individual jobs will check the pull request state and
# labels to determine whether to run or not. The workflows called by the jobs
# may also have their own conditions to partially or completely skip execution
# as needed.
on: push

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  # check-clang-format:
  #   if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'DraftRunCI')
  #   uses: ./.github/workflows/check-clang-format.yml
  #
  # check-levelization:
  #   if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'DraftRunCI')
  #   uses: ./.github/workflows/check-levelization.yml

  debian:
    # needs:
    #   - check-clang-format
    #   - check-levelization
    uses: ./.github/workflows/build-debian.yml
    with:
      conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
      conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
    secrets:
      conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
      conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

  # rhel:
  #   # needs:
  #   #   - check-clang-format
  #   #   - check-levelization
  #   uses: ./.github/workflows/build-rhel.yml
  #   with:
  #     conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
  #     conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
  #   secrets:
  #     conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
  #     conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

  ubuntu:
    # needs:
    #   - check-clang-format
    #   - check-levelization
    uses: ./.github/workflows/build-ubuntu.yml
    with:
      conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
      conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
    secrets:
      conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
      conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

  macos:
    # needs:
    #   - check-clang-format
    #   - check-levelization
    uses: ./.github/workflows/build-macos.yml
    with:
      conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
      conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
    secrets:
      conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
      conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

  windows:
    # needs:
    #   - check-clang-format
    #   - check-levelization
    uses: ./.github/workflows/build-windows.yml
    with:
      conan_remote_name: ${{ vars.CONAN_REMOTE_NAME }}
      conan_remote_url: ${{ vars.CONAN_REMOTE_URL }}
    secrets:
      conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
      conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
66 .github/workflows/missing-commits.yml vendored Normal file
@@ -0,0 +1,66 @@
name: Check for missing commits

# TODO: Use `workflow_run` to trigger this workflow after checks have completed.
# This can only be done if the `checks` workflow already exists on the default
# branch (i.e. `develop`), so we cannot do this yet.
# See https://docs.github.com/en/actions/reference/workflows-and-actions/events-that-trigger-workflows#workflow_run.
on:
  push:
    branches:
      - develop
      - release

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
      - name: Check for missing commits
        env:
          MESSAGE: |

            If you are reading this, then the commits indicated above are missing
            from the "develop" and/or "release" branch. Do a reverse-merge as soon
            as possible. See CONTRIBUTING.md for instructions.
        run: |
          set -o pipefail
          # Branches are ordered by how "canonical" they are. Every commit in one
          # branch should be in all the branches behind it.
          order=(master release develop)
          branches=()
          for branch in "${order[@]}"; do
            # Check that the branches exist so that this job will work on forked
            # repos, which don't necessarily have master and release branches.
            echo "Checking if ${branch} exists."
            if git ls-remote --exit-code --heads origin \
              refs/heads/${branch} > /dev/null; then
              branches+=(origin/${branch})
            fi
          done

          prior=()
          for branch in "${branches[@]}"; do
            if [[ ${#prior[@]} -ne 0 ]]; then
              echo "Checking ${prior[@]} for commits missing from ${branch}."
              git log --oneline --no-merges "${prior[@]}" \
                ^$branch | tee -a "missing-commits.txt"
              echo
            fi
            prior+=("${branch}")
          done

          if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
            echo "${MESSAGE}"
            exit 1
          fi
24 .gitignore vendored
@@ -21,10 +21,12 @@ bin/project-cache.jam

# Ignore object files.
*.o
build
.nih_c
tags
TAGS
GTAGS
GRTAGS
GPATH
bin/rippled
Debug/*.*
Release/*.*
@@ -34,6 +36,12 @@ Release/*.*
*.gcda
*.gcov

# Levelization checking
Builds/levelization/results/rawincludes.txt
Builds/levelization/results/paths.txt
Builds/levelization/results/includes/
Builds/levelization/results/includedby/

# Ignore tmp directory.
tmp

@@ -56,7 +64,7 @@ docs/html_doc
# Xcode user-specific project settings
# Xcode
.DS_Store
*/build/*
/build/
*.pbxuser
!default.pbxuser
*.mode1v3
@@ -92,3 +100,15 @@ Builds/VisualStudio2015/*.sdf
# MSVC
*.pdb
.vs/
CMakeSettings.json
compile_commands.json
.clangd
packages
pkg_out
pkg
CMakeUserPresets.json
bld.rippled/
.vscode

# Suggested in-tree build directory
/.build/
9 .gitmodules vendored
@@ -1,9 +0,0 @@
[submodule "src/nudb/extras/beast"]
	path = src/nudb/extras/beast
	url = https://github.com/vinniefalco/Beast.git
[submodule "src/nudb/extras/rocksdb"]
	path = src/nudb/extras/rocksdb
	url = https://github.com/facebook/rocksdb.git
[submodule "src/nudb/doc/docca"]
	path = src/nudb/doc/docca
	url = https://github.com/vinniefalco/docca.git
6 .pre-commit-config.yaml Normal file
@@ -0,0 +1,6 @@
# .pre-commit-config.yaml
repos:
  - repo: https://github.com/pre-commit/mirrors-clang-format
    rev: v18.1.8
    hooks:
      - id: clang-format
76 .travis.yml
@@ -1,76 +0,0 @@
sudo: false
language: cpp

env:
  global:
    # Maintenance note: to move to a new version
    # of boost, update both BOOST_ROOT and BOOST_URL.
    # Note that for simplicity, BOOST_ROOT's final
    # namepart must match the folder name internal
    # to boost's .tar.gz.
    - LCOV_ROOT=$HOME/lcov
    - GDB_ROOT=$HOME/gdb
    - BOOST_ROOT=$HOME/boost_1_67_0
    - BOOST_URL='http://sourceforge.net/projects/boost/files/boost/1.67.0/boost_1_67_0.tar.gz'

addons:
  apt:
    sources:
      - ubuntu-toolchain-r-test
      - llvm-toolchain-trusty-5.0
    packages:
      - gcc-5
      - g++-5
      - python-software-properties
      - protobuf-compiler
      - libprotobuf-dev
      - libssl-dev
      - libstdc++6
      - binutils-gold
      - cmake
      - lcov
      - llvm-5.0
      - clang-5.0

matrix:
  include:

    - compiler: gcc
      env: GCC_VER=5 TARGET=debug

    # - compiler: gcc
    #   env: GCC_VER=5 TARGET=debug.nounity

    # - compiler: gcc
    #   env: GCC_VER=5 TARGET=coverage PATH=$PWD/cmake/bin:$PATH

    - compiler: clang
      env: GCC_VER=5 TARGET=debug

    # - compiler: clang
    #   env: GCC_VER=5 TARGET=debug.nounity

    # The clang cmake builds do not link.
    # - compiler: clang
    #   env: GCC_VER=5 TARGET=debug CLANG_VER=3.8 PATH=$PWD/llvm-$LLVM_VERSION/bin:$PWD/cmake/bin:$PATH

    # - compiler: clang
    #   env: GCC_VER=5 TARGET=debug.nounity CLANG_VER=3.8 PATH=$PWD/llvm-$LLVM_VERSION/bin:$PWD/cmake/bin:$PATH

cache:
  directories:
    - $BOOST_ROOT

before_install:
  - bin/ci/ubuntu/install-dependencies.sh

script:
  - travis_retry bin/ci/ubuntu/build-and-test.sh

notifications:
  email:
    false
  irc:
    channels:
      - "chat.freenode.net#ripple-dev"
225 API-CHANGELOG.md Normal file
@@ -0,0 +1,225 @@
# API Changelog

This changelog is intended to list all updates to the [public API methods](https://xrpl.org/public-api-methods.html).

For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior.

The API version controls the API behavior you see. This includes what properties you see in responses, what parameters you're permitted to send in requests, and so on. You specify the API version in each of your requests. When a breaking change is introduced to the `rippled` API, a new version is released. To avoid breaking your code, you should set (or increase) your version when you're ready to upgrade.

For a log of breaking changes, see the **API Version [number]** headings. In general, breaking changes are associated with a particular API Version number. For non-breaking changes, scroll to the **XRP Ledger version [x.y.z]** headings. Non-breaking changes are associated with a particular XRP Ledger (`rippled`) release.

## API Version 2

API version 2 is available in `rippled` version 2.0.0 and later. To use this API, clients specify `"api_version" : 2` in each request.
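For example, a WebSocket request that opts into API version 2 might look like the following (the method and account here are illustrative placeholders, not part of the changelog):

```json
{
  "id": 1,
  "command": "account_info",
  "account": "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
  "api_version": 2
}
```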

#### Removed methods

In API version 2, the following deprecated methods are no longer available: (https://github.com/XRPLF/rippled/pull/4759)

- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.

#### Modifications to JSON transaction element in V2

In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. (https://github.com/XRPLF/rippled/pull/4775)
This helps to unify the JSON serialization format of transactions. (https://github.com/XRPLF/clio/issues/722, https://github.com/XRPLF/rippled/issues/4727)

- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`

Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:

- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`

This change affects the following methods (a sketch of the resulting response shape follows this list):

- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements.
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements.
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements.
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements.
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.
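As a rough sketch, a validated `tx` response in API version 2 would then be shaped as follows (field values are placeholders, and fields other than those listed above are omitted):

```json
{
  "result": {
    "tx_json": { "TransactionType": "Payment" },
    "meta": { "TransactionResult": "tesSUCCESS" },
    "hash": "<64-character transaction ID>",
    "ledger_index": 90000000,
    "ledger_hash": "<ledger hash>",
    "close_time_iso": "2024-01-01T00:00:00Z",
    "validated": true
  }
}
```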

#### Modification to `Payment` transaction JSON schema

When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. (https://github.com/XRPLF/rippled/pull/4733) An example submission follows the list below.

- In the `Payment` transaction type, the JSON RPC field `Amount` is renamed to `DeliverMax`. To enable a smooth client transition, `Amount` is still handled, as described below: (https://github.com/XRPLF/rippled/pull/4733)
  - On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
  - On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with an `rpcINVALID_PARAMS` error.
  - On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
  - On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.
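For example, a minimal API version 2 `Payment` submission would use `DeliverMax` where version 1 clients used `Amount` (the accounts and amount below are illustrative):

```json
{
  "TransactionType": "Payment",
  "Account": "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
  "Destination": "rPT1Sjq2YGrBMTttX4GZHjKu9dyfzbpAYe",
  "DeliverMax": "1000000"
}
```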
|
||||
|
||||
#### Modifications to account_info response
|
||||
|
||||
- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) (https://github.com/XRPLF/rippled/pull/3770)
|
||||
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. (https://github.com/XRPLF/rippled/pull/4585)
|
||||
- (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)
|
||||
|
||||
#### Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response
|
||||
|
||||
- Using `ledger_index_min`, `ledger_index_max`, and `ledger_index` returns `invalidParams` because if you use `ledger_index_min` or `ledger_index_max`, then it does not make sense to also specify `ledger_index`. In API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4571)
|
||||
- The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. (https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579)
|
||||
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
|
||||
- returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/issues/4288)
|
||||
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
|
||||
|
||||
#### Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response
|
||||
|
||||
- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
|
||||
|
||||
## API Version 1
|
||||
|
||||
This version is supported by all `rippled` versions. For WebSocket and HTTP JSON-RPC requests, it is currently the default API version used when no `api_version` is specified.
|
||||
|
||||
The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.
|
||||
|
||||
### Inconsistency: server_info - network_id
|
||||
|
||||
The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode). However, use of reporting mode is now discouraged, in favor of using [Clio](https://github.com/XRPLF/clio) instead.
|
||||
|
||||
## XRP Ledger server version 2.5.0

As of 2025-04-04, version 2.5.0 is in development. You can use a pre-release version by building from source or [using the `nightly` package](https://xrpl.org/docs/infrastructure/installation/install-rippled-on-ubuntu).

### Additions and bugfixes in 2.5.0

- `channel_authorize`: If `signing_support` is not enabled in the config, the RPC is disabled.
## XRP Ledger server version 2.4.0

[Version 2.4.0](https://github.com/XRPLF/rippled/releases/tag/2.4.0) was released on March 4, 2025.

### Additions and bugfixes in 2.4.0

- `ledger_entry`: `state` is added as an alias for `ripple_state`.
- `ledger_entry`: Enables case-insensitive filtering by canonical name in addition to case-sensitive filtering by RPC name.
- `validators`: Added a new `validator_list_threshold` field to the response.
- `simulate`: A new RPC that executes a [dry run of a transaction submission](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate#2-rpc-simulate). (See the sketch after this list.)
- Signing methods autofill fees better, properly handle transactions that don't have a base fee, and also autofill the `NetworkID` field.
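As an illustration of `simulate`, the following sketch dry-runs a transaction without submitting it (assumes a local JSON-RPC server on port 5005; addresses are placeholders):

```python
# Sketch: dry-run a transaction with the simulate RPC (rippled 2.4.0+).
# The transaction is evaluated but NOT submitted to the network.
import requests

payload = {
    "method": "simulate",
    "params": [{
        "tx_json": {
            "TransactionType": "Payment",
            "Account": "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",   # placeholder
            "Destination": "rN7n7otQDd6FczFgLdSqtcsAUxDkw6fzRH",  # placeholder
            "DeliverMax": "1000000",
        },
    }],
}
result = requests.post("http://localhost:5005", json=payload).json()["result"]
print(result.get("engine_result"))
```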
## XRP Ledger server version 2.3.0

[Version 2.3.0](https://github.com/XRPLF/rippled/releases/tag/2.3.0) was released on Nov 25, 2024.

### Breaking changes in 2.3.0

- `book_changes`: If the requested ledger version is not available on this node, a `ledgerNotFound` error is returned and the node does not attempt to acquire the ledger from the p2p network (as with other non-admin RPCs). Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC.

### Additions and bugfixes in 2.3.0

- `book_changes`: Returns a `validated` field in its response, which was missing in prior versions.
## XRP Ledger server version 2.2.0

[Version 2.2.0](https://github.com/XRPLF/rippled/releases/tag/2.2.0) was released on Jun 5, 2024. The following additions are non-breaking (because they are purely additive):

- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))
## XRP Ledger server version 2.0.0

[Version 2.0.0](https://github.com/XRPLF/rippled/releases/tag/2.0.0) was released on Jan 9, 2024. The following additions are non-breaking (because they are purely additive):

- `server_definitions`: A new RPC that generates a `definitions.json`-like output that can be used in XRPL libraries.
- In `Payment` transactions, `DeliverMax` has been added as a replacement for the `Amount` field, which should not be used. Typically, the `delivered_amount` (in transaction metadata) should be used instead. To ease the transition, `DeliverMax` is present regardless of API version, since adding a field is non-breaking.
- API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting).
## XRP Ledger server version 1.12.0

[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023. The following additions are non-breaking (because they are purely additive).

- `server_info`: Added `ports`, an array which advertises the RPC and WebSocket ports. This information is also included in the `/crawl` endpoint (which calls `server_info` internally). `grpc` and `peer` ports are also included. (https://github.com/XRPLF/rippled/pull/4427)
  - `ports` contains objects, each containing a `port` for the listening port (expressed as a number string) and a `protocol` array listing the supported protocols on that port.
  - This allows crawlers to build a more detailed topology without needing to port-scan nodes.
  - (For peers and other non-admin clients, the info about admin ports is excluded.)
- Clawback: The following additions are gated by the Clawback amendment (`featureClawback`). (https://github.com/XRPLF/rippled/pull/4553)
  - Adds an [AccountRoot flag](https://xrpl.org/accountroot.html#accountroot-flags) called `lsfAllowTrustLineClawback`. (https://github.com/XRPLF/rippled/pull/4617)
  - Adds the corresponding `asfAllowTrustLineClawback` [AccountSet flag](https://xrpl.org/accountset.html#accountset-flags) as well.
  - Clawback is disabled by default, so if an issuer desires the ability to claw back funds, they must use an `AccountSet` transaction to set the `asfAllowTrustLineClawback` flag. They must do this before creating any trust lines, offers, escrows, payment channels, or checks.
  - Adds the [Clawback transaction type](https://github.com/XRPLF/XRPL-Standards/blob/master/XLS-39d-clawback/README.md#331-clawback-transaction), containing these fields (see the sketch after this list):
    - `Account`: The issuer of the asset being clawed back. Must also be the sender of the transaction.
    - `Amount`: The amount being clawed back, with the `Amount.issuer` being the token holder's address.
- Adds the [AMM](https://github.com/XRPLF/XRPL-Standards/discussions/78) feature ([#4294](https://github.com/XRPLF/rippled/pull/4294), [#4626](https://github.com/XRPLF/rippled/pull/4626)):
  - Adds the `amm_info` API to retrieve AMM information for a given token pair.
  - Adds the `AMMCreate` transaction type to create an `AMM` instance.
  - Adds the `AMMDeposit` transaction type to deposit funds into an `AMM` instance.
  - Adds the `AMMWithdraw` transaction type to withdraw funds from an `AMM` instance.
  - Adds the `AMMVote` transaction type to vote for the trading fee of an `AMM` instance.
  - Adds the `AMMBid` transaction type to bid for the Auction Slot of an `AMM` instance.
  - Adds the `AMMDelete` transaction type to delete an `AMM` instance.
  - Adds `sfAMMID` to `AccountRoot` to indicate that the account is an `AMM`'s account. `AMMID` is used to fetch `ltAMM`.
  - Adds the `lsfAMMNode` `TrustLine` flag to indicate that one side of the `TrustLine` is an `AMM` account.
  - Adds the `tfLPToken`, `tfSingleAsset`, `tfTwoAsset`, `tfOneAssetLPToken`, `tfLimitLPToken`, `tfTwoAssetIfEmpty`, `tfWithdrawAll`, and `tfOneAssetWithdrawAll` flags, which allow a trader to specify different field combinations for `AMMDeposit` and `AMMWithdraw` transactions.
  - Adds new transaction result codes:
    - `tecUNFUNDED_AMM`: Insufficient balance to fund the AMM. The account does not have funds for liquidity provision.
    - `tecAMM_BALANCE`: The AMM has an invalid balance. Calculated balances are greater than the current pool balances.
    - `tecAMM_FAILED`: The AMM transaction failed, due to a processing failure.
    - `tecAMM_INVALID_TOKENS`: Invalid AMM LP tokens. Invalid input values, format, or calculated values.
    - `tecAMM_EMPTY`: The AMM is in an empty state. The transaction requires the AMM to be in a non-empty state (LP tokens > 0).
    - `tecAMM_NOT_EMPTY`: The AMM is not in an empty state. The transaction requires the AMM to be in an empty state (LP tokens == 0).
    - `tecAMM_ACCOUNT`: The transaction targets an AMM account, e.g. an attempted clawback of an AMM account.
    - `tecINCOMPLETE`: Some work was completed, but more submissions are required to finish. For example, `AMMDelete` partially deletes the trust lines.
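As a rough illustration of the Clawback field semantics above, such a transaction might look like the following (addresses and values are placeholders; note that `Account` is the issuer, while `Amount.issuer` identifies the token holder being clawed back from):

```python
# Hypothetical Clawback transaction (as a Python dict); addresses are placeholders.
clawback_tx = {
    "TransactionType": "Clawback",
    # The issuer of the asset being clawed back; must also be the sender.
    "Account": "rISSUERxxxxxxxxxxxxxxxxxxxxxxxxxxx",
    "Amount": {
        "currency": "USD",
        # Unusually, `issuer` here is the token holder's address, not the issuer's.
        "issuer": "rHOLDERxxxxxxxxxxxxxxxxxxxxxxxxxxx",
        "value": "100",
    },
}
```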
## XRP Ledger server version 1.11.0

[Version 1.11.0](https://github.com/XRPLF/rippled/releases/tag/1.11.0) was released on Jun 20, 2023.

### Breaking changes in 1.11

- Added the ability to mark amendments as obsolete. For the `feature` admin API, there is a new possible value for the `vetoed` field. (https://github.com/XRPLF/rippled/pull/4291)
  - The value of `vetoed` can now be `true`, `false`, or `"Obsolete"`.
- Removed the acceptance of seeds or public keys in place of account addresses. (https://github.com/XRPLF/rippled/pull/4404)
  - This simplifies the API and encourages better security practices (i.e. seeds should never be sent over the network).
- For the `ledger_data` method, when all entries are filtered out, the `state` field of the response is now an empty list (in other words, an empty array, `[]`). (Previously, it would return `null`.) While this is technically a breaking change, the new behavior is consistent with the documentation, so this is considered only a bug fix. (https://github.com/XRPLF/rippled/pull/4398)
- If and when the `fixNFTokenRemint` amendment activates, there will be a new AccountRoot field, `FirstNFTSequence`. This field is set to the current account sequence when the account issues its first NFT. If an account has not issued any NFTs, then the field is not set. ([#4406](https://github.com/XRPLF/rippled/pull/4406))
  - There is a new account deletion restriction: an account can only be deleted if `FirstNFTSequence` + `MintedNFTokens` + `256` is less than the current ledger sequence.
  - This is potentially a breaking change if clients have logic for determining whether an account can be deleted.
- NetworkID (see the sketch after this list):
  - For sidechains and networks with a network ID greater than 1024, there is a new [transaction common field](https://xrpl.org/transaction-common-fields.html), `NetworkID`. (https://github.com/XRPLF/rippled/pull/4370)
  - This field helps to prevent replay attacks and is now required for chains whose network ID is 1025 or higher.
  - The field must be omitted for Mainnet, so there is no change for Mainnet users.
  - There are three new local error codes:
    - `telNETWORK_ID_MAKES_TX_NON_CANONICAL`: a `NetworkID` is present but the chain's network ID is less than 1025. Remove the field from the transaction, and try again.
    - `telREQUIRES_NETWORK_ID`: a `NetworkID` is required, but is not present. Add the field to the transaction, and try again.
    - `telWRONG_NETWORK`: a `NetworkID` is specified, but it is for a different network. Submit the transaction to a different server which is connected to the correct network.
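For example, a client targeting a hypothetical sidechain could decide whether to include the field like this (a minimal sketch; the network ID value 1234 and the transaction fields are placeholders):

```python
# Sketch: include NetworkID only on chains where it is required (ID >= 1025).
NETWORK_ID = 1234  # placeholder: this chain's network ID

tx_json = {
    "TransactionType": "Payment",
    "Account": "rSENDERxxxxxxxxxxxxxxxxxxxxxxxxxxx",    # placeholder
    "Destination": "rDESTxxxxxxxxxxxxxxxxxxxxxxxxxxxx",  # placeholder
    "DeliverMax": "1000000",
}

# Required for network IDs 1025 and higher; must be omitted on Mainnet,
# otherwise the server returns telNETWORK_ID_MAKES_TX_NON_CANONICAL.
if NETWORK_ID >= 1025:
    tx_json["NetworkID"] = NETWORK_ID
```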
### Additions and bug fixes in 1.11

- Added the `nftoken_id`, `nftoken_ids`, and `offer_id` meta fields to NFT `tx` and `account_tx` responses. (https://github.com/XRPLF/rippled/pull/4447)
- Added an `account_flags` object to the `account_info` method response. (https://github.com/XRPLF/rippled/pull/4459)
- Added `NFTokenPages` to the `account_objects` RPC. (https://github.com/XRPLF/rippled/pull/4352)
- Fixed: the `marker` returned from the `account_lines` command would not work on subsequent commands. (https://github.com/XRPLF/rippled/pull/4361)
## XRP Ledger server version 1.10.0

[Version 1.10.0](https://github.com/XRPLF/rippled/releases/tag/1.10.0) was released on Mar 14, 2023.

### Breaking changes in 1.10

- If the `XRPFees` feature is enabled, the `fee_ref` field will be removed from the [ledger subscription stream](https://xrpl.org/subscribe.html#ledger-stream), because it will no longer have any meaning.
# Unit tests for API changes

The following information is useful to developers contributing to this project:

The purpose of unit tests is to catch bugs and prevent regressions. In general, it often makes sense to create a test function when there is a breaking change to the API. For APIs that have changed in a new API version, the tests should be modified so that both the prior version and the new version are properly tested.

To take one example: for `account_info` version 1, WebSocket and JSON-RPC behavior should be tested. The latest API version, i.e. API version 2, should be tested over WebSocket, JSON-RPC, and command line.
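The project's real unit tests are written in C++, but the cross-version idea can be illustrated with a small Python sketch (assuming a local JSON-RPC server on port 5005; the address and the assertion are illustrative only):

```python
# Illustrative only: exercise account_info under both API versions over
# JSON-RPC, asserting version-specific behavior per version.
import requests

ACCOUNT = "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh"  # placeholder address

for api_version in (1, 2):
    payload = {
        "method": "account_info",
        "params": [{"api_version": api_version, "account": ACCOUNT}],
    }
    result = requests.post("http://localhost:5005", json=payload).json()["result"]
    # Behavior that differs by version should be asserted per version here.
    assert "account_data" in result
    print(api_version, result.get("status"))
```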
422 BUILD.md Normal file
@@ -0,0 +1,422 @@
| :warning: **WARNING** :warning: |
|---|
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |

> These instructions also assume a basic familiarity with Conan and CMake.
> If you are unfamiliar with Conan,
> you can read our [crash course](./docs/build/conan.md)
> or the official [Getting Started][3] walkthrough.
## Branches

For a stable release, choose the `master` branch or one of the [tagged
releases](https://github.com/ripple/rippled/releases).

```
git checkout master
```

For the latest release candidate, choose the `release` branch.

```
git checkout release
```

For the latest set of untested features, or to contribute, choose the `develop`
branch.

```
git checkout develop
```
## Minimum Requirements

See [System Requirements](https://xrpl.org/system-requirements.html).

Building rippled generally requires Git, Python, Conan, CMake, and a C++ compiler. Some guidance on setting up such a [C++ development environment can be found here](./docs/build/environment.md).

- [Python 3.7](https://www.python.org/downloads/)
- [Conan 1.60](https://conan.io/downloads.html)[^1]
- [CMake 3.16](https://cmake.org/download/)

[^1]: It is possible to build with Conan 2.x,
but the instructions are significantly different,
which is why we are not recommending it yet.
Notably, the `conan profile update` command is removed in 2.x.
Profiles must be edited by hand.
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
The [minimum compiler versions][2] required are:

| Compiler    | Version |
|-------------|---------|
| GCC         | 11      |
| Clang       | 13      |
| Apple Clang | 13.1.6  |
| MSVC        | 19.23   |
### Linux

The Ubuntu operating system has received the highest level of
quality assurance, testing, and support.

Here are [sample instructions for setting up a C++ development environment on Linux](./docs/build/environment.md#linux).

### Mac

Many rippled engineers use macOS for development.

Here are [sample instructions for setting up a C++ development environment on macOS](./docs/build/environment.md#macos).

### Windows

Windows is not recommended for production use at this time. Additionally, 32-bit Windows development is not supported.

[Boost]: https://www.boost.org/
## Steps

### Set Up Conan

After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python, Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.

These instructions assume a basic familiarity with Conan and CMake.
If you are unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official [Getting Started][3] walkthrough.

You'll need at least one Conan profile:
```
conan profile new default --detect
```

Update the compiler settings:

```
conan profile update settings.compiler.cppstd=20 default
```

Configure Conan (1.x only) to use recipe revisions:

```
conan config set general.revisions_enabled=1
```
**Linux** developers will commonly have a default Conan [profile][] that compiles
with GCC and links with libstdc++.
If you are linking with libstdc++ (see profile setting `compiler.libcxx`),
then you will need to choose the `libstdc++11` ABI:

```
conan profile update settings.compiler.libcxx=libstdc++11 default
```

Ensure interoperability between `boost::string_view` and `std::string_view` types:

```
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_BEAST_USE_STD_STRING_VIEW"]' default
conan profile update 'env.CXXFLAGS="-DBOOST_BEAST_USE_STD_STRING_VIEW"' default
```

If you have other flags in the `conf.tools.build` or `env.CXXFLAGS` sections, make sure to retain the existing flags and append the new ones. You can check them with:

```
conan profile show default
```
**Windows** developers may need to use the x64 native build tools.
An easy way to do that is to run the shortcut "x64 Native Tools Command
Prompt" for the version of Visual Studio that you have installed.

Windows developers must also build `rippled` and its dependencies for the x64
architecture:

```
conan profile update settings.arch=x86_64 default
```
### Multiple compilers

When `/usr/bin/g++` exists on a platform, it is the default C++ compiler. This
default works for some users.

However, if this compiler cannot build rippled or its dependencies, then you can
install another compiler and set Conan and CMake to use it.
Update the `conf.tools.build:compiler_executables` setting in order to set the
correct variables (`CMAKE_<LANG>_COMPILER`) in the generated CMake toolchain file.
For example, on Ubuntu 20, you may have gcc at `/usr/bin/gcc` and g++ at `/usr/bin/g++`; if that is the case, you can select those compilers with:

```
conan profile update 'conf.tools.build:compiler_executables={"c": "/usr/bin/gcc", "cpp": "/usr/bin/g++"}' default
```

Replace `/usr/bin/gcc` and `/usr/bin/g++` with paths to the desired compilers.

Conan should choose the compiler for dependencies as well,
but not all of them have a Conan recipe that respects this setting (yet).
For the rest, you can set these environment variables.
Replace `<path>` with paths to the desired compilers:

- `conan profile update env.CC=<path> default`
- `conan profile update env.CXX=<path> default`
Export our [Conan recipe for Snappy](./external/snappy).
It does not explicitly link the C++ standard library,
which allows you to statically link it with GCC, if you want.

```
# Conan 2.x
conan export --version 1.1.10 external/snappy
```

Export our [Conan recipe for SOCI](./external/soci).
It patches their CMake to correctly import its dependencies.

```
# Conan 2.x
conan export --version 4.0.3 external/soci
```
### Build and Test

1. Create a build directory and move into it.

   ```
   mkdir .build
   cd .build
   ```

   You can use any directory name. Conan treats your working directory as an
   install folder and generates files with implementation details.
   You don't need to worry about these files, but make sure to change
   your working directory to your build directory before calling Conan.

   **Note:** You can specify a directory for the installation files by adding
   the `install-folder` or `-if` option to every `conan install` command
   in the next step.
2. Use Conan to generate CMake files for every configuration you want to build:

   ```
   conan install .. --output-folder . --build missing --settings build_type=Release
   conan install .. --output-folder . --build missing --settings build_type=Debug
   ```

   To build Debug, in the next step, be sure to set `-DCMAKE_BUILD_TYPE=Debug`.

   For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
   you only need to run this command once.
   For a multi-configuration generator, e.g. `Visual Studio`, you may want to
   run it more than once.

   Each of these commands should also have a different `build_type` setting.
   A second command with the same `build_type` setting will overwrite the files
   generated by the first. You can pass the build type on the command line with
   `--settings build_type=$BUILD_TYPE` or in the profile itself,
   under the section `[settings]` with the key `build_type`.

   If you are using a Microsoft Visual C++ compiler,
   then you will need to ensure consistency between the `build_type` setting
   and the `compiler.runtime` setting.

   When `build_type` is `Release`, `compiler.runtime` should be `MT`.

   When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.

   ```
   conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
   conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
   ```
3. Configure CMake and pass the toolchain file generated by Conan, located at
   `$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.

   Single-config generators:

   Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
   and make sure it matches one of the `build_type` settings
   you chose in the previous step.

   For example, to build Debug, in the next command, replace "Release" with "Debug".

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON ..
   ```

   Multi-config generators:

   ```
   cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
   ```

   **Note:** You can pass build options for `rippled` in this step.
4. Build `rippled`.

   For a single-configuration generator, it will build whatever configuration
   you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator,
   you must pass the option `--config` to select the build configuration.

   Single-config generators:

   ```
   cmake --build . -j $(nproc)
   ```

   Multi-config generators:

   ```
   cmake --build . --config Release
   cmake --build . --config Debug
   ```
5. Test rippled.

   Single-config generators:

   ```
   ./rippled --unittest
   ```

   Multi-config generators:

   ```
   ./Release/rippled --unittest
   ./Debug/rippled --unittest
   ```

   The location of `rippled` in your build directory depends on your CMake
   generator. Pass `--help` to see the rest of the command line options.
## Coverage report

The coverage report is intended for developers using the GCC or Clang compilers
(including Apple Clang). It is generated by the build target `coverage`,
which is only enabled when the `coverage` option is set, e.g. with
`--options coverage=True` in `conan` or the `-Dcoverage=ON` variable in `cmake`.

Prerequisites for the coverage report:

- [gcovr tool][gcovr] (can be installed e.g. with [pip][python-pip])
- `gcov` for GCC (installed with the compiler by default) or
  `llvm-cov` for Clang (installed with the compiler by default)
- `Debug` build type

A coverage report is created when the following steps are completed, in order:

1. `rippled` binary built with instrumentation data, enabled by the `coverage`
   option mentioned above
2. completed run of unit tests, which populates coverage capture data
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
   to assemble both instrumentation data and the coverage capture data into a coverage report

The above steps are automated into a single target `coverage`. The instrumented
`rippled` binary can also be used for regular development or testing work, at
the cost of extra disk space utilization and a small performance hit
(to store coverage capture). In case of a spurious failure of unit tests, it is
possible to re-run the `coverage` target without rebuilding the `rippled` binary
(since it is simply a dependency of the coverage report target). It is also possible
to select only specific tests for the purpose of the coverage report, by setting
the `coverage_test` variable in `cmake`.

The default coverage report format is `html-details`, but the user
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
by setting the `coverage_format` variable in `cmake`. It is also possible
to generate more than one format at a time by setting the `coverage_extra_args`
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.

By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
set to the number of available CPU cores. This may cause spurious test
errors on Apple platforms. Developers can override the number of unit test jobs with
the `coverage_test_parallelism` variable in `cmake`.

Example use with some cmake variables set:
```
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dxrpld=ON -Dtests=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
cmake --build . --target coverage
```
After the `coverage` target is completed, the generated coverage report will be
stored inside the build directory as either:

- a file named `coverage.`_extension_, with a suitable extension for the report format, or
- a directory named `coverage`, with `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
## Options

| Option | Default Value | Description |
| --- | --- | --- |
| `assert` | OFF | Enable assertions. |
| `coverage` | OFF | Prepare the coverage report. |
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
| `tests` | OFF | Build tests. |
| `unity` | ON | Configure a unity build. |
| `xrpld` | OFF | Build the xrpld (`rippled`) application, and not just the libxrpl library. |

[Unity builds][5] may be faster for the first build
(at the cost of much more memory) since they concatenate sources into fewer
translation units. Non-unity builds may be faster for incremental builds,
and can be helpful for detecting `#include` omissions.
## Troubleshooting

### Conan

After any updates or changes to dependencies, you may need to do the following:

1. Remove your build directory.
2. Remove the Conan cache: `conan remove "*" -c`
3. Re-run [conan install](#build-and-test).
### 'protobuf/port_def.inc' file not found

If `cmake --build .` results in an error due to a missing protobuf file, then you might have generated CMake files for a different `build_type` than the `CMAKE_BUILD_TYPE` you passed to Conan.

```
/rippled/.build/pb-xrpl.libpb/xrpl/proto/ripple.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
   10 | #include <google/protobuf/port_def.inc>
      |          ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1 error generated.
```

For example, if you want to build Debug:

1. For conan install, pass `--settings build_type=Debug`
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
## Add a Dependency

If you want to experiment with a new package, follow these steps (a sketch of the `conanfile.py` change appears after this list):

1. Search for the package on [Conan Center](https://conan.io/center/).
2. Modify [`conanfile.py`](./conanfile.py):
   - Add a version of the package to the `requires` property.
   - Change any default options for the package by adding them to the
     `default_options` property (with syntax `'$package:$option': $value`).
3. Modify [`CMakeLists.txt`](./CMakeLists.txt):
   - Add a call to `find_package($package REQUIRED)`.
   - Link a library from the package to the target `ripple_libs`
     (search for the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`).
4. Start coding! Don't forget to include whatever headers you need from the package.
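As an illustration only, here is roughly what step 2 might look like inside `conanfile.py`. The package name `zstd` and its version are placeholders for whatever you found on Conan Center, and the surrounding class and exact property contents in the real recipe will differ.

```python
# Hypothetical excerpt of conanfile.py: adding a new dependency (sketch only).
from conan import ConanFile

class Rippled(ConanFile):
    # 1. Add the package (name/version are placeholders) to the requirements.
    requires = [
        'zstd/1.5.5',  # newly added dependency
        # ... existing requirements ...
    ]
    # 2. Override any of the package's options you need.
    default_options = {
        'zstd:shared': False,  # syntax: '$package:$option': $value
        # ... existing options ...
    }
```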
[1]: https://github.com/conan-io/conan-center-index/issues/13168
[2]: https://en.cppreference.com/w/cpp/compiler_support/20
[3]: https://docs.conan.io/en/latest/getting_started.html
[5]: https://en.wikipedia.org/wiki/Unity_build
[6]: https://github.com/boostorg/beast/issues/2648
[7]: https://github.com/boostorg/beast/issues/2661
[gcovr]: https://gcovr.com/en/stable/getting-started.html
[python-pip]: https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/
[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
[profile]: https://docs.conan.io/en/latest/reference/profiles.html
@@ -1,117 +0,0 @@
## "target" parsing..DEPRECATED and will be removed in future
macro(parse_target)
  if (target)
    # Parse the target
    set(remaining ${target})
    while (remaining)
      # get the component up to the next dot or end
      string(REGEX REPLACE "^\\.?([^\\.]+).*$" "\\1" cur_component ${remaining})
      string(REGEX REPLACE "^\\.?[^\\.]+(.*$)" "\\1" remaining ${remaining})

      if (${cur_component} STREQUAL gcc)
        if (DEFINED ENV{GNU_CC})
          set(CMAKE_C_COMPILER $ENV{GNU_CC})
        elseif ($ENV{CC} MATCHES .*gcc.*)
          set(CMAKE_C_COMPILER $ENV{CC})
        else()
          find_program(CMAKE_C_COMPILER gcc)
        endif()

        if (DEFINED ENV{GNU_CXX})
          set(CMAKE_CXX_COMPILER $ENV{GNU_CXX})
        elseif ($ENV{CXX} MATCHES .*g\\+\\+.*)
          set(CMAKE_CXX_COMPILER $ENV{CXX})
        else()
          find_program(CMAKE_CXX_COMPILER g++)
        endif()
      endif()

      if (${cur_component} STREQUAL clang)
        if (DEFINED ENV{CLANG_CC})
          set(CMAKE_C_COMPILER $ENV{CLANG_CC})
        elseif ($ENV{CC} MATCHES .*clang.*)
          set(CMAKE_C_COMPILER $ENV{CC})
        else()
          find_program(CMAKE_C_COMPILER clang)
        endif()

        if (DEFINED ENV{CLANG_CXX})
          set(CMAKE_CXX_COMPILER $ENV{CLANG_CXX})
        elseif ($ENV{CXX} MATCHES .*clang.*)
          set(CMAKE_CXX_COMPILER $ENV{CXX})
        else()
          find_program(CMAKE_CXX_COMPILER clang++)
        endif()
      endif()

      if (${cur_component} STREQUAL msvc)
        # TBD
      endif()

      if (${cur_component} STREQUAL unity)
        set(unity ON CACHE BOOL "" FORCE)
      endif()

      if (${cur_component} STREQUAL nounity)
        set(unity OFF CACHE BOOL "" FORCE)
      endif()

      if (${cur_component} STREQUAL debug)
        set(release false)
      endif()

      if (${cur_component} STREQUAL release)
        set(release true)
      endif()

      if (${cur_component} STREQUAL coverage)
        set(coverage ON CACHE BOOL "" FORCE)
        set(debug true)
      endif()

      if (${cur_component} STREQUAL profile)
        set(profile ON CACHE BOOL "" FORCE)
      endif()
    endwhile()
  endif()

  if(CMAKE_C_COMPILER MATCHES "-NOTFOUND$" OR
     CMAKE_CXX_COMPILER MATCHES "-NOTFOUND$")
    message(FATAL_ERROR "Can not find appropriate compiler for target ${target}")
  endif()

  if (release)
    set(CMAKE_BUILD_TYPE Release)
  else()
    set(CMAKE_BUILD_TYPE Debug)
  endif()
endmacro()

############################################################

macro(group_sources_in source_dir curdir)
  file(GLOB children RELATIVE ${source_dir}/${curdir}
       ${source_dir}/${curdir}/*)
  foreach (child ${children})
    if (IS_DIRECTORY ${source_dir}/${curdir}/${child})
      group_sources_in(${source_dir} ${curdir}/${child})
    else()
      string(REPLACE "/" "\\" groupname ${curdir})
      source_group(${groupname} FILES
                   ${source_dir}/${curdir}/${child})
    endif()
  endforeach()
endmacro()

macro(group_sources curdir)
  group_sources_in(${PROJECT_SOURCE_DIR} ${curdir})
endmacro()

macro (exclude_if_included target_)
  if (NOT ${CMAKE_CURRENT_SOURCE_DIR} STREQUAL ${CMAKE_SOURCE_DIR})
    set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
    set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)
  endif ()
endmacro ()
File diff suppressed because it is too large
@@ -1,47 +0,0 @@
# - Try to find jemalloc
# Once done this will define
#  JEMALLOC_FOUND - System has jemalloc
#  JEMALLOC_INCLUDE_DIRS - The jemalloc include directories
#  JEMALLOC_LIBRARIES - The libraries needed to use jemalloc

if(NOT USE_BUNDLED_JEMALLOC)
  find_package(PkgConfig)
  if (PKG_CONFIG_FOUND)
    pkg_check_modules(PC_JEMALLOC QUIET jemalloc)
  endif()
else()
  set(PC_JEMALLOC_INCLUDEDIR)
  set(PC_JEMALLOC_INCLUDE_DIRS)
  set(PC_JEMALLOC_LIBDIR)
  set(PC_JEMALLOC_LIBRARY_DIRS)
  set(LIMIT_SEARCH NO_DEFAULT_PATH)
endif()

set(JEMALLOC_DEFINITIONS ${PC_JEMALLOC_CFLAGS_OTHER})

find_path(JEMALLOC_INCLUDE_DIR jemalloc/jemalloc.h
          PATHS ${PC_JEMALLOC_INCLUDEDIR} ${PC_JEMALLOC_INCLUDE_DIRS}
          ${LIMIT_SEARCH})

# If we're asked to use static linkage, add libjemalloc.a as a preferred library name.
if(JEMALLOC_USE_STATIC)
  list(APPEND JEMALLOC_NAMES
       "${CMAKE_STATIC_LIBRARY_PREFIX}jemalloc${CMAKE_STATIC_LIBRARY_SUFFIX}")
endif()

list(APPEND JEMALLOC_NAMES jemalloc)

find_library(JEMALLOC_LIBRARY NAMES ${JEMALLOC_NAMES}
             HINTS ${PC_JEMALLOC_LIBDIR} ${PC_JEMALLOC_LIBRARY_DIRS}
             ${LIMIT_SEARCH})

set(JEMALLOC_LIBRARIES ${JEMALLOC_LIBRARY})
set(JEMALLOC_INCLUDE_DIRS ${JEMALLOC_INCLUDE_DIR})

include(FindPackageHandleStandardArgs)
# handle the QUIETLY and REQUIRED arguments and set JEMALLOC_FOUND to TRUE
# if all listed variables are TRUE
find_package_handle_standard_args(JeMalloc DEFAULT_MSG
                                  JEMALLOC_LIBRARY JEMALLOC_INCLUDE_DIR)

mark_as_advanced(JEMALLOC_INCLUDE_DIR JEMALLOC_LIBRARY)
@@ -1,55 +0,0 @@
include (CMakeFindDependencyMacro)
# need to represent system dependencies of the lib here
#[=========================================================[
   Boost
#]=========================================================]
if (static OR APPLE OR MSVC)
  set (Boost_USE_STATIC_LIBS ON)
endif ()
set (Boost_USE_MULTITHREADED ON)
if (static OR MSVC)
  set (Boost_USE_STATIC_RUNTIME ON)
else ()
  set (Boost_USE_STATIC_RUNTIME OFF)
endif ()
find_dependency (Boost 1.67
  COMPONENTS
    chrono
    context
    coroutine
    date_time
    filesystem
    program_options
    regex
    serialization
    system
    thread)
#[=========================================================[
   OpenSSL
#]=========================================================]
if (APPLE AND NOT DEFINED ENV{OPENSSL_ROOT_DIR})
  find_program (HOMEBREW brew)
  if (NOT HOMEBREW STREQUAL "HOMEBREW-NOTFOUND")
    execute_process (COMMAND ${HOMEBREW} --prefix openssl
      OUTPUT_VARIABLE OPENSSL_ROOT_DIR
      OUTPUT_STRIP_TRAILING_WHITESPACE)
  endif ()
endif ()

if ((NOT DEFINED OPENSSL_ROOT) AND (DEFINED ENV{OPENSSL_ROOT}))
  set (OPENSSL_ROOT $ENV{OPENSSL_ROOT})
endif ()
file (TO_CMAKE_PATH "${OPENSSL_ROOT}" OPENSSL_ROOT)

if (static OR APPLE OR MSVC)
  set (OPENSSL_USE_STATIC_LIBS ON)
endif ()
set (OPENSSL_MSVC_STATIC_RT ON)
find_dependency (OpenSSL 1.0.2 REQUIRED)
find_dependency (ZLIB)
if (TARGET ZLIB::ZLIB)
  set_target_properties(OpenSSL::Crypto PROPERTIES
    INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
endif ()

include ("${CMAKE_CURRENT_LIST_DIR}/RippleTargets.cmake")
397 Builds/Test.py
@@ -1,397 +0,0 @@
#!/usr/bin/env python

# This file is part of rippled: https://github.com/ripple/rippled
# Copyright (c) 2012 - 2017 Ripple Labs Inc.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""
Invocation:

    ./Builds/Test.py - builds and tests all configurations

The build must succeed without shell aliases for this to work.

To pass flags to cmake, put them at the very end of the command line, after
the -- flag - like this:

    ./Builds/Test.py -- -j4  # Pass -j4 to cmake --build


Common problems:

1) Boost not found. Solution: export BOOST_ROOT=[path to boost folder]

2) OpenSSL not found. Solution: export OPENSSL_ROOT=[path to OpenSSL folder]

3) cmake is not found. Solution: Be sure cmake directory is on your $PATH
"""
from __future__ import absolute_import, division, print_function, unicode_literals

import argparse
import itertools
import os
import platform
import re
import shutil
import sys
import subprocess


def powerset(iterable):
    """powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"""
    s = list(iterable)
    return itertools.chain.from_iterable(itertools.combinations(s, r) for r in range(len(s) + 1))

IS_WINDOWS = platform.system().lower() == 'windows'
IS_OS_X = platform.system().lower() == 'darwin'

# CMake
if IS_WINDOWS:
    CMAKE_UNITY_CONFIGS = ['Debug', 'Release']
    CMAKE_NONUNITY_CONFIGS = ['Debug', 'Release']
else:
    CMAKE_UNITY_CONFIGS = []
    CMAKE_NONUNITY_CONFIGS = []
CMAKE_UNITY_COMBOS = {'': [['rippled'], CMAKE_UNITY_CONFIGS],
                      '.nounity': [['rippled'], CMAKE_NONUNITY_CONFIGS]}

if IS_WINDOWS:
    CMAKE_DIR_TARGETS = {('msvc' + unity,): targets for unity, targets in
                         CMAKE_UNITY_COMBOS.items()}
elif IS_OS_X:
    CMAKE_DIR_TARGETS = {(build + unity,): targets
                         for build in ['debug', 'release']
                         for unity, targets in CMAKE_UNITY_COMBOS.items()}
else:
    CMAKE_DIR_TARGETS = {(cc + "." + build + unity,): targets
                         for cc in ['gcc', 'clang']
                         for build in ['debug', 'release', 'coverage', 'profile']
                         for unity, targets in CMAKE_UNITY_COMBOS.items()}

# list of tuples of all possible options
if IS_WINDOWS or IS_OS_X:
    CMAKE_ALL_GENERATE_OPTIONS = [tuple(x) for x in powerset(['-GNinja', '-Dassert=true'])]
else:
    CMAKE_ALL_GENERATE_OPTIONS = list(set(
        [tuple(x) for x in powerset(['-GNinja', '-Dstatic=true', '-Dassert=true', '-Dsan=address'])] +
        [tuple(x) for x in powerset(['-GNinja', '-Dstatic=true', '-Dassert=true', '-Dsan=thread'])]))

parser = argparse.ArgumentParser(
    description='Test.py - run ripple tests'
)

parser.add_argument(
    '--all', '-a',
    action='store_true',
    help='Build all configurations.',
)

parser.add_argument(
    '--keep_going', '-k',
    action='store_true',
    help='Keep going after one configuration has failed.',
)

parser.add_argument(
    '--silent', '-s',
    action='store_true',
    help='Silence all messages except errors',
)

parser.add_argument(
    '--verbose', '-v',
    action='store_true',
    help=('Report more information about which commands are executed and the '
          'results.'),
)

parser.add_argument(
    '--test', '-t',
    default='',
    help='Add a prefix for unit tests',
)

parser.add_argument(
    '--testjobs',
    default='0',
    type=int,
    help='Run tests in parallel'
)

parser.add_argument(
    '--ipv6',
    action='store_true',
    help='Use IPv6 localhost when running unit tests.',
)

parser.add_argument(
    '--clean', '-c',
    action='store_true',
    help='delete all build artifacts after testing',
)

parser.add_argument(
    '--quiet', '-q',
    action='store_true',
    help='Reduce output where possible (unit tests)',
)

parser.add_argument(
    '--dir', '-d',
    default=(),
    nargs='*',
    help='Specify one or more CMake dir names. '
         'Will also be used as -Dtarget=<dir> running cmake.'
)

parser.add_argument(
    '--target',
    default=(),
    nargs='*',
    help='Specify one or more CMake build targets. '
         'Will be used as --target <target> running cmake --build.'
)

parser.add_argument(
    '--config',
    default=(),
    nargs='*',
    help='Specify one or more CMake build configs. '
         'Will be used as --config <config> running cmake --build.'
)

parser.add_argument(
    '--generator_option',
    action='append',
    help='Specify a CMake generator option. Repeat for multiple options. '
         'Will be passed to the cmake generator. '
         'Due to limits of the argument parser, arguments starting with \'-\' '
         'must be attached to this option. e.g. --generator_option=-GNinja.')

parser.add_argument(
    '--build_option',
    action='append',
    help='Specify a build option. Repeat for multiple options. '
         'Will be passed to the build tool via cmake --build. '
         'Due to limits of the argument parser, arguments starting with \'-\' '
         'must be attached to this option. e.g. --build_option=-j8.')

parser.add_argument(
    'extra_args',
    default=(),
    nargs='*',
    help='Extra arguments are passed through to the tools'
)

ARGS = parser.parse_args()

def decodeString(line):
    # Python 2 vs. Python 3
    if isinstance(line, str):
        return line
    else:
        return line.decode()

def shell(cmd, args=(), silent=False, cust_env=None):
    """Execute a shell command and return the output."""
    silent = ARGS.silent or silent
    verbose = not silent and ARGS.verbose
    if verbose:
        print('$' + cmd, *args)

    command = (cmd,) + args

    # shell is needed in Windows to find executable in the path
    process = subprocess.Popen(
        command,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        env=cust_env,
        shell=IS_WINDOWS)
    lines = []
    count = 0
    # readline returns '' at EOF
    for line in iter(process.stdout.readline, ''):
        if process.poll() is None:
            decoded = decodeString(line)
            lines.append(decoded)
            if verbose:
                print(decoded, end='')
            elif not silent:
                count += 1
                if count >= 80:
                    print()
                    count = 0
                else:
                    print('.', end='')
        else:
            break

    if not verbose and count:
        print()
    process.wait()
    return process.returncode, lines

def get_cmake_dir(cmake_dir):
    return os.path.join('build', 'cmake', cmake_dir)

def run_cmake(directory, cmake_dir, args):
    print('Generating build in', directory, 'with', *args or ('default options',))
    old_dir = os.getcwd()
    if not os.path.exists(directory):
        os.makedirs(directory)
    os.chdir(directory)
    if IS_WINDOWS and not any(arg.startswith("-G") for arg in args) and not os.path.exists("CMakeCache.txt"):
        if '--ninja' in args:
            args += ('-GNinja',)
        else:
            args += ('-GVisual Studio 14 2015 Win64',)
    # hack to extract cmake options/args from the legacy target format
    if re.search(r'\.unity', cmake_dir):
        args += ('-Dunity=ON',)
    if re.search(r'\.nounity', cmake_dir):
        args += ('-Dunity=OFF',)
    if re.search('coverage', cmake_dir):
        args += ('-Dcoverage=ON',)
    if re.search('profile', cmake_dir):
        args += ('-Dprofile=ON',)
    if re.search('debug', cmake_dir):
        args += ('-DCMAKE_BUILD_TYPE=Debug',)
    if re.search('release', cmake_dir):
        args += ('-DCMAKE_BUILD_TYPE=Release',)
    if re.search('gcc', cmake_dir):
        args += ('-DCMAKE_C_COMPILER=gcc', '-DCMAKE_CXX_COMPILER=g++',)
    if re.search('clang', cmake_dir):
        args += ('-DCMAKE_C_COMPILER=clang', '-DCMAKE_CXX_COMPILER=clang++',)

    args += (os.path.join('..', '..', '..'),)
    resultcode, lines = shell('cmake', args)

    if resultcode:
        print('Generating FAILED:')
        if not ARGS.verbose:
            print(*lines, sep='')
        sys.exit(1)

    os.chdir(old_dir)

def run_cmake_build(directory, target, config, args):
    print('Building', target, config, 'in', directory, 'with', *args or ('default options',))
    build_args = ('--build', directory)
    if target:
        build_args += ('--target', target)
    if config:
        build_args += ('--config', config)
    if args:
        build_args += ('--',)
        build_args += tuple(args)
    resultcode, lines = shell('cmake', build_args)

    if resultcode:
        print('Build FAILED:')
        if not ARGS.verbose:
            print(*lines, sep='')
        sys.exit(1)

def run_cmake_tests(directory, target, config):
    failed = []
    if IS_WINDOWS:
        target += '.exe'
    executable = os.path.join(directory, config if config else 'Debug', target)
    if not os.path.exists(executable):
        executable = os.path.join(directory, target)
    print('Unit tests for', executable)
    testflag = '--unittest'
    quiet = ''
    testjobs = ''
    ipv6 = ''
    if ARGS.test:
        testflag += ('=' + ARGS.test)
    if ARGS.quiet:
        quiet = '-q'
    if ARGS.ipv6:
        ipv6 = '--unittest-ipv6'
    if ARGS.testjobs:
        testjobs = ('--unittest-jobs=' + str(ARGS.testjobs))
    resultcode, lines = shell(executable, (testflag, quiet, testjobs, ipv6))

    if resultcode:
        if not ARGS.verbose:
            print('ERROR:', *lines, sep='')
        failed.append([target, 'unittest'])

    return failed

def main():
    all_failed = []
    if ARGS.all:
        build_dir_targets = CMAKE_DIR_TARGETS
        generator_options = CMAKE_ALL_GENERATE_OPTIONS
    else:
        build_dir_targets = {tuple(ARGS.dir): [ARGS.target, ARGS.config]}
        if ARGS.generator_option:
            generator_options = [tuple(ARGS.generator_option)]
        else:
            generator_options = [tuple()]

    if not build_dir_targets:
        # Let CMake choose the build tool.
        build_dir_targets = {(): []}

    if ARGS.build_option:
        ARGS.build_option = ARGS.build_option + list(ARGS.extra_args)
    else:
        ARGS.build_option = list(ARGS.extra_args)

    for args in generator_options:
        for build_dirs, (build_targets, build_configs) in build_dir_targets.items():
            if not build_dirs:
                build_dirs = ('default',)
            if not build_targets:
                build_targets = ('rippled',)
            if not build_configs:
                build_configs = ('',)
            for cmake_dir in build_dirs:
                cmake_full_dir = get_cmake_dir(cmake_dir)
                run_cmake(cmake_full_dir, cmake_dir, args)

                for target in build_targets:
                    for config in build_configs:
                        run_cmake_build(cmake_full_dir, target, config, ARGS.build_option)
                        failed = run_cmake_tests(cmake_full_dir, target, config)

                        if failed:
                            print('FAILED:', *(':'.join(f) for f in failed))
                            if not ARGS.keep_going:
                                sys.exit(1)
                            else:
                                all_failed.extend([decodeString(cmake_dir +
                                    "." + target + "." + config), ':'.join(f)]
                                    for f in failed)
                        else:
                            print('Success')
                if ARGS.clean:
                    shutil.rmtree(cmake_full_dir)

    if all_failed:
        if len(all_failed) > 1:
            print()
        print('FAILED:', *(':'.join(f) for f in all_failed))
        sys.exit(1)

if __name__ == '__main__':
    main()
    sys.exit(0)
@@ -1,45 +0,0 @@
{
  // See https://go.microsoft.com//fwlink//?linkid=834763 for more information about this file.
  "configurations": [
    {
      "name": "x64-Debug",
      "generator": "Visual Studio 15 2017 Win64",
      "configurationType": "Debug",
      "inheritEnvironments": [ "msvc_x64_x64" ],
      "buildRoot": "${thisFileDir}\\build\\${name}",
      "cmakeCommandArgs": "",
      "buildCommandArgs": "-v:minimal",
      "ctestCommandArgs": "",
      "variables": [
        {
          "name": "BOOST_ROOT",
          "value": "C:\\lib\\boost"
        },
        {
          "name": "OPENSSL_ROOT",
          "value": "C:\\lib\\OpenSSL-Win64"
        }
      ]
    },
    {
      "name": "x64-Release",
      "generator": "Visual Studio 15 2017 Win64",
      "configurationType": "Release",
      "inheritEnvironments": [ "msvc_x64_x64" ],
      "buildRoot": "${thisFileDir}\\build\\${name}",
      "cmakeCommandArgs": "",
      "buildCommandArgs": "-v:minimal",
      "ctestCommandArgs": "",
      "variables": [
        {
          "name": "BOOST_ROOT",
          "value": "C:\\lib\\boost"
        },
        {
          "name": "OPENSSL_ROOT",
          "value": "C:\\lib\\OpenSSL-Win64"
        }
      ]
    }
  ]
}
@@ -1,265 +0,0 @@
|
||||
# Visual Studio 2017 Build Instructions
|
||||
|
||||
## Important
|
||||
|
||||
We do not recommend Windows for rippled production use at this time. Currently,
|
||||
the Ubuntu platform has received the highest level of quality assurance,
|
||||
testing, and support. Additionally, 32-bit Windows versions are not supported.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
To clone the source code repository, create branches for inspection or
|
||||
modification, build rippled under Visual Studio, and run the unit tests you will
|
||||
need these software components
|
||||
|
||||
| Component | Minimum Recommended Version |
|
||||
|-----------|-----------------------|
|
||||
| [Visual Studio 2017](README.md#install-visual-studio-2017)| 15.5.4 |
|
||||
| [Git for Windows](README.md#install-git-for-windows)| 2.16.1|
|
||||
| [Google Protocol Buffers Compiler](README.md#install-google-protocol-buffers-compiler) | 2.5.1|
|
||||
| [OpenSSL Library](README.md#install-openssl) | 1.0.2n |
|
||||
| [Boost library](README.md#build-boost) | 1.67.0 |
|
||||
| [CMake for Windows](README.md#optional-install-cmake-for-windows)* | 3.10.2 |
|
||||
|
||||
\* Only needed if not using the integrated CMake in VS 2017 and prefer generating dedicated project/solution files.
|
||||
|
||||
## Install Software
|
||||
|
||||
### Install Visual Studio 2017
|
||||
|
||||
If not already installed on your system, download your choice of installer from
|
||||
the [Visual Studio 2017
|
||||
Download](https://www.visualstudio.com/downloads/download-visual-studio-vs)
|
||||
page, run the installer, and follow the directions. **You may need to choose the
|
||||
`Desktop development with C++` workload to install all necessary C++ features.**
|
||||
|
||||
Any version of Visual Studio 2017 may be used to build rippled. The **Visual
|
||||
Studio 2017 Community** edition is available free of charge (see [the product
|
||||
page](https://www.visualstudio.com/products/visual-studio-community-vs) for
|
||||
licensing details), while paid editions may be used for an initial free-trial
|
||||
period.
|
||||
|
||||
### Install Git for Windows
|
||||
|
||||
Git is a distributed revision control system. The Windows version also provides
|
||||
the bash shell and many Windows versions of Unix commands. While there are other
|
||||
varieties of Git (such as TortoiseGit, which has a native Windows interface and
|
||||
integrates with the Explorer shell), we recommend installing [Git for
|
||||
Windows](https://git-scm.com/) since it provides a Unix-like command line
|
||||
environment useful for running shell scripts. Use of the bash shell under
|
||||
Windows is mandatory for running the unit tests.
|
||||
|
||||
### Install Google Protocol Buffers Compiler
|
||||
|
||||
Building rippled requires **protoc.exe** version 2. Version 3 is not currently
|
||||
supported.. At your option you may build it yourself from the sources in the
|
||||
[Google Protocol Buffers](https://github.com/google/protobuf) repository, or you
|
||||
may download a
|
||||
[protoc.exe](https://ripple.github.io/Downloads/protoc/2.5.1/protoc.exe)
|
||||
([alternate
|
||||
link](https://github.com/ripple/Downloads/raw/gh-pages/protoc/2.5.1/protoc.exe))
|
||||
precompiled Windows executable from the [Ripple
|
||||
Organization](https://github.com/ripple).
|
||||
|
||||
Either way, once you have the required version of **protoc.exe**, copy it into a
|
||||
standard location that is in your command line `%PATH%`.
|
||||
|
||||
* **NOTE:** If you use an older version of the compiler, the build will fail
|
||||
with errors related to a mismatch of the version of protocol buffer headers
|
||||
versus the compiler. Likewise, if you use version 3 or newer, the build will
|
||||
fail.
|
||||
|
||||
### Install OpenSSL

[Download OpenSSL.](http://slproweb.com/products/Win32OpenSSL.html) There will
be four `Win64` variants available; you want the non-light `v1.0` line. As of
this writing, you **should** select

* Win64 OpenSSL v1.0.2n.

and should **not** select

* Win64 OpenSSL v1.0.2n light
* Win64 OpenSSL v1.1.0g
* Win64 OpenSSL v1.1.0g light

Run the installer, and choose an appropriate location for your OpenSSL
installation. In this guide we use `C:\lib\OpenSSL-Win64` as the destination
location.

You may be informed on running the installer that "Visual C++ 2008
Redistributables" must be installed first. If so, download it from the
[same page](http://slproweb.com/products/Win32OpenSSL.html), again making sure
to get the correct 32-/64-bit variant.

* NOTE: Since rippled links statically to OpenSSL, it does not matter where the
  OpenSSL .DLL files are placed, or what version they are. rippled does not use
  or require any external .DLL files to run other than the standard operating
  system ones.

### Build Boost

Boost 1.67 or later is required.

After [downloading boost](http://www.boost.org/users/download/), unpack it to
`c:\lib`. As of this writing, the most recent version of boost is 1.67.0,
which will unpack into a directory named `boost_1_67_0`. We recommend either
renaming this directory to `boost`, or creating a junction link `mklink /J boost
boost_1_67_0`, so that you can more easily switch between versions.

Next, open **Developer Command Prompt** and type the following commands

```powershell
cd C:\lib\boost
bootstrap
```

The rippled application is linked statically to the standard runtimes and
external dependencies on Windows, to ensure that the behavior of the executable
is not affected by changes in outside files. Therefore, it is necessary to build
the required boost static libraries using this command:

```powershell
bjam -j<Num Parallel> --toolset=msvc-14.1 address-model=64 architecture=x86 link=static threading=multi runtime-link=shared,static stage
```

where you should replace `<Num Parallel>` with the number of parallel
invocations to use for the build, e.g. `bjam -j4 ...` would use up to 4
concurrent build shell commands for the build.

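For instance, on a four-core machine the full staging command above becomes:

```
bjam -j4 --toolset=msvc-14.1 address-model=64 architecture=x86 link=static threading=multi runtime-link=shared,static stage
```
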
Building the boost libraries may take considerable time. When the build process
is completed, take note of both the reported compiler include paths and linker
library paths as they will be required later.

### (Optional) Install CMake for Windows

[CMake](http://cmake.org) is a cross-platform build system generator. Visual
Studio 2017 includes an integrated version of CMake that avoids having to
manually run CMake, but it is undergoing continuous improvement. Users that
prefer to use standard Visual Studio project and solution files need to install
a dedicated version of CMake to generate them. The latest version can be found
at the [CMake download site](https://cmake.org/download/). It is recommended you
select the install option to add CMake to your path.

As of this writing, the latest version of CMake for Windows is 3.10.2.

## Clone the rippled repository

If you are familiar with cloning GitHub repositories, just follow your normal
process and clone `git@github.com:ripple/rippled.git`. Otherwise follow this
section for instructions.

1. If you don't have a GitHub account, sign up for one at
   [github.com](https://github.com/).
2. Make sure you have GitHub SSH keys. For help see
   [generating-ssh-keys](https://help.github.com/articles/generating-ssh-keys).

Open the "Git Bash" shell that was installed with "Git for Windows" in the step
above. Navigate to the directory where you want to clone rippled (Git Bash uses
`/c` for Windows's `C:` and forward slash where Windows uses backslash, so
`C:\Users\joe\projs` would be `/c/Users/joe/projs` in Git Bash). Now clone the
repository and optionally switch to the *master* branch. Type the following at
the bash prompt:

```
git clone git@github.com:ripple/rippled.git
cd rippled
```

If you receive an error about not having the "correct access rights" make sure
you have GitHub SSH keys, as described above.

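A quick way to confirm that your SSH keys work is to authenticate against
GitHub directly (a standard `ssh` invocation, not specific to rippled):

```
ssh -T git@github.com
# on success GitHub replies with a greeting such as
# "Hi <username>! You've successfully authenticated..."
```
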
For a stable release, choose the `master` branch or one of the tagged releases
listed on [rippled's GitHub page](https://github.com/ripple/rippled/releases).

```
git checkout master
```

To test the latest release candidate, choose the `release` branch.

```
git checkout release
```

If you are doing development work and want the latest set of untested features,
you can consider using the `develop` branch instead.

```
git checkout develop
```

# Build using Visual Studio integrated CMake

In Visual Studio 2017, Microsoft added [integrated IDE support for
cmake](https://blogs.msdn.microsoft.com/vcblog/2016/10/05/cmake-support-in-visual-studio/).
To begin, simply:

1. Launch Visual Studio and choose **File | Open | Folder**, navigating to the
   cloned rippled folder.
2. Right-click on `CMakeLists.txt` in the **Solution Explorer - Folder View** to
   generate a `CMakeSettings.json` file. A sample settings file is provided
   [here](/Builds/VisualStudio2017/CMakeSettings-example.json). Customize the
   settings for `BOOST_ROOT` and `OPENSSL_ROOT` to match the install paths if
   they differ from those in the file.
3. Select either the `x64-Release` or `x64-Debug` configuration from the
   **Project Settings** drop-down. This should invoke the built-in CMake project
   generator. If not, you can right-click on the `CMakeLists.txt` file and
   choose **Cache | Generate Cache**.
4. Select either the `rippled.exe` (unity) or `rippled_classic.exe` (non-unity)
   option in the **Select Startup Item** drop-down. This will be the target
   built when you press F7. Alternatively, you can choose a target to build from
   the top-level **CMake | Build** menu. Note that at this time, there are other
   targets listed that come from third-party Visual Studio files embedded in the
   rippled repo, e.g. `datagen.vcxproj`. Please ignore them.

For details on configuring debugging sessions or further customization of CMake,
please refer to the [CMake tools for VS
documentation](https://docs.microsoft.com/en-us/cpp/ide/cmake-tools-for-visual-cpp).

If using the provided `CMakeSettings.json` file, the executable will be in

```
.\build\x64-Release\Release\rippled(_classic).exe
```

or

```
.\build\x64-Debug\Debug\rippled(_classic).exe
```

where these paths are relative to your cloned git repository.

# Build using stand-alone CMake

This requires having installed [CMake for
Windows](README.md#optional-install-cmake-for-windows). We do not recommend
mixing this method with the integrated CMake method for the same repository
clone. Assuming you included the cmake executable folder in your path,
execute the following commands within your `rippled` cloned repository:

```
mkdir build\cmake
cd build\cmake
cmake ..\.. -G"Visual Studio 15 2017 Win64" -DBOOST_ROOT="C:\lib\boost_1_67_0" -DOPENSSL_ROOT="C:\lib\OpenSSL-Win64"
```

Now launch Visual Studio 2017 and select **File | Open | Project/Solution**.
Navigate to the `build\cmake` folder created above and select the `rippled.sln`
file. You can then choose whether to build the `Debug` or `Release` solution
configuration. Within the **Solution Explorer**, select either the `rippled`
(unity build) project or the `rippled_classic` (non-unity) project, and
right-click to build.

The executable will be in

```
.\build\cmake\Release\rippled(_classic).exe
```

or

```
.\build\cmake\Debug\rippled(_classic).exe
```

where these paths are relative to your cloned git repository.

# Unit Test (Recommended)

`rippled` builds a set of unit tests into the server executable. To run these
unit tests after building, pass the `--unittest` option to the compiled
`rippled` executable. The executable will exit with summary info after running
the unit tests.

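For example, assuming the integrated-CMake output path shown above, the tests
can be run from a command prompt like so:

```
.\build\x64-Release\Release\rippled.exe --unittest
```
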
(deleted file, 7 lines)
@@ -1,7 +0,0 @@
#!/usr/bin/env bash

num_procs=$(lscpu -p | grep -v '^#' | sort -u -t, -k 2,4 | wc -l) # number of physical cores

path=$(cd $(dirname $0) && pwd)
cd $(dirname $path)
${path}/Test.py -a -c --testjobs=${num_procs} -- -j${num_procs}

Builds/levelization/README.md (new file, 114 lines)
@@ -0,0 +1,114 @@
# Levelization

Levelization is the term used to describe efforts to prevent rippled from
having or creating cyclic dependencies.

rippled code is organized into directories under `src/rippled` (and
`src/test`) representing modules. The modules are intended to be
organized into "tiers" or "levels" such that a module from one level can
only include code from lower levels. Additionally, a module
in one level should never include code in an `impl` folder of any level
other than its own.

Unfortunately, over time, enforcement of levelization has been
inconsistent, so the current state of the code doesn't necessarily
reflect these rules. Whenever possible, developers should refactor any
levelization violations they find (by moving files or individual
classes). At the very least, don't make things worse.

The table below summarizes the _desired_ division of modules, based on the
state of the rippled code when it was created. The levels are numbered from
the bottom up, with the lower level, lower numbered, more independent
modules listed first, and the higher level, higher numbered modules with
more dependencies listed later.

**tl;dr:** The modules listed first are more independent than the modules
listed later.

| Level / Tier | Module(s) |
|--------------|-----------------------------------------------|
| 01 | ripple/beast ripple/unity |
| 02 | ripple/basics |
| 03 | ripple/json ripple/crypto |
| 04 | ripple/protocol |
| 05 | ripple/core ripple/conditions ripple/consensus ripple/resource ripple/server |
| 06 | ripple/peerfinder ripple/ledger ripple/nodestore ripple/net |
| 07 | ripple/shamap ripple/overlay |
| 08 | ripple/app |
| 09 | ripple/rpc |
| 10 | ripple/perflog |
| 11 | test/jtx test/beast test/csf |
| 12 | test/unit_test |
| 13 | test/crypto test/conditions test/json test/resource test/shamap test/peerfinder test/basics test/overlay |
| 14 | test |
| 15 | test/net test/protocol test/ledger test/consensus test/core test/server test/nodestore |
| 16 | test/rpc test/app |

(Note that `test` levelization is *much* less important and *much* less
strictly enforced than `ripple` levelization, other than the requirement
that `test` code should *never* be included in `ripple` code.)

## Validation

The [levelization.sh](levelization.sh) script takes no parameters,
reads no environment variables, and can be run from any directory,
as long as it is in the expected location in the rippled repo.
It can be run at any time from within a checked out repo, and will
do an analysis of all the `#include`s in
the rippled source. The only caveat is that it runs much slower
under Windows than in Linux. It hasn't yet been tested under macOS.
It generates many files of [results](results):

* `rawincludes.txt`: The raw dump of the `#include`s.
* `paths.txt`: A second dump grouping the source module
  to the destination module, deduped, and with frequency counts.
* `includes/`: A directory where each file represents a module and
  contains a list of modules and counts that the module _includes_.
* `includedby/`: Similar to `includes/`, but the other way around. Each
  file represents a module and contains a list of modules and counts
  that _include_ the module.
* [`loops.txt`](results/loops.txt): A list of direct loops detected
  between modules as they actually exist, as opposed to how they are
  desired as described above. In a perfect repo, this file will be
  empty.
  This file is committed to the repo, and is used by the [levelization
  Github workflow](../../.github/workflows/check-levelization.yml) to validate
  that nothing changed.
* [`ordering.txt`](results/ordering.txt): A list showing relationships
  between modules where there are no loops as they actually exist, as
  opposed to how they are desired as described above.
  This file is committed to the repo, and is used by the [levelization
  Github workflow](../../.github/workflows/check-levelization.yml) to validate
  that nothing changed.
* [`levelization.yml`](../../.github/workflows/check-levelization.yml):
  Github Actions workflow to test that levelization loops haven't
  changed. Unfortunately, if changes are detected, it can't tell if
  they are improvements or not, so if you have resolved any issues or
  done anything else to improve levelization, run `levelization.sh`,
  and commit the updated results.

The `loops.txt` and `ordering.txt` files relate the modules
using comparison signs, which indicate the number of times each
module is included in the other.

* `A > B` means that A should probably be at a higher level than B,
  because B is included in A significantly more than A is included in B.
  These results can be included in both `loops.txt` and `ordering.txt`.
  Because `ordering.txt` only includes relationships where B is not
  included in A at all, it will only include these types of results.
* `A ~= B` means that A and B are included in each other a different
  number of times, but the values are so close that the script can't
  definitively say that one should be above the other. These results
  will only be included in `loops.txt`.
* `A == B` means that A and B include each other the same number of
  times, so the script has no clue which should be higher. These results
  will only be included in `loops.txt`.

The committed files hide the detailed values intentionally, to
prevent false alarms and merging issues, and because it's easy to
get those details locally.

1. Run `levelization.sh`
2. Grep the modules in `paths.txt`.
   * For example, if a cycle is found `A ~= B`, simply `grep -w
     A Builds/levelization/results/paths.txt | grep -w B`

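Putting those two steps together, a run from the repository root might look
like this (a sketch; the module names are taken from the `loops.txt` results
shown below):

```
cd Builds/levelization
./levelization.sh
# investigate a reported cycle such as "test.jtx ~= test.unit_test"
grep -w test.jtx results/paths.txt | grep -w test.unit_test
```
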
Builds/levelization/levelization.sh (new executable file, 130 lines)
@@ -0,0 +1,130 @@
#!/bin/bash

# Usage: levelization.sh
# This script takes no parameters, reads no environment variables,
# and can be run from any directory, as long as it is in the expected
# location in the repo.

pushd $( dirname $0 )

if [ -v PS1 ]
then
  # if the shell is interactive, clean up any flotsam before analyzing
  git clean -ix
fi

# Ensure all sorting is ASCII-order consistently across platforms.
export LANG=C

rm -rfv results
mkdir results
includes="$( pwd )/results/rawincludes.txt"
pushd ../..
echo Raw includes:
grep -r '^[ ]*#include.*/.*\.h' include src | \
  grep -v boost | tee ${includes}
popd
pushd results

oldifs=${IFS}
IFS=:
mkdir includes
mkdir includedby
echo Build levelization paths
exec 3< ${includes} # open rawincludes.txt for input
while read -r -u 3 file include
do
  level=$( echo ${file} | cut -d/ -f 2,3 )
  # If the "level" indicates a file, cut off the filename
  if [[ "${level##*.}" != "${level}" ]]
  then
    # Use the "toplevel" label as a workaround for `sort`
    # inconsistencies between different utility versions
    level="$( dirname ${level} )/toplevel"
  fi
  level=$( echo ${level} | tr '/' '.' )

  includelevel=$( echo ${include} | sed 's/.*["<]//; s/[">].*//' | \
    cut -d/ -f 1,2 )
  if [[ "${includelevel##*.}" != "${includelevel}" ]]
  then
    # Use the "toplevel" label as a workaround for `sort`
    # inconsistencies between different utility versions
    includelevel="$( dirname ${includelevel} )/toplevel"
  fi
  includelevel=$( echo ${includelevel} | tr '/' '.' )

  if [[ "$level" != "$includelevel" ]]
  then
    echo $level $includelevel | tee -a paths.txt
  fi
done
echo Sort and dedup paths
sort -ds paths.txt | uniq -c | tee sortedpaths.txt
mv sortedpaths.txt paths.txt
exec 3>&- # close fd 3
IFS=${oldifs}
unset oldifs

echo Split into flat-file database
exec 4<paths.txt # open paths.txt for input
while read -r -u 4 count level include
do
  echo ${include} ${count} | tee -a includes/${level}
  echo ${level} ${count} | tee -a includedby/${include}
done
exec 4>&- # close fd 4

loops="$( pwd )/loops.txt"
ordering="$( pwd )/ordering.txt"
pushd includes
echo Search for loops
# Redirect stdout to a file
exec 4>&1
exec 1>"${loops}"
for source in *
do
  if [[ -f "$source" ]]
  then
    exec 5<"${source}" # open for input
    while read -r -u 5 include includefreq
    do
      if [[ -f $include ]]
      then
        if grep -q -w $source $include
        then
          if grep -q -w "Loop: $include $source" "${loops}"
          then
            continue
          fi
          sourcefreq=$( grep -w $source $include | cut -d\ -f2 )
          echo "Loop: $source $include"
          # If the counts are close, indicate that the two modules are
          # on the same level, though they shouldn't be
          if [[ $(( $includefreq - $sourcefreq )) -gt 3 ]]
          then
            echo -e "  $source > $include\n"
          elif [[ $(( $sourcefreq - $includefreq )) -gt 3 ]]
          then
            echo -e "  $include > $source\n"
          elif [[ $sourcefreq -eq $includefreq ]]
          then
            echo -e "  $include == $source\n"
          else
            echo -e "  $include ~= $source\n"
          fi
        else
          echo "$source > $include" >> "${ordering}"
        fi
      fi
    done
    exec 5>&- # close fd 5
  fi
done
exec 1>&4 # close fd 1
exec 4>&- # close fd 4
cat "${ordering}"
cat "${loops}"
popd
popd
popd

Builds/levelization/results/loops.txt (new file, 42 lines)
@@ -0,0 +1,42 @@
Loop: test.jtx test.toplevel
  test.toplevel > test.jtx

Loop: test.jtx test.unit_test
  test.unit_test == test.jtx

Loop: xrpld.app xrpld.core
  xrpld.app > xrpld.core

Loop: xrpld.app xrpld.ledger
  xrpld.app > xrpld.ledger

Loop: xrpld.app xrpld.net
  xrpld.app > xrpld.net

Loop: xrpld.app xrpld.overlay
  xrpld.overlay > xrpld.app

Loop: xrpld.app xrpld.peerfinder
  xrpld.peerfinder ~= xrpld.app

Loop: xrpld.app xrpld.rpc
  xrpld.rpc > xrpld.app

Loop: xrpld.app xrpld.shamap
  xrpld.app > xrpld.shamap

Loop: xrpld.core xrpld.net
  xrpld.net > xrpld.core

Loop: xrpld.core xrpld.perflog
  xrpld.perflog == xrpld.core

Loop: xrpld.net xrpld.rpc
  xrpld.rpc ~= xrpld.net

Loop: xrpld.overlay xrpld.rpc
  xrpld.rpc ~= xrpld.overlay

Loop: xrpld.perflog xrpld.rpc
  xrpld.rpc ~= xrpld.perflog

Builds/levelization/results/ordering.txt (new file, 197 lines)
@@ -0,0 +1,197 @@
libxrpl.basics > xrpl.basics
libxrpl.crypto > xrpl.basics
libxrpl.json > xrpl.basics
libxrpl.json > xrpl.json
libxrpl.protocol > xrpl.basics
libxrpl.protocol > xrpl.json
libxrpl.protocol > xrpl.protocol
libxrpl.resource > xrpl.basics
libxrpl.resource > xrpl.json
libxrpl.resource > xrpl.resource
libxrpl.server > xrpl.basics
libxrpl.server > xrpl.json
libxrpl.server > xrpl.protocol
libxrpl.server > xrpl.server
test.app > test.jtx
test.app > test.rpc
test.app > test.toplevel
test.app > test.unit_test
test.app > xrpl.basics
test.app > xrpld.app
test.app > xrpld.core
test.app > xrpld.ledger
test.app > xrpld.nodestore
test.app > xrpld.overlay
test.app > xrpld.rpc
test.app > xrpl.json
test.app > xrpl.protocol
test.app > xrpl.resource
test.basics > test.jtx
test.basics > test.unit_test
test.basics > xrpl.basics
test.basics > xrpld.perflog
test.basics > xrpld.rpc
test.basics > xrpl.json
test.basics > xrpl.protocol
test.beast > xrpl.basics
test.conditions > xrpl.basics
test.conditions > xrpld.conditions
test.consensus > test.csf
test.consensus > test.toplevel
test.consensus > test.unit_test
test.consensus > xrpl.basics
test.consensus > xrpld.app
test.consensus > xrpld.consensus
test.consensus > xrpld.ledger
test.consensus > xrpl.json
test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test
test.core > xrpl.basics
test.core > xrpld.core
test.core > xrpld.perflog
test.core > xrpl.json
test.core > xrpl.server
test.csf > xrpl.basics
test.csf > xrpld.consensus
test.csf > xrpl.json
test.csf > xrpl.protocol
test.json > test.jtx
test.json > xrpl.json
test.jtx > xrpl.basics
test.jtx > xrpld.app
test.jtx > xrpld.core
test.jtx > xrpld.ledger
test.jtx > xrpld.net
test.jtx > xrpld.rpc
test.jtx > xrpl.json
test.jtx > xrpl.protocol
test.jtx > xrpl.resource
test.jtx > xrpl.server
test.ledger > test.jtx
test.ledger > test.toplevel
test.ledger > xrpl.basics
test.ledger > xrpld.app
test.ledger > xrpld.core
test.ledger > xrpld.ledger
test.ledger > xrpl.protocol
test.nodestore > test.jtx
test.nodestore > test.toplevel
test.nodestore > test.unit_test
test.nodestore > xrpl.basics
test.nodestore > xrpld.core
test.nodestore > xrpld.nodestore
test.nodestore > xrpld.unity
test.overlay > test.jtx
test.overlay > test.toplevel
test.overlay > test.unit_test
test.overlay > xrpl.basics
test.overlay > xrpld.app
test.overlay > xrpld.overlay
test.overlay > xrpld.peerfinder
test.overlay > xrpld.shamap
test.overlay > xrpl.protocol
test.peerfinder > test.beast
test.peerfinder > test.unit_test
test.peerfinder > xrpl.basics
test.peerfinder > xrpld.core
test.peerfinder > xrpld.peerfinder
test.peerfinder > xrpl.protocol
test.protocol > test.toplevel
test.protocol > xrpl.basics
test.protocol > xrpl.json
test.protocol > xrpl.protocol
test.resource > test.unit_test
test.resource > xrpl.basics
test.resource > xrpl.resource
test.rpc > test.jtx
test.rpc > test.toplevel
test.rpc > xrpl.basics
test.rpc > xrpld.app
test.rpc > xrpld.core
test.rpc > xrpld.net
test.rpc > xrpld.overlay
test.rpc > xrpld.rpc
test.rpc > xrpl.json
test.rpc > xrpl.protocol
test.rpc > xrpl.resource
test.server > test.jtx
test.server > test.toplevel
test.server > test.unit_test
test.server > xrpl.basics
test.server > xrpld.app
test.server > xrpld.core
test.server > xrpld.rpc
test.server > xrpl.json
test.server > xrpl.server
test.shamap > test.unit_test
test.shamap > xrpl.basics
test.shamap > xrpld.nodestore
test.shamap > xrpld.shamap
test.shamap > xrpl.protocol
test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
xrpl.json > xrpl.basics
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json
xrpl.resource > xrpl.basics
xrpl.resource > xrpl.json
xrpl.resource > xrpl.protocol
xrpl.server > xrpl.basics
xrpl.server > xrpl.json
xrpl.server > xrpl.protocol
xrpld.app > test.unit_test
xrpld.app > xrpl.basics
xrpld.app > xrpld.conditions
xrpld.app > xrpld.consensus
xrpld.app > xrpld.nodestore
xrpld.app > xrpld.perflog
xrpld.app > xrpl.json
xrpld.app > xrpl.protocol
xrpld.app > xrpl.resource
xrpld.conditions > xrpl.basics
xrpld.conditions > xrpl.protocol
xrpld.consensus > xrpl.basics
xrpld.consensus > xrpl.json
xrpld.consensus > xrpl.protocol
xrpld.core > xrpl.basics
xrpld.core > xrpl.json
xrpld.core > xrpl.protocol
xrpld.ledger > xrpl.basics
xrpld.ledger > xrpl.json
xrpld.ledger > xrpl.protocol
xrpld.net > xrpl.basics
xrpld.net > xrpl.json
xrpld.net > xrpl.protocol
xrpld.net > xrpl.resource
xrpld.nodestore > xrpl.basics
xrpld.nodestore > xrpld.core
xrpld.nodestore > xrpld.unity
xrpld.nodestore > xrpl.json
xrpld.nodestore > xrpl.protocol
xrpld.overlay > xrpl.basics
xrpld.overlay > xrpld.core
xrpld.overlay > xrpld.peerfinder
xrpld.overlay > xrpld.perflog
xrpld.overlay > xrpl.json
xrpld.overlay > xrpl.protocol
xrpld.overlay > xrpl.resource
xrpld.overlay > xrpl.server
xrpld.peerfinder > xrpl.basics
xrpld.peerfinder > xrpld.core
xrpld.peerfinder > xrpl.protocol
xrpld.perflog > xrpl.basics
xrpld.perflog > xrpl.json
xrpld.rpc > xrpl.basics
xrpld.rpc > xrpld.core
xrpld.rpc > xrpld.ledger
xrpld.rpc > xrpld.nodestore
xrpld.rpc > xrpl.json
xrpld.rpc > xrpl.protocol
xrpld.rpc > xrpl.resource
xrpld.rpc > xrpl.server
xrpld.shamap > xrpl.basics
xrpld.shamap > xrpld.nodestore
xrpld.shamap > xrpl.protocol

(deleted file, 228 lines)
@@ -1,228 +0,0 @@
# Linux Build Instructions

This document focuses on building rippled for development purposes under recent
Ubuntu Linux distributions. To build rippled for RedHat, Fedora, or CentOS,
including docker-based builds for those distributions, please consult the
[rippled-package-builder](https://github.com/ripple/rippled-package-builder)
repository.

Development is regularly done on Ubuntu 16.04 or later. For non-Ubuntu
distributions, the steps below should work by installing the appropriate
dependencies using that distribution's package management tools.

## Dependencies

Use `apt-get` to install the dependencies provided by the distribution

```
$ apt-get update
$ apt-get install -y gcc g++ wget git cmake protobuf-compiler libprotobuf-dev libssl-dev
```

Advanced users can choose to install newer versions of gcc, or the clang
compiler. At this time, rippled only supports protobuf version 2. Using
version 3 of protobuf will give errors.

### Build Boost

Boost 1.67 or later is required. We recommend downloading and compiling boost
with the following process: After changing to the directory where
you wish to download and compile boost, run

```
$ wget https://dl.bintray.com/boostorg/release/1.67.0/source/boost_1_67_0.tar.gz
$ tar -xzf boost_1_67_0.tar.gz
$ cd boost_1_67_0
$ ./bootstrap.sh
$ ./b2 headers
$ ./b2 -j<Num Parallel>
```

### (Optional) Dependencies for Building Source Documentation

Source code documentation is not required for running/debugging rippled. That
said, the documentation contains some helpful information about specific
components of the application. For more information on how to install and run
the necessary components, see [this document](../../docs/README.md).

## Build

### Clone the rippled repository

From a shell:

```
git clone git@github.com:ripple/rippled.git
cd rippled
```

For a stable release, choose the `master` branch or one of the tagged releases
listed on [GitHub](https://github.com/ripple/rippled/releases).

```
git checkout master
```

or to test the latest release candidate, choose the `release` branch.

```
git checkout release
```

If you are doing development work and want the latest set of untested
features, you can consider using the `develop` branch instead.

```
git checkout develop
```

### Configure Library Paths

If you didn't persistently set the `BOOST_ROOT` environment variable to the
directory in which you compiled boost, then you should set it temporarily.

For example, if you built Boost in your home directory `~/boost_1_67_0`, you
would do the following for any shell in which you want to build:

```
export BOOST_ROOT=~/boost_1_67_0
```

Alternatively, you can add `-DBOOST_ROOT=~/boost_1_67_0` to the command line
when invoking `cmake`.

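Putting that together, the configure step from the next section could be
invoked like this (a sketch, assuming the Boost location above):

```
cmake -DCMAKE_BUILD_TYPE=Debug -DBOOST_ROOT=~/boost_1_67_0 ..
```
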
### Generate and Build

All builds should be done in a separate directory from the source tree root
(a subdirectory is fine). For example, from the root of the ripple source tree:

```
mkdir my_build
cd my_build
```

followed by:

```
cmake -DCMAKE_BUILD_TYPE=Debug ..
```

`CMAKE_BUILD_TYPE` can be changed as desired for `Debug` vs.
`Release` builds (all four standard cmake build types are supported).

To select a different compiler (most likely gcc will be found by default), pass
`-DCMAKE_C_COMPILER=<path/to/c-compiler>` and
`-DCMAKE_CXX_COMPILER=</path/to/cxx-compiler>` when configuring. If you prefer,
you can instead set `CC` and `CXX` environment variables which cmake will honor.

Once you have generated the build system, you can run the build via cmake:

```
cmake --build . -- -j <parallel jobs>
```

The `-j` parameter in this example tells the build tool to compile several
files in parallel. This value should be chosen roughly based on the number of
cores you have available and/or want to use for building.

When the build completes successfully, you will have a `rippled` executable in
the current directory, which can be used to connect to the network (when
properly configured) or to run unit tests.

#### Options During Configuration:

The CMake file defines a number of configure-time options which can be
examined by running `cmake-gui` or `ccmake` to generate the build. In
particular, the `unity` option allows you to select between the unity and
non-unity builds. `unity` builds are faster to compile since they combine
multiple sources into a single compilation unit - this is the default if you
don't specify. `nounity` builds can be helpful for detecting include omissions
or for finding other build-related issues, but aren't generally needed for
testing and running.

* `-Dunity=ON` to enable/disable unity builds (defaults to ON)
* `-Dassert=ON` to enable asserts
* `-Djemalloc=ON` to enable jemalloc support for heap checking
* `-Dsan=thread` to enable the thread sanitizer with clang
* `-Dsan=address` to enable the address sanitizer with clang
* `-Dstatic=ON` to enable static linking library dependencies

Several other infrequently used options are available - run `ccmake` or
`cmake-gui` for a list of all options.

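For instance, a non-unity debug build with asserts enabled could be configured
like this (a sketch combining the flags above):

```
cmake -DCMAKE_BUILD_TYPE=Debug -Dunity=OFF -Dassert=ON ..
```
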
#### Optional Installation

The rippled cmake build supports an installation target that will install
rippled as well as a support library that can be used to sign transactions. In
order to build and install the files, specify the `install` target when
building, e.g.:

```
cmake -DCMAKE_BUILD_TYPE=Debug -DCMAKE_INSTALL_PREFIX=/opt/local ..
cmake --build . --target install -- -j <parallel jobs>
```

We recommend specifying `CMAKE_INSTALL_PREFIX` when configuring in order to
explicitly control the install location for your files. Without this setting,
cmake will typically install in `/usr/local`. It is also possible to "rehome"
the installation by specifying the `DESTDIR` env variable during the install
phase, e.g.:

```
DESTDIR=~/mylibs cmake --build . --target install -- -j <parallel jobs>
```

in which case, the files would be installed in the `CMAKE_INSTALL_PREFIX` within
the specified `DESTDIR` path.

#### Signing Library

If you want to use the signing support library to create an application, there
are two simple mechanisms with cmake + git that facilitate this.

With either option below, you will have access to a library from the
rippled project that you can link to in your own project's CMakeLists.txt, e.g.:

```
target_link_libraries (my-signing-app Ripple::xrpl_core)
```

##### Option 1: git submodules + add_subdirectory

First, add the rippled repo as a submodule to your project repo:

```
git submodule add -b master https://github.com/ripple/rippled.git vendor/rippled
```

Change the `vendor/rippled` path as desired for your repo layout. Furthermore,
change the branch name if you want to track a different rippled branch, such
as `develop`.

Second, to bring this submodule into your project, just add the rippled
subdirectory:

```
add_subdirectory (vendor/rippled)
```

##### Option 2: installed rippled + find_package

First, follow the "Optional Installation" instructions above to
build and install the desired version of rippled.

To make use of the installed files, add the following to your CMakeLists.txt
file:

```
set (CMAKE_MODULE_PATH /opt/local/lib/cmake/ripple ${CMAKE_MODULE_PATH})
find_package(Ripple REQUIRED)
```

Change the `/opt/local` module path above to match your chosen installation
prefix.

## Unit Tests (Recommended)

`rippled` builds a set of unit tests into the server executable. To run these unit
tests after building, pass the `--unittest` option to the compiled `rippled`
executable. The executable will exit with summary info after running the unit tests.

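For example, from the `my_build` directory used above:

```
./rippled --unittest
```
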
(deleted file, 231 lines)
@@ -1,231 +0,0 @@
# macOS Build Instructions

## Important

We don't recommend macOS for rippled production use at this time. Currently, the
Ubuntu platform has received the highest level of quality assurance and
testing. That said, macOS is suitable for many development/test tasks.

## Prerequisites

You'll need macOS 10.8 or later.

To clone the source code repository, create branches for inspection or
modification, build rippled using clang, and run the system tests you will need
these software components:

* [XCode](https://developer.apple.com/xcode/)
* [Homebrew](http://brew.sh/)
* [Boost](http://boost.org/)
* other misc utilities and libraries installed via homebrew

## Install Software

### Install XCode

If not already installed on your system, download and install XCode using the
App Store or by using [this link](https://developer.apple.com/xcode/).

For more info, see "Step 1: Download and Install the Command Line Tools"
[here](http://www.moncefbelyamani.com/how-to-install-xcode-homebrew-git-rvm-ruby-on-mac)

The command line tools can be installed through the terminal with the command:

```
xcode-select --install
```

### Install Homebrew

> "[Homebrew](http://brew.sh/) installs the stuff you need that Apple didn’t."

Open a terminal and type:

```
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
```

For more info, see "Step 2: Install Homebrew"
[here](http://www.moncefbelyamani.com/how-to-install-xcode-homebrew-git-rvm-ruby-on-mac#step-2)

### Install Dependencies Using Homebrew

`brew` will generally install the latest stable version of any package, which
should satisfy the minimum version requirements for rippled.

```
brew update
brew install git cmake pkg-config protobuf openssl ninja
```

### Build Boost

Boost 1.67 or later is required.

We want to compile boost with clang/libc++.

Download [a release](https://dl.bintray.com/boostorg/release/1.67.0/source/boost_1_67_0.tar.bz2)

Extract it to a folder, making note of where, open a terminal, then:

```
./bootstrap.sh
./b2 cxxflags="-std=c++14"
```

Create an environment variable `BOOST_ROOT` in one of your `rc` files, pointing
to the root of the extracted directory.

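For example, to persist it for future shells (a sketch; the path matches the
Downloads location used later in this guide, so adjust as needed, and use your
shell's own rc file):

```
echo 'export BOOST_ROOT=/Users/Abigail/Downloads/boost_1_67_0' >> ~/.bash_profile
```
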
### Dependencies for Building Source Documentation

Source code documentation is not required for running/debugging rippled. That
said, the documentation contains some helpful information about specific
components of the application. For more information on how to install and run
the necessary components, see [this document](../../docs/README.md).

## Build

### Clone the rippled repository

From a shell:

```
git clone git@github.com:ripple/rippled.git
cd rippled
```

For a stable release, choose the `master` branch or one of the tagged releases
listed on [GitHub](https://github.com/ripple/rippled/releases).

```
git checkout master
```

or to test the latest release candidate, choose the `release` branch.

```
git checkout release
```

If you are doing development work and want the latest set of untested
features, you can consider using the `develop` branch instead.

```
git checkout develop
```

### Configure Library Paths

If you didn't persistently set the `BOOST_ROOT` environment variable to the
root of the extracted directory above, then you should set it temporarily.

For example, assuming your username were `Abigail` and you extracted Boost
1.67.0 in `/Users/Abigail/Downloads/boost_1_67_0`, you would do the following
for any shell in which you want to build:

```
export BOOST_ROOT=/Users/Abigail/Downloads/boost_1_67_0
```

### Generate and Build

For simple command line building we recommend using the *Unix Makefile* or
*Ninja* generator with cmake. All builds should be done in a separate directory
from the source tree root (a subdirectory is fine). For example, from the root
of the ripple source tree:

```
mkdir my_build
cd my_build
```

followed by:

```
cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Debug ..
```

or

```
cmake -G "Ninja" -DCMAKE_BUILD_TYPE=Debug ..
```

`CMAKE_BUILD_TYPE` can be changed as desired for `Debug` vs.
`Release` builds (all four standard cmake build types are supported).

Once you have generated the build system, you can run the build via cmake:

```
cmake --build . -- -j 4
```

The `-j` parameter in this example tells the build tool to compile several
files in parallel. This value should be chosen roughly based on the number of
cores you have available and/or want to use for building.

When the build completes successfully, you will have a `rippled` executable in
the current directory, which can be used to connect to the network (when
properly configured) or to run unit tests.

If you prefer to have an XCode project to use for building, ask CMake to
generate that instead:

```
cmake -GXcode ..
```

After generation succeeds, the Xcode project file can be opened and used to
build/debug. However, just as with other generators, cmake knows how to build
using the Xcode project as well:

```
cmake --build . -- -jobs 4
```

This will invoke the `xcodebuild` utility to compile the project. See `xcodebuild
--help` for details about build options.

#### Optional installation

If you'd like to install the artifacts of the build, we have preliminary
support for standard CMake installation targets. We recommend explicitly
setting the installation location when configuring, e.g.:

```
cmake -DCMAKE_INSTALL_PREFIX=/opt/local ..
```

(change the destination as desired), and then build the `install` target:

```
cmake --build . --target install -- -jobs 4
```

#### Options During Configuration:

The CMake file defines a number of configure-time options which can be
examined by running `cmake-gui` or `ccmake` to generate the build. In
particular, the `unity` option allows you to select between the unity and
non-unity builds. `unity` builds are faster to compile since they combine
multiple sources into a single compilation unit - this is the default if you
don't specify. `nounity` builds can be helpful for detecting include omissions
or for finding other build-related issues, but aren't generally needed for
testing and running.

* `-Dunity=ON` to enable/disable unity builds (defaults to ON)
* `-Dassert=ON` to enable asserts
* `-Djemalloc=ON` to enable jemalloc support for heap checking
* `-Dsan=thread` to enable the thread sanitizer with clang
* `-Dsan=address` to enable the address sanitizer with clang

Several other infrequently used options are available - run `ccmake` or
`cmake-gui` for a list of all options.

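For example, to configure a debug build with the address sanitizer enabled (a
sketch combining the flags above with the Ninja generator shown earlier):

```
cmake -G "Ninja" -DCMAKE_BUILD_TYPE=Debug -Dsan=address ..
```
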
## Unit Tests (Recommended)

`rippled` builds a set of unit tests into the server executable. To run these unit
tests after building, pass the `--unittest` option to the compiled `rippled`
executable. The executable will exit with summary info after running the unit tests.

CMakeLists.txt (2255 lines; diff suppressed because it is too large)
CONTRIBUTING.md (new file, 1021 lines; diff suppressed because it is too large)
Jenkinsfile (vendored, deleted, 750 lines)
@@ -1,750 +0,0 @@
#!/usr/bin/env groovy
|
||||
|
||||
import groovy.json.JsonOutput
|
||||
import java.text.*
|
||||
|
||||
all_status = [:]
|
||||
commit_id = ''
|
||||
git_fork = 'ripple'
|
||||
git_repo = 'rippled'
|
||||
//
|
||||
// this is not the actual token, but an ID/key into the jenkins
|
||||
// credential store which httpRequest can access.
|
||||
//
|
||||
github_cred = '6bd3f3b9-9a35-493e-8aef-975403c82d3e'
|
||||
//
|
||||
// root API url for our repo (default, overriden below)
|
||||
//
|
||||
github_api = 'https://api.github.com/repos/ripple/rippled'
|
||||
|
||||
try {
|
||||
stage ('Startup Checks') {
|
||||
// here we check the commit author against collaborators
|
||||
// we need a node to do this because readJSON requires
|
||||
// a filesystem (although we just pass it text)
|
||||
node {
|
||||
checkout scm
|
||||
commit_id = getCommitID()
|
||||
//
|
||||
// NOTE this getUserRemoteConfigs call requires a one-time
|
||||
// In-process Script Approval (configure jenkins). We need this
|
||||
// to detect the remote repo to interact with via the github API.
|
||||
//
|
||||
def remote_url = scm.getUserRemoteConfigs()[0].getUrl()
|
||||
if (remote_url) {
|
||||
echo "GIT URL scm: $remote_url"
|
||||
git_fork = remote_url.tokenize('/')[2]
|
||||
git_repo = remote_url.tokenize('/')[3].split('\\.')[0]
|
||||
echo "GIT FORK: $git_fork"
|
||||
echo "GIT REPO: $git_repo"
|
||||
github_api = "https://api.github.com/repos/${git_fork}/${git_repo}"
|
||||
echo "API URL REPO: $github_api"
|
||||
}
|
||||
|
||||
if (env.CHANGE_AUTHOR) {
|
||||
def collab_found = false;
|
||||
//
|
||||
// this means we have some sort of PR , so verify the author
|
||||
//
|
||||
echo "CHANGE AUTHOR ---> $CHANGE_AUTHOR"
|
||||
echo "CHANGE TARGET ---> $CHANGE_TARGET"
|
||||
echo "CHANGE ID ---> $CHANGE_ID"
|
||||
//
|
||||
// check the commit author against collaborators
|
||||
// we need a node to do this because readJSON requires
|
||||
// a filesystem (although we just pass it text)
|
||||
//
|
||||
def response = httpRequest(
|
||||
timeout: 10,
|
||||
authentication: github_cred,
|
||||
url: "${github_api}/collaborators")
|
||||
def collab_data = readJSON(
|
||||
text: response.content)
|
||||
for (collaborator in collab_data) {
|
||||
if (collaborator['login'] == "$CHANGE_AUTHOR") {
|
||||
echo "$CHANGE_AUTHOR is a collaborator!"
|
||||
collab_found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (! collab_found) {
|
||||
echo "$CHANGE_AUTHOR is not a collaborator - waiting for manual approval."
|
||||
try {
|
||||
response = httpRequest(
|
||||
timeout: 10,
|
||||
authentication: github_cred,
|
||||
url: getCommentURL(),
|
||||
contentType: 'APPLICATION_JSON',
|
||||
httpMode: 'POST',
|
||||
requestBody: JsonOutput.toJson([
|
||||
body: """
|
||||
**Thank you** for your submission. It will be reviewed soon and submitted for processing in CI.
|
||||
"""
|
||||
])
|
||||
)
|
||||
}
|
||||
catch (e) {
|
||||
echo 'had a problem interacting with github...comments are probably not updated'
|
||||
}
|
||||
|
||||
try {
|
||||
input (
|
||||
message: "User $CHANGE_AUTHOR has submitted PR #$CHANGE_ID. " +
|
||||
"**Please review** the changes for any CI/security concerns " +
|
||||
"and then decide whether to proceed with building.")
|
||||
}
|
||||
catch(e) {
|
||||
def user = e.getCauses()[0].getUser().toString()
|
||||
all_status['startup'] = [
|
||||
false,
|
||||
'Approval Check',
|
||||
"Build aborted by [${user}]",
|
||||
"[console](${env.BUILD_URL}/console)"]
|
||||
error "Aborted by: [${user}]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage ('Parallel Build') {
|
||||
String[][] variants = [
|
||||
['gcc.Release' ,'-Dassert=ON' ,'MANUAL_TESTS=true' ],
|
||||
['gcc.Debug' ,'-Dcoverage=ON' ],
|
||||
['docs' ],
|
||||
['msvc.Debug' ],
|
||||
['msvc.Debug' ,'' ,'NINJA_BUILD=true' ],
|
||||
['msvc.Debug' ,'-Dunity=OFF' ],
|
||||
['msvc.Release' ],
|
||||
['clang.Debug' ],
|
||||
['clang.Debug' ,'-Dunity=OFF' ],
|
||||
['gcc.Debug' ],
|
||||
['gcc.Debug' ,'-Dunity=OFF' ],
|
||||
['clang.Release' ,'-Dassert=ON' ],
|
||||
['gcc.Release' ,'-Dassert=ON' ],
|
||||
['gcc.Debug' ,'-Dstatic=OFF' ],
|
||||
['gcc.Debug' ,'-Dstatic=OFF -DBUILD_SHARED_LIBS=ON' ],
|
||||
['gcc.Debug' ,'' ,'NINJA_BUILD=true' ],
|
||||
['clang.Debug' ,'-Dunity=OFF -Dsan=address' ,'PARALLEL_TESTS=false', 'DEBUGGER=false'],
|
||||
['clang.Debug' ,'-Dunity=OFF -Dsan=undefined' ,'PARALLEL_TESTS=false'],
|
||||
// TODO - tsan runs currently fail/hang
|
||||
//['clang.Debug' ,'-Dunity=OFF -Dsan=thread' ,'PARALLEL_TESTS=false'],
|
||||
]
|
||||
|
||||
// create a map of all builds
|
||||
// that we want to run. The map
|
||||
// is string keys and node{} object values
|
||||
def builds = [:]
|
||||
for (int index = 0; index < variants.size(); index++) {
|
||||
def bldtype = variants[index][0]
|
||||
def cmake_extra = variants[index].size() > 1 ? variants[index][1] : ''
|
||||
def bldlabel = bldtype + cmake_extra
|
||||
def extra_env = variants[index].size() > 2 ? variants[index][2..-1] : []
|
||||
for (int j = 0; j < extra_env.size(); j++) {
|
||||
bldlabel += "_" + extra_env[j]
|
||||
}
|
||||
bldlabel = bldlabel.replace('-', '_')
|
||||
bldlabel = bldlabel.replace(' ', '')
|
||||
bldlabel = bldlabel.replace('=', '_')
|
||||
|
||||
def compiler = getFirstPart(bldtype)
|
||||
def config = getSecondPart(bldtype)
|
||||
if (compiler == 'docs') {
|
||||
compiler = 'gcc'
|
||||
}
|
||||
def cc =
|
||||
(compiler == 'clang') ? '/opt/llvm-5.0.1/bin/clang' : 'gcc'
|
||||
def cxx =
|
||||
(compiler == 'clang') ? '/opt/llvm-5.0.1/bin/clang++' : 'g++'
|
||||
def ucc = isNoUnity(cmake_extra) ? 'true' : 'false'
|
||||
def node_type =
|
||||
(compiler == 'msvc') ? 'rippled-win' : 'rippled-dev'
|
||||
// the default disposition for parallel test..disabled
|
||||
// for coverage, enabled otherwise. Can still be overridden
|
||||
// by explicitly setting with extra env settings above.
|
||||
def pt = isCoverage(cmake_extra) ? 'false' : 'true'
|
||||
def max_minutes = 25
|
||||
|
||||
def env_vars = [
|
||||
"BUILD_TYPE=${config}",
|
||||
"COMPILER=${compiler}",
|
||||
"PARALLEL_TESTS=${pt}",
|
||||
'BUILD=cmake',
|
||||
"MAX_TIME=${max_minutes}m",
|
||||
"BUILD_DIR=${bldlabel}",
|
||||
"CMAKE_EXTRA_ARGS=-Dwerr=ON ${cmake_extra}",
|
||||
'VERBOSE_BUILD=true']
|
||||
|
||||
builds[bldlabel] = {
|
||||
node(node_type) {
|
||||
checkout scm
|
||||
dir ('build') {
|
||||
deleteDir()
|
||||
}
|
||||
def cdir = upDir(pwd())
|
||||
echo "BASEDIR: ${cdir}"
|
||||
echo "COMPILER: ${compiler}"
|
||||
echo "BUILD_TYPE: ${config}"
|
||||
echo "USE_CC: ${ucc}"
|
||||
if (compiler == 'msvc') {
|
||||
env_vars.addAll([
|
||||
'BOOST_ROOT=c:\\lib\\boost_1_67',
|
||||
'PROJECT_NAME=rippled',
|
||||
'MSBUILDDISABLENODEREUSE=1', // this ENV setting is probably redundant since we also pass /nr:false to msbuild
|
||||
'OPENSSL_ROOT=c:\\OpenSSL-Win64'])
|
||||
}
|
||||
else {
|
||||
env_vars.addAll([
|
||||
'NINJA_BUILD=false',
|
||||
"CCACHE_BASEDIR=${cdir}",
|
||||
'PLANTUML_JAR=/opt/plantuml/plantuml.jar',
|
||||
'APP_ARGS=--unittest-ipv6',
|
||||
'CCACHE_NOHASHDIR=true',
|
||||
"CC=${cc}",
|
||||
"CXX=${cxx}",
|
||||
'LCOV_ROOT=""',
|
||||
'PATH+CMAKE_BIN=/opt/local/cmake',
|
||||
'GDB_ROOT=/opt/local/gdb',
|
||||
'BOOST_ROOT=/opt/local/boost_1_67_0',
|
||||
"USE_CCACHE=${ucc}"])
|
||||
}
|
||||
|
||||
if (extra_env.size() > 0) {
|
||||
env_vars.addAll(extra_env)
|
||||
}
|
||||
|
||||
withCredentials(
|
||||
[string(
|
||||
credentialsId: 'RIPPLED_CODECOV_TOKEN',
|
||||
variable: 'CODECOV_TOKEN')])
|
||||
{
|
||||
withEnv(env_vars) {
|
||||
myStage(bldlabel)
|
||||
try {
|
||||
timeout(
|
||||
time: max_minutes * 2,
|
||||
units: 'MINUTES')
|
||||
{
|
||||
if (compiler == 'msvc') {
|
||||
powershell "Remove-Item -Path \"${bldlabel}.txt\" -Force -ErrorAction Ignore"
|
||||
// we capture stdout to variable because I could
|
||||
// not figure out how to make powershell redirect internally
|
||||
output = powershell (
|
||||
returnStdout: true,
|
||||
script: windowsBuildCmd())
|
||||
// if the powershell command fails (has nonzero exit)
|
||||
// then the command above throws, we don't get our output,
|
||||
// and we never create this output file.
|
||||
// SEE https://issues.jenkins-ci.org/browse/JENKINS-44930
|
||||
// Alternatively, figure out how to reliably redirect
|
||||
// all output above to a file (Start/Stop transcript does not work)
|
||||
writeFile(
|
||||
file: "${bldlabel}.txt",
|
||||
text: output)
|
||||
}
|
||||
else {
|
||||
sh "rm -fv ${bldlabel}.txt"
|
||||
// execute the bld command in a redirecting shell
|
||||
// to capture output
|
||||
sh redhatBuildCmd(bldlabel)
|
||||
}
|
||||
}
|
||||
}
|
||||
finally {
|
||||
if (bldtype == 'docs') {
|
||||
publishHTML(
|
||||
allowMissing: true,
|
||||
alwaysLinkToLastBuild: false,
|
||||
keepAll: true,
|
||||
reportName: 'Doxygen',
|
||||
reportDir: 'build/docs/html_doc',
|
||||
reportFiles: 'index.html')
|
||||
}
|
||||
def envs = ''
|
||||
for (int j = 0; j < extra_env.size(); j++) {
|
||||
envs += ", <br/>" + extra_env[j]
|
||||
}
|
||||
def cmake_txt = cmake_extra
|
||||
if (cmake_txt != '') {
|
||||
cmake_txt = " <br/>" + cmake_txt
|
||||
}
|
||||
def st = reportStatus(bldlabel, bldtype + cmake_txt + envs, env.BUILD_URL)
|
||||
lock('rippled_dev_status') {
|
||||
all_status[bldlabel] = st
|
||||
}
|
||||
} //try-catch-finally
|
||||
} //withEnv
|
||||
} //withCredentials
|
||||
} //node
|
||||
} //builds item
|
||||
} //for variants
|
||||
|
||||
// Also add a single build job for doing the RPM build
|
||||
// on a docker node
|
||||
builds['rpm'] = {
|
||||
node('docker') {
|
||||
def bldlabel = 'rpm'
|
||||
def remote =
|
||||
(git_fork == 'ripple') ? 'origin' : git_fork
|
||||
|
||||
withCredentials(
|
||||
[string(
|
||||
credentialsId: 'RIPPLED_RPM_ROLE_ID',
|
||||
variable: 'ROLE_ID')])
|
||||
{
|
||||
withEnv([
|
||||
'docker_image=artifactory.ops.ripple.com:6555/rippled-rpm-builder:latest',
|
||||
"git_commit=${commit_id}",
|
||||
"git_remote=${remote}",
|
||||
"rpm_release=${env.BUILD_ID}"])
|
||||
{
|
||||
try {
|
||||
sh "rm -fv ${bldlabel}.txt"
|
||||
sh "if [ -d rpm-out ]; then rm -rf rpm-out; fi"
|
||||
sh rpmBuildCmd(bldlabel)
|
||||
}
|
||||
finally {
|
||||
def st = reportStatus(bldlabel, bldlabel, env.BUILD_URL)
|
||||
lock('rippled_dev_status') {
|
||||
all_status[bldlabel] = st
|
||||
}
|
||||
archiveArtifacts(
|
||||
artifacts: 'rpm-out/*.rpm',
|
||||
allowEmptyArchive: true)
|
||||
}
|
||||
} //withEnv
|
||||
} //withCredentials
|
||||
} //node
|
||||
}
|
||||
|
||||
// this actually executes all the builds we just defined
|
||||
// above, in parallel as slaves are available
|
||||
parallel builds
|
||||
}
|
||||
}
|
||||
finally {
|
||||
// anything here should run always...
|
||||
stage ('Final Status') {
|
||||
node {
|
||||
def results = makeResultText()
|
||||
try {
|
||||
def res = getCommentID() //get array return b/c jenkins does not allow multiple direct return/assign
|
||||
def comment_id = res[0]
|
||||
def url_comment = res[1]
|
||||
def mode = 'PATCH'
|
||||
if (comment_id == 0) {
|
||||
echo 'no existing status comment found'
|
||||
mode = 'POST'
|
||||
}
|
||||
|
||||
def body = JsonOutput.toJson([
|
||||
body: results
|
||||
])
|
||||
|
||||
response = httpRequest(
|
||||
timeout: 10,
|
||||
authentication: github_cred,
|
||||
url: url_comment,
|
||||
contentType: 'APPLICATION_JSON',
|
||||
httpMode: mode,
|
||||
requestBody: body)
|
||||
}
|
||||
catch (e) {
|
||||
echo 'had a problem interacting with github...status is probably not updated'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------
|
||||
// util functions
|
||||
// ---------------
|
||||
def myStage(name) {
|
||||
echo """
|
||||
+++++++++++++++++++++++++++++++++++++++++
|
||||
>> building ${name}
|
||||
+++++++++++++++++++++++++++++++++++++++++
|
||||
"""
|
||||
}

def printGitInfo(id, log) {
  echo """
+++++++++++++++++++++++++++++++++++++++++
>> Building commit ID ${id}
>>
${log}
+++++++++++++++++++++++++++++++++++++++++
"""
}

def makeResultText () {
  def start_time = new Date()
  def sdf = new SimpleDateFormat('yyyyMMdd - HH:mm:ss')
  def datestamp = sdf.format(start_time)

  def results = """
## Jenkins Build Summary

Built from [this commit](https://github.com/${git_fork}/${git_repo}/commit/${commit_id})

Built at __${datestamp}__

### Test Results

Build Type | Log | Result | Status
---------- | --- | ------ | ------
"""
  for ( e in all_status) {
    results += e.value[1] + ' | ' + e.value[3] + ' | ' + e.value[2] + ' | ' +
      (e.value[0] ? 'PASS :white_check_mark: ' : 'FAIL :red_circle: ') + '\n'
  }
  results += '\n'
  echo 'FINAL BUILD RESULTS'
  echo results
  results
}

def getCommentURL () {
  def url_c = ''
  if (env.CHANGE_ID && env.CHANGE_ID ==~ /\d+/) {
    //
    // CHANGE_ID indicates we are building a PR
    // find PR comments
    //
    def resp = httpRequest(
      timeout: 10,
      authentication: github_cred,
      url: "${github_api}/pulls/$CHANGE_ID")
    def result = readJSON(text: resp.content)
    //
    // follow issue comments link
    //
    url_c = result['_links']['issue']['href'] + '/comments'
  }
  else {
    //
    // if not a PR, just search comments for our commit ID
    //
    url_c =
      "${github_api}/commits/${commit_id}/comments"
  }
  url_c
}

def getCommentID () {
  def url_c = getCommentURL()
  def response = httpRequest(
    timeout: 10,
    authentication: github_cred,
    url: url_c)
  def data = readJSON(text: response.content)
  def comment_id = 0
  // see if we can find an existing comment here with
  // a heading that matches ours...
  for (comment in data) {
    if (comment['body'] =~ /(?m)^##\s+Jenkins Build/) {
      comment_id = comment['id']
      echo "existing status comment ${comment_id} found"
      url_c = comment['url']
      break;
    }
  }
  [comment_id, url_c]
}

def getCommitID () {
  def cid = sh (
    script: 'git rev-parse HEAD',
    returnStdout: true)
  cid = cid.trim()
  echo "commit ID is ${cid}"
  commit_log = sh (
    script: "git show --name-status ${cid}",
    returnStdout: true)
  printGitInfo (cid, commit_log)
  cid
}

@NonCPS
def getResults(text, label) {
  // example:
  // 194.5s, 154 suites, 948 cases, 360485 tests total, 0 failures
  // or build log format:
  // [msvc.release] 71.3s, 162 suites, 995 cases, 318901 tests total, 1 failure
  def matcher =
    text == '' ?
      manager.getLogMatcher(/\[${label}\].+?(\d+) case[s]?, (\d+) test[s]? total, (\d+) (failure(s?))/) :
      text =~ /(\d+) case[s]?, (\d+) test[s]? total, (\d+) (failure(s?))/
  matcher ? matcher[0][1] + ' cases, ' + matcher[0][3] + ' failed' : 'no test results'
}

@NonCPS
def getFailures(text, label) {
  // [see above for format]
  def matcher =
    text == '' ?
      manager.getLogMatcher(/\[${label}\].+?(\d+) test[s]? total, (\d+) (failure(s?))/) :
      text =~ /(\d+) test[s]? total, (\d+) (failure(s?))/
  // if we didn't match, then return 1 since something is
  // probably wrong, e.g. maybe the build failed...
  matcher ? matcher[0][2] as Integer : 1i
}

@NonCPS
def getTime(text, label) {
  // look for text following a label 'real' for
  // wallclock time. Some `time`s report fractional
  // seconds and we can omit those in what we report
  def matcher =
    text == '' ?
      manager.getLogMatcher(/(?m)^\[${label}\]\s+real\s+(.+)\.(\d+?)[s]?/) :
      text =~ /(?m)^real\s+(.+)\.(\d+?)[s]?/
  if (matcher) {
    return matcher[0][1] + 's'
  }

  // alternatively, look for powershell elapsed time
  // format, e.g. :
  // TotalSeconds : 523.2140529
  def matcher2 =
    text == '' ?
      manager.getLogMatcher(/(?m)^\[${label}\]\s+TotalSeconds\s+:\s+(\d+)\.(\d+?)?/) :
      text =~ /(?m)^TotalSeconds\s+:\s+(\d+)\.(\d+?)?/
  matcher2 ? matcher2[0][1] + 's' : 'n/a'
}

// text before the first '.' in a build label, e.g. the compiler name
@NonCPS
def getFirstPart(bld) {
  def matcher = bld =~ /^(.+?)\.(.+)$/
  matcher ? matcher[0][1] : bld
}

@NonCPS
def isNoUnity(bld) {
  def matcher = bld =~ /-Dunity=(off|OFF)/
  matcher ? true : false
}

@NonCPS
def isCoverage(bld) {
  def matcher = bld =~ /-Dcoverage=(on|ON)/
  matcher ? true : false
}

// text after the first '.' in a build label, e.g. the build type and options
@NonCPS
def getSecondPart(bld) {
  def matcher = bld =~ /^(.+?)\.(.+)$/
  matcher ? matcher[0][2] : bld
}

// because I can't seem to find path manipulation
// functions in groovy....
@NonCPS
def upDir(path) {
  def matcher = path =~ /^(.+)\/(.+?)/
  matcher ? matcher[0][1] : path
}

// the shell command used for building on redhat
def redhatBuildCmd(bldlabel) {
'''\
#!/bin/bash
set -ex
log_file=''' + "${bldlabel}.txt" + '''
exec 3>&1 1>>${log_file} 2>&1
ccache -s
source /opt/rh/devtoolset-7/enable
/usr/bin/time -p ./bin/ci/ubuntu/build-and-test.sh 2>&1
ccache -s
'''
}

// the powershell command used for building on windows
def windowsBuildCmd() {
'''
# Enable streams 3-6
$WarningPreference = 'Continue'
$VerbosePreference = 'Continue'
$DebugPreference = 'Continue'
$InformationPreference = 'Continue'

Invoke-BatchFile "${env:ProgramFiles(x86)}\\Microsoft Visual Studio\\2017\\Community\\VC\\Auxiliary\\Build\\vcvarsall.bat" x86_amd64
Get-ChildItem env:* | Sort-Object name
cl
cmake --version
New-Item -ItemType Directory -Force -Path "build/$env:BUILD_DIR" -ErrorAction Stop
$sw = [Diagnostics.Stopwatch]::StartNew()
try {
  Push-Location "build/$env:BUILD_DIR"
  if ($env:NINJA_BUILD -eq "true") {
    Invoke-Expression "& cmake -G`"Ninja`" -DCMAKE_BUILD_TYPE=$env:BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=ON $env:CMAKE_EXTRA_ARGS ../.."
  }
  else {
    Invoke-Expression "& cmake -G`"Visual Studio 15 2017 Win64`" -DCMAKE_VERBOSE_MAKEFILE=ON $env:CMAKE_EXTRA_ARGS ../.."
  }
  if ($LastExitCode -ne 0) { throw "CMake failed" }

  ## as of 01/2018, DO NOT USE cmake to run the actual build step. for some
  ## reason, cmake spawning the build under jenkins causes MSBUILD/ninja to
  ## get stuck at the end of the build. Perhaps cmake is spawning
  ## incorrectly or failing to pass certain params

  if ($env:NINJA_BUILD -eq "true") {
    ninja -j $env:NUMBER_OF_PROCESSORS -v
  }
  else {
    msbuild /fl /m /nr:false /p:Configuration="$env:BUILD_TYPE" /p:Platform=x64 /p:GenerateFullPaths=True /v:normal /nologo /clp:"ShowCommandLine;DisableConsoleColor" "$env:PROJECT_NAME.vcxproj"
  }
  if ($LastExitCode -ne 0) { throw "CMake build failed" }

  $exe = "./$env:BUILD_TYPE/$env:PROJECT_NAME"
  if ($env:NINJA_BUILD -eq "true") {
    $exe = "./$env:PROJECT_NAME"
  }
  "Exe is at $exe"
  $params = '--unittest', '--quiet', '--unittest-log'
  if ($env:PARALLEL_TESTS -eq "true") {
    $params += "--unittest-jobs=$env:NUMBER_OF_PROCESSORS"
  }
  & $exe $params
  if ($LastExitCode -ne 0) { throw "Unit tests failed" }
}
catch {
  throw
}
finally {
  $sw.Stop()
  $sw.Elapsed
  Pop-Location
}
'''
}

// the shell command used for building an RPM
def rpmBuildCmd(bldlabel) {
'''\
#!/bin/bash
set -ex
log_file=''' + "${bldlabel}.txt" + '''
exec 3>&1 1>>${log_file} 2>&1

# Vault Steps
SECRET_ID=$(cat /.vault/rippled-build-role/secret-id)
export VAULT_TOKEN=$(/usr/local/ripple/ops-toolbox/vault/vault_approle_auth -r ${ROLE_ID} -s ${SECRET_ID} -t)
/usr/local/ripple/ops-toolbox/vault/vault_get_sts_token.py -r rippled-build-role

mkdir -p rpm-out

docker pull "${docker_image}"

echo "Running build container"

docker run --rm \
  -v $PWD/rpm-out:/opt/rippled-rpm/out \
  -e "GIT_COMMIT=$git_commit" \
  -e "GIT_REMOTE=$git_remote" \
  -e "RPM_RELEASE=$rpm_release" \
  "${docker_image}"

. rpm-out/build_vars

cd rpm-out
tar xvf rippled-*.tar.gz
ls -la *.rpm
#################################
## for now we don't want the src
## and debugsource rpms for testing
## or archiving...
#################################
rm rippled-debugsource*.rpm
rm *.src.rpm
mkdir rpm-main
cp *.rpm rpm-main
cd rpm-main
cd ../..

cat > test_rpm.sh << "EOL"
#!/bin/bash

function error {
  echo $1
  exit 1
}

yum install -y yum-utils openssl-static zlib-static
rpm -i /opt/rippled-rpm/*.rpm
rc=$?; if [[ $rc != 0 ]]; then
  error "error installing rpms"
fi

/opt/ripple/bin/rippled --unittest
rc=$?; if [[ $rc != 0 ]]; then
  error "rippled --unittest failed"
fi

/opt/ripple/bin/validator-keys --unittest
rc=$?; if [[ $rc != 0 ]]; then
  error "validator-keys --unittest failed"
fi

EOL

chmod +x test_rpm.sh

echo "Running test container"

docker run --rm \
  -v $PWD/rpm-out/rpm-main:/opt/rippled-rpm \
  -v $PWD:/opt/rippled --entrypoint /opt/rippled/test_rpm.sh \
  centos:latest
'''
}

// post processing step after each build:
// * archives the log file
// * adds short description/status to build status
// * returns an array of result info to add to the all_status summary
def reportStatus(label, type, bldurl) {
  def outstr = ''
  def loglink = "[console](${bldurl}/console)"
  def logfile = "${label}.txt"
  if ( fileExists(logfile) ) {
    archiveArtifacts( artifacts: logfile )
    outstr = readFile(logfile)
    loglink = "[logfile](${bldurl}/artifact/${logfile})"
  }
  def st = getResults(outstr, label)
  def time = getTime(outstr, label)
  def fail_count = getFailures(outstr, label)
  outstr = null
  def txtcolor =
    fail_count == 0 ? 'DarkGreen' : 'Crimson'
  def shortbld = label
  // this is just an attempt to shorten the
  // summary text label to the point of absurdity..
  shortbld = shortbld.replace('Debug', 'dbg')
  shortbld = shortbld.replace('Release', 'rel')
  shortbld = shortbld.replace('true', 'Y')
  shortbld = shortbld.replace('false', 'N')
  shortbld = shortbld.replace('Dcoverage', 'cov')
  shortbld = shortbld.replace('Dassert', 'asrt')
  shortbld = shortbld.replace('Dunity', 'unty')
  shortbld = shortbld.replace('Dsan=address', 'asan')
  shortbld = shortbld.replace('Dsan=thread', 'tsan')
  shortbld = shortbld.replace('Dsan=undefined', 'ubsan')
  shortbld = shortbld.replace('PARALLEL_TEST', 'PL')
  shortbld = shortbld.replace('MANUAL_TESTS', 'MAN')
  shortbld = shortbld.replace('NINJA_BUILD', 'ninja')
  shortbld = shortbld.replace('DEBUGGER', 'gdb')
  shortbld = shortbld.replace('ON', 'Y')
  shortbld = shortbld.replace('OFF', 'N')
  manager.addShortText(
    "${shortbld}: ${st}, t: ${time}",
    txtcolor,
    'white',
    '0px',
    'white')
  [fail_count == 0, type, "${st}, t: ${time}", loglink]
}

LICENSE.md | 17 (new file)
@@ -0,0 +1,17 @@
ISC License

Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
Copyright (c) 2012-2020, the XRP Ledger developers.

Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
README.md | 57
@@ -1,36 +1,48 @@
[](https://codecov.io/gh/XRPLF/rippled)

# The XRP Ledger

The XRP Ledger is a decentralized cryptographic ledger powered by a network of peer-to-peer servers. The XRP Ledger uses a novel Byzantine Fault Tolerant consensus algorithm to settle and record transactions in a secure distributed database without a central operator.
The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powered by a network of peer-to-peer nodes. The XRP Ledger uses a novel Byzantine Fault Tolerant consensus algorithm to settle and record transactions in a secure distributed database without a central operator.

## XRP
XRP is a public, counterparty-less asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP. Its creators gifted 80 billion XRP to a company, now called [Ripple](https://ripple.com/), to develop the XRP Ledger and its ecosystem. Ripple uses XRP the help build the Internet of Value, ushering in a world in which money moves as fast and efficiently as information does today.
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.

## `rippled`
The server software that powers the XRP Ledger is called `rippled` and is available in this repository under the permissive [ISC open-source license](LICENSE). The `rippled` server is written primarily in C++ and runs on a variety of platforms.
## rippled
The server software that powers the XRP Ledger is called `rippled` and is available in this repository under the permissive [ISC open-source license](LICENSE.md). The `rippled` server software is written primarily in C++ and runs on a variety of platforms. The `rippled` server software can run in several modes depending on its [configuration](https://xrpl.org/rippled-server-modes.html).

If you are interested in running an **API Server** (including a **Full History Server**), take a look at [Clio](https://github.com/XRPLF/clio). (rippled Reporting Mode has been replaced by Clio.)

# Key Features of the XRP Ledger
### Build from Source

* [Read the build instructions in `BUILD.md`](BUILD.md)
* If you encounter any issues, please [open an issue](https://github.com/XRPLF/rippled/issues)
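
For orientation, a minimal out-of-source CMake build usually looks something like the sketch below; the exact prerequisites and options vary by platform and release, so treat [`BUILD.md`](BUILD.md) as authoritative:

```
git clone https://github.com/XRPLF/rippled.git
cd rippled
mkdir -p build && cd build
# Options here are illustrative only; see BUILD.md for the required settings.
cmake .. -DCMAKE_BUILD_TYPE=Release
cmake --build . --parallel
```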

## Key Features of the XRP Ledger

- **[Censorship-Resistant Transaction Processing][]:** No single party decides which transactions succeed or fail, and no one can "roll back" a transaction after it completes. As long as those who choose to participate in the network keep it healthy, they can settle transactions in seconds.
- **[Fast, Efficient Consensus Algorithm][]:** The XRP Ledger's consensus algorithm settles transactions in 4 to 5 seconds, processing at a throughput of up to 1500 transactions per second. These properties put XRP at least an order of magnitude ahead of other top digital assets.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. (Each XRP is subdivisible down to 6 decimal places, for a grand total of 100 quintillion _drops_ of XRP.) The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction costs.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction costs.
- **[Responsible Software Governance][]:** A team of full-time, world-class developers at Ripple maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests, and builds constructive relationships with governments and financial institutions worldwide.
- **[Secure, Adaptable Cryptography][]:** The XRP Ledger relies on industry standard digital signature systems like ECDSA (the same scheme used by Bitcoin) but also supports modern, efficient algorithms like Ed25519. The extensible nature of the XRP Ledger's software makes it possible to add and disable algorithms as the state of the art in cryptography advances.
- **[Modern Features for Smart Contracts][]:** Features like Escrow, Checks, and Payment Channels support cutting-edge financial applications including the [Interledger Protocol](https://interledger.org/). This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
- **[On-Ledger Decentralized Exchange][]:** In addition to all the features that make XRP useful on its own, the XRP Ledger also has a fully-functional accounting system for tracking and trading obligations denominated in any way users want, and an exchange built into the protocol. The XRP Ledger can settle long, cross-currency payment paths and exchanges of multiple currencies in atomic transactions, bridging gaps of trust with XRP.

[Censorship-Resistant Transaction Processing]: https://developers.ripple.com/xrp-ledger-overview.html#censorship-resistant-transaction-processing
[Fast, Efficient Consensus Algorithm]: https://developers.ripple.com/xrp-ledger-overview.html#fast-efficient-consensus-algorithm
[Finite XRP Supply]: https://developers.ripple.com/xrp-ledger-overview.html#finite-xrp-supply
[Responsible Software Governance]: https://developers.ripple.com/xrp-ledger-overview.html#responsible-software-governance
[Secure, Adaptable Cryptography]: https://developers.ripple.com/xrp-ledger-overview.html#secure-adaptable-cryptography
[Modern Features for Smart Contracts]: https://developers.ripple.com/xrp-ledger-overview.html#modern-features-for-smart-contracts
[On-Ledger Decentralized Exchange]: https://developers.ripple.com/xrp-ledger-overview.html#on-ledger-decentralized-exchange
[Censorship-Resistant Transaction Processing]: https://xrpl.org/xrp-ledger-overview.html#censorship-resistant-transaction-processing
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/xrp-ledger-overview.html#fast-efficient-consensus-algorithm
[Finite XRP Supply]: https://xrpl.org/xrp-ledger-overview.html#finite-xrp-supply
[Responsible Software Governance]: https://xrpl.org/xrp-ledger-overview.html#responsible-software-governance
[Secure, Adaptable Cryptography]: https://xrpl.org/xrp-ledger-overview.html#secure-adaptable-cryptography
[Modern Features for Smart Contracts]: https://xrpl.org/xrp-ledger-overview.html#modern-features-for-smart-contracts
[On-Ledger Decentralized Exchange]: https://xrpl.org/xrp-ledger-overview.html#on-ledger-decentralized-exchange


## Source Code
[](https://travis-ci.org/ripple/rippled)
[](https://codecov.io/gh/ripple/rippled)

Here are some good places to start learning the source code:

- Read the markdown files in the source tree: `src/ripple/**/*.md`.
- Read [the levelization document](./Builds/levelization) to get an idea of the internal dependency graph.
- In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component.

### Repository Contents

@@ -46,11 +58,14 @@ Some of the directories under `src` are external repositories included using
git-subtree. See those directories' README files for more details.


## Additional Documentation

* [XRP Ledger Dev Portal](https://xrpl.org/)
* [Setup and Installation](https://xrpl.org/install-rippled.html)
* [Source Documentation (Doxygen)](https://xrplf.github.io/rippled/)

## See Also

* [XRP Ledger Dev Portal](https://developers.ripple.com/)
* [XRP News](https://ripple.com/category/xrp/)
* [Setup and Installation](https://developers.ripple.com/install-rippled.html)

To learn about how Ripple is transforming global payments, visit
<https://ripple.com/contact/>.
* [Clio API Server for the XRP Ledger](https://github.com/XRPLF/clio)
* [Mailing List for Release Announcements](https://groups.google.com/g/ripple-server)
* [Learn more about the XRP Ledger (YouTube)](https://www.youtube.com/playlist?list=PLJQ55Tj1hIVZtJ_JdTvSum2qMTsedWkNi)

RELEASENOTES.md | 2797 (file diff suppressed because it is too large)
SECURITY.md | 149 (new file)
@@ -0,0 +1,149 @@
### Operating an XRP Ledger server securely

For more details on operating an XRP Ledger server securely, please visit https://xrpl.org/manage-the-rippled-server.html.


# Security Policy

## Supported Versions

Software constantly evolves. In order to focus resources, we generally only accept vulnerability reports that affect recent and current versions of the software. We always accept reports for issues present in the **master**, **release** or **develop** branches, and in proposed, [open pull requests](https://github.com/ripple/rippled/pulls).

## Identifying and Reporting Vulnerabilities

We take security seriously and we do our best to ensure that all our releases are bug free. But we aren't perfect and sometimes things will slip through.

### Responsible Investigation

We urge you to examine our code carefully and responsibly, and to disclose any issues that you identify in a responsible fashion.

Responsible investigation includes, but isn't limited to, the following:

- Not performing tests on the main network. If testing is necessary, use the [Testnet or Devnet](https://xrpl.org/xrp-testnet-faucet.html).
- Not targeting physical security measures, or attempting to use social engineering, spam, distributed denial of service (DDOS) attacks, etc.
- Investigating bugs in a way that makes a reasonable, good faith effort not to be disruptive or harmful to the XRP Ledger and the broader ecosystem.

### Responsible Disclosure

If you discover a vulnerability or potential threat, or if you _think_
you have, please reach out by dropping an email using the contact
information below.

Your report should include the following:

- Your contact information (typically, an email address);
- The description of the vulnerability;
- The attack scenario (if any);
- The steps to reproduce the vulnerability;
- Any other relevant details or artifacts, including code, scripts or patches.

In your email, please describe the issue or potential threat. If possible, include a "repro" (code that can reproduce the issue) or describe the best way to reproduce and replicate the issue. Please make your report as detailed and comprehensive as possible.

For more information on responsible disclosure, please read this [Wikipedia article](https://en.wikipedia.org/wiki/Responsible_disclosure).

## Report Handling Process

Please report the bug directly to us and limit further disclosure. If you want to prove that you knew the bug as of a given time, consider using a cryptographic precommitment: hash the content of your report and publish the hash on a medium of your choice (e.g. on Twitter or as a memo in a transaction) as "proof" that you had written the text at a given point in time.
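
For example, a minimal sketch of such a precommitment using standard command-line tools (the filename is hypothetical, and any strong hash works):

```
# Compute a SHA-256 digest of the report; publish only the digest and
# keep report.txt private until the issue is fixed.
sha256sum report.txt
```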

Once we receive a report, we:

1. Assign two people to independently evaluate the report;
2. Consider their recommendations;
3. If action is necessary, formulate a plan to address the issue;
4. Communicate privately with the reporter to explain our plan;
5. Prepare, test and release a version which fixes the issue; and
6. Announce the vulnerability publicly.

We will triage and respond to your disclosure within 24 hours. Beyond that, we will work to analyze the issue in more detail, formulate, develop and test a fix.

While we commit to responding within 24 hours of your initial report with our triage assessment, we cannot guarantee a response time for the remaining steps. We will communicate with you throughout this process, letting you know where we are and keeping you updated on the timeframe.

## Bug Bounty Program

[Ripple](https://ripple.com) is generously sponsoring a bug bounty program for vulnerabilities in [`rippled`](https://github.com/XRPLF/rippled) (and other related projects, like [`xrpl.js`](https://github.com/XRPLF/xrpl.js), [`xrpl-py`](https://github.com/XRPLF/xrpl-py), [`xrpl4j`](https://github.com/XRPLF/xrpl4j)).

This program allows us to recognize and reward individuals or groups that identify and report bugs. In summary, in order to qualify for a bounty, the bug must be:

1. **In scope**. Only bugs in software under the scope of the program qualify. Currently, that means `rippled`, `xrpl.js`, `xrpl-py`, `xrpl4j`.
2. **Relevant**. A security issue, posing a danger to user funds, privacy, or the operation of the XRP Ledger.
3. **Original and previously unknown**. Bugs that are already known and discussed in public do not qualify. Previously reported bugs, even if publicly unknown, are not eligible.
4. **Specific**. We welcome general security advice or recommendations, but we cannot pay bounties for that.
5. **Fixable**. There has to be something we can do to permanently fix the problem. Note that bugs in other people’s software may still qualify in some cases. For example, if you find a bug in a library that we use which can compromise the security of software that is in scope and we can get it fixed, you may qualify for a bounty.
6. **Unused**. If you use the exploit to attack the XRP Ledger, you do not qualify for a bounty. If you report a vulnerability used in an ongoing or past attack and there is specific, concrete evidence that suggests you are the attacker, we reserve the right not to pay a bounty.

The amount paid varies dramatically. Vulnerabilities that are harmless on their own, but could form part of a critical exploit will usually receive a bounty. Full-blown exploits can receive much higher bounties. Please don’t hold back partial vulnerabilities while trying to construct a full-blown exploit. We will pay a bounty to anyone who reports a complete chain of vulnerabilities even if they have reported each component of the exploit separately and those vulnerabilities have been fixed in the meantime. However, to qualify for the full bounty, you must have been the first to report each of the partial exploits.
### Contacting Us

To report a qualifying bug, please send a detailed report to:

|Email Address|bugs@ripple.com                                       |
|:-----------:|:----------------------------------------------------|
|Short Key ID | `0xC57929BE`                                         |
|Long Key ID  | `0xCD49A0AFC57929BE`                                 |
|Fingerprint  | `24E6 3B02 37E0 FA9C 5E96 8974 CD49 A0AF C579 29BE`  |

The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is:
```
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBFUwGHYBEAC0wpGpBPkd8W1UdQjg9+cEFzeIEJRaoZoeuJD8mofwI5Ejnjdt
kCpUYEDal0ygkKobu8SzOoATcDl18iCrScX39VpTm96vISFZMhmOryYCIp4QLJNN
4HKc2ZdBj6W4igNi6vj5Qo6JMyGpLY2mz4CZskbt0TNuUxWrGood+UrCzpY8x7/N
a93fcvNw+prgCr0rCH3hAPmAFfsOBbtGzNnmq7xf3jg5r4Z4sDiNIF1X1y53DAfV
rWDx49IKsuCEJfPMp1MnBSvDvLaQ2hKXs+cOpx1BCZgHn3skouEUxxgqbtTzBLt1
xXpmuijsaltWngPnGO7mOAzbpZSdBm82/Emrk9bPMuD0QaLQjWr7HkTSUs6ZsKt4
7CLPdWqxyY/QVw9UaxeHEtWGQGMIQGgVJGh1fjtUr5O1sC9z9jXcQ0HuIHnRCTls
GP7hklJmfH5V4SyAJQ06/hLuEhUJ7dn+BlqCsT0tLmYTgZYNzNcLHcqBFMEZHvHw
9GENMx/tDXgajKql4bJnzuTK0iGU/YepanANLd1JHECJ4jzTtmKOus9SOGlB2/l1
0t0ADDYAS3eqOdOcUvo9ElSLCI5vSVHhShSte/n2FMWU+kMUboTUisEG8CgQnrng
g2CvvQvqDkeOtZeqMcC7HdiZS0q3LJUWtwA/ViwxrVlBDCxiTUXCotyBWwARAQAB
tDBSaXBwbGUgTGFicyBCdWcgQm91bnR5IFByb2dyYW0gPGJ1Z3NAcmlwcGxlLmNv
bT6JAjcEEwEKACEFAlUwGHYCGwMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQ
zUmgr8V5Kb6R0g//SwY/mVJY59k87iL26/KayauSoOcz7xjcST26l4ZHVVX85gOY
HYZl8k0+m8X3zxeYm9a3QAoAml8sfoaFRFQP8ynnefRrLUPaZ2MjbJ0SACMwZNef
T6o7Mi8LBAaiNZdYVyIfX1oM6YXtqYkuJdav6ZCyvVYqc9OvMJPY2ZzJYuI/ZtvQ
/lTndxCeg9ALNX/iezOLGdfMpf4HuIFVwcPPlwGi+HDlB9/bggDEHC8z434SXVFc
aQatXAPcDkjMUweU7y0CZtYEj00HITd4pSX6MqGiHrxlDZTqinCOPs1Ieqp7qufs
MzlM6irLGucxj1+wa16ieyYvEtGaPIsksUKkywx0O7cf8N2qKg+eIkUk6O0Uc6eO
CszizmiXIXy4O6OiLlVHGKkXHMSW9Nwe9GE95O8G9WR8OZCEuDv+mHPAutO+IjdP
PDAAUvy+3XnkceO+HGWRpVvJZfFP2YH4A33InFL5yqlJmSoR/yVingGLxk55bZDM
+HYGR3VeMb8Xj1rf/02qERsZyccMCFdAvKDbTwmvglyHdVLu5sPmktxbBYiemfyJ
qxMxmYXCc9S0hWrWZW7edktBa9NpE58z1mx+hRIrDNbS2sDHrib9PULYCySyVYcF
P+PWEe1CAS5jqkR2ker5td2/pHNnJIycynBEs7l6zbc9fu+nktFJz0q2B+GJAhwE
EAEKAAYFAlUwGaQACgkQ+tiY1qQ2QkjMFw//f2hNY3BPNe+1qbhzumMDCnbTnGif
kLuAGl9OKt81VHG1f6RnaGiLpR696+6Ja45KzH15cQ5JJl5Bgs1YkR/noTGX8IAD
c70eNwiFu8JXTaaeeJrsmFkF9Tueufb364risYkvPP8tNUD3InBFEZT3WN7JKwix
coD4/BwekUwOZVDd/uCFEyhlhZsROxdKNisNo3VtAq2s+3tIBAmTrriFUl0K+ZC5
zgavcpnPN57zMtW9aK+VO3wXqAKYLYmtgxkVzSLUZt2M7JuwOaAdyuYWAneKZPCu
1AXkmyo+d84sd5mZaKOr5xArAFiNMWPUcZL4rkS1Fq4dKtGAqzzR7a7hWtA5o27T
6vynuxZ1n0PPh0er2O/zF4znIjm5RhTlfjp/VmhZdQfpulFEQ/dMxxGkQ9z5IYbX
mTlSDbCSb+FMsanRBJ7Drp5EmBIudVGY6SHI5Re1RQiEh7GoDfUMUwZO+TVDII5R
Ra7WyuimYleJgDo/+7HyfuIyGDaUCVj6pwVtYtYIdOI3tTw1R1Mr0V8yaNVnJghL
CHcEJQL+YHSmiMM3ySil3O6tm1By6lFz8bVe/rgG/5uklQrnjMR37jYboi1orCC4
yeIoQeV0ItlxeTyBwYIV/o1DBNxDevTZvJabC93WiGLw2XFjpZ0q/9+zI2rJUZJh
qxmKP+D4e27lCI65Ag0EVTAYdgEQAMvttYNqeRNBRpSX8fk45WVIV8Fb21fWdwk6
2SkZnJURbiC0LxQnOi7wrtii7DeFZtwM2kFHihS1VHekBnIKKZQSgGoKuFAQMGyu
a426H4ZsSmA9Ufd7kRbvdtEcp7/RTAanhrSL4lkBhaKJrXlxBJ27o3nd7/rh7r3a
OszbPY6DJ5bWClX3KooPTDl/RF2lHn+fweFk58UvuunHIyo4BWJUdilSXIjLun+P
Qaik4ZAsZVwNhdNz05d+vtai4AwbYoO7adboMLRkYaXSQwGytkm+fM6r7OpXHYuS
cR4zB/OK5hxCVEpWfiwN71N2NMvnEMaWd/9uhqxJzyvYgkVUXV9274TUe16pzXnW
ZLfmitjwc91e7mJBBfKNenDdhaLEIlDRwKTLj7k58f9srpMnyZFacntu5pUMNblB
cjXwWxz5ZaQikLnKYhIvrIEwtWPyjqOzNXNvYfZamve/LJ8HmWGCKao3QHoAIDvB
9XBxrDyTJDpxbog6Qu4SY8AdgVlan6c/PsLDc7EUegeYiNTzsOK+eq3G5/E92eIu
TsUXlciypFcRm1q8vLRr+HYYe2mJDo4GetB1zLkAFBcYJm/x9iJQbu0hn5NxJvZO
R0Y5nOJQdyi+muJzKYwhkuzaOlswzqVXkq/7+QCjg7QsycdcwDjiQh3OrsgXHrwl
M7gyafL9ABEBAAGJAh8EGAEKAAkFAlUwGHYCGwwACgkQzUmgr8V5Kb50BxAAhj9T
TwmNrgRldTHszj+Qc+v8RWqV6j+R+zc0cn5XlUa6XFaXI1OFFg71H4dhCPEiYeN0
IrnocyMNvCol+eKIlPKbPTmoixjQ4udPTR1DC1Bx1MyW5FqOrsgBl5t0e1VwEViM
NspSStxu5Hsr6oWz2GD48lXZWJOgoL1RLs+uxjcyjySD/em2fOKASwchYmI+ezRv
plfhAFIMKTSCN2pgVTEOaaz13M0U+MoprThqF1LWzkGkkC7n/1V1f5tn83BWiagG
2N2Q4tHLfyouzMUKnX28kQ9sXfxwmYb2sA9FNIgxy+TdKU2ofLxivoWT8zS189z/
Yj9fErmiMjns2FzEDX+bipAw55X4D/RsaFgC+2x2PDbxeQh6JalRA2Wjq32Ouubx
u+I4QhEDJIcVwt9x6LPDuos1F+M5QW0AiUhKrZJ17UrxOtaquh/nPUL9T3l2qPUn
1ChrZEEEhHO6vA8+jn0+cV9n5xEz30Str9iHnDQ5QyR5LyV4UBPgTdWyQzNVKA69
KsSr9lbHEtQFRzGuBKwt6UlSFv9vPWWJkJit5XDKAlcKuGXj0J8OlltToocGElkF
+gEBZfoOWi/IBjRLrFW2cT3p36DTR5O1Ud/1DLnWRqgWNBLrbs2/KMKE6EnHttyD
7Tz8SQkuxltX/yBXMV3Ddy0t6nWV2SZEfuxJAQI=
=spg4
-----END PGP PUBLIC KEY BLOCK-----
```
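
As an illustration (not an official procedure), the key can be fetched from a key server and checked against the fingerprint in the table above using standard GnuPG commands:

```
gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys 0xCD49A0AFC57929BE
gpg --fingerprint bugs@ripple.com  # compare with the fingerprint listed above
```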

appveyor.yml | 94 (deleted)
@@ -1,94 +0,0 @@
# Set environment variables.
environment:

  # We bundle up protoc.exe and only the parts of boost and openssl we need so
  # that it's a small download. We also use appveyor's free cache, avoiding fees
  # downloading from S3 each time.
  # TODO: script to create this package.
  RIPPLED_DEPS_PATH: rippled_deps17.04
  RIPPLED_DEPS_URL: https://ripple.github.io/Downloads/appveyor/%RIPPLED_DEPS_PATH%.zip

  # CMake honors these environment variables, setting the include/lib paths.
  BOOST_ROOT: C:/%RIPPLED_DEPS_PATH%/boost
  OPENSSL_ROOT: C:/%RIPPLED_DEPS_PATH%/openssl

  # We've had trouble with AppVeyor apparently not having a stack as large
  # as the *nix CI platforms. AppVeyor support suggested that we try
  # GCE VMs. The following line is supposed to enable that VM type.
  appveyor_build_worker_cloud: gce

  matrix:
  - build: cmake
    target: msvc.debug
    buildconfig: Debug

os: Visual Studio 2017

# At the end of each successful build we cache this directory.
# https://www.appveyor.com/docs/build-cache/
# Resulting archive should not exceed 100 MB.
cache:
  - 'C:\%RIPPLED_DEPS_PATH%'

# This means we'll download a zip of the branch we want, rather than the full
# history.
shallow_clone: true

install:
  # We want protoc.exe on PATH.
  - SET PATH=C:/%RIPPLED_DEPS_PATH%;%PATH%

  # Download dependencies if appveyor didn't restore them from the cache.
  # Use 7zip to unzip.
  - ps: |
      if (-not(Test-Path 'C:/$env:RIPPLED_DEPS_PATH')) {
        echo "Download from $env:RIPPLED_DEPS_URL"
        Start-FileDownload "$env:RIPPLED_DEPS_URL"
        7z x "$($env:RIPPLED_DEPS_PATH).zip" -oC:\ -y > $null
        if ($LastExitCode -ne 0) { throw "7z failed" }
      }

  # Newer DEPS include a versions file.
  # Dump it so we can verify correct behavior.
  - ps: |
      if (Test-Path "C:/$env:RIPPLED_DEPS_PATH/versions.txt") {
        cat "C:/$env:RIPPLED_DEPS_PATH/versions.txt"
      }

# TODO: This is giving me grief
# artifacts:
#   # Save rippled.exe in the cloud after each build.
#   - path: "build\\rippled.exe"

build_script:
  # We set the environment variables needed to put compilers on the PATH.
  - '"%VS140COMNTOOLS%../../VC/vcvarsall.bat" x86_amd64'
  # Show which version of the compiler we are using.
  - cl
  - ps: |
      # Build with cmake
      cmake --version
      $cmake_target="$($env:target).ci"
      "$cmake_target"
      New-Item -ItemType Directory -Force -Path "build/$cmake_target"
      Push-Location "build/$cmake_target"
      cmake -G"Visual Studio 15 2017 Win64" ../..
      if ($LastExitCode -ne 0) { throw "CMake failed" }
      cmake --build . --config $env:buildconfig -- -m
      if ($LastExitCode -ne 0) { throw "CMake build failed" }
      Pop-Location

after_build:
  - ps: |
      $exe="build/$cmake_target/$env:buildconfig/rippled"
      "Exe is at $exe"

test_script:
  - ps: |
      & {
        # Run the rippled unit tests
        & $exe --unittest --unittest-log
        # https://connect.microsoft.com/PowerShell/feedback/details/751703/option-to-stop-script-if-command-line-exe-fails
        if ($LastExitCode -ne 0) { throw "Unit tests failed" }
      }
@@ -1,192 +0,0 @@
#!/bin/bash -u
# We use set -e and bash with -u to bail on first non zero exit code of any
# processes launched or upon any unbound variable.
# We use set -x to print commands before running them to help with
# debugging.
set -ex
__dirname=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
echo "using CC: $CC"
"${CC}" --version
export CC
COMPNAME=$(basename $CC)
echo "using CXX: ${CXX:-notset}"
if [[ $CXX ]]; then
  "${CXX}" --version
  export CXX
fi
: ${BUILD_TYPE:=Debug}
echo "BUILD TYPE: $BUILD_TYPE"

# Ensure APP defaults to rippled if it's not set.
: ${APP:=rippled}
echo "using APP: $APP"

JOBS=${NUM_PROCESSORS:-2}
if [[ ${TRAVIS:-false} != "true" ]]; then
  JOBS=$((JOBS+1))
fi

if [ -x /usr/bin/time ] ; then
  : ${TIME:="Duration: %E"}
  export TIME
  time=/usr/bin/time
else
  time=
fi

echo "cmake building ${APP}"
: ${CMAKE_EXTRA_ARGS:=""}
if [[ ${NINJA_BUILD:-} == true ]]; then
  CMAKE_EXTRA_ARGS+=" -G Ninja"
fi

coverage=false
if [[ "${CMAKE_EXTRA_ARGS}" =~ -Dcoverage=((on)|(ON)) ]] ; then
  echo "coverage option detected."
  coverage=true
fi

#
# allow explicit setting of the name of the build
# dir, otherwise default to the compiler.build_type
#
: "${BUILD_DIR:=${COMPNAME}.${BUILD_TYPE}}"
BUILDARGS=" -j${JOBS}"
if [[ ${VERBOSE_BUILD:-} == true ]]; then
  CMAKE_EXTRA_ARGS+=" -DCMAKE_VERBOSE_MAKEFILE=ON"

  # TODO: if we use a different generator, this
  # option to build verbose would need to change:
  if [[ ${NINJA_BUILD:-} == true ]]; then
    BUILDARGS+=" -v"
  else
    BUILDARGS+=" verbose=1"
  fi
fi
if [[ ${USE_CCACHE:-} == true ]]; then
  echo "using ccache with basedir [${CCACHE_BASEDIR:-}]"
  CMAKE_EXTRA_ARGS+=" -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
fi
if [ -d "build/${BUILD_DIR}" ]; then
  rm -rf "build/${BUILD_DIR}"
fi

mkdir -p "build/${BUILD_DIR}"
pushd "build/${BUILD_DIR}"
$time cmake ../.. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_EXTRA_ARGS}
if [[ ${BUILD_TYPE} == "docs" ]]; then
  $time cmake --build . --target docs -- $BUILDARGS
  ## mimic the standard test output for docs build
  ## to make controlling processes like jenkins happy
  if [ -f html_doc/index.html ]; then
    echo "1 case, 1 test total, 0 failures"
  else
    echo "1 case, 1 test total, 1 failures"
  fi
  exit
else
  $time cmake --build . -- $BUILDARGS
fi
popd
export APP_PATH="$PWD/build/${BUILD_DIR}/${APP}"
echo "using APP_PATH: $APP_PATH"

# See what we've actually built
ldd $APP_PATH

function join_by { local IFS="$1"; shift; echo "$*"; }

# This is a list of manual tests
# in rippled that we want to run
declare -a manual_tests=(
  "beast.chrono.abstract_clock"
  "beast.unit_test.print"
  "ripple.NodeStore.Timing"
  "ripple.app.Flow_manual"
  "ripple.app.NoRippleCheckLimits"
  "ripple.app.PayStrandAllPairs"
  "ripple.consensus.ByzantineFailureSim"
  "ripple.consensus.DistributedValidators"
  "ripple.consensus.ScaleFreeSim"
  "ripple.ripple_data.digest"
  "ripple.tx.CrossingLimits"
  "ripple.tx.FindOversizeCross"
  "ripple.tx.Offer_manual"
  "ripple.tx.OversizeMeta"
  "ripple.tx.PlumpBook"
)

: ${APP_ARGS:=}
if [[ ${APP} == "rippled" ]]; then
  if [[ ${MANUAL_TESTS:-} == true ]]; then
    APP_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
  else
    APP_ARGS+=" --unittest --quiet --unittest-log"
  fi
  # Only report on src/ripple files
  export LCOV_FILES="*/src/ripple/*"
  # Nothing to explicitly exclude
  export LCOV_EXCLUDE_FILES="LCOV_NO_EXCLUDE"
  if [[ ${coverage} == false && ${PARALLEL_TESTS:-} == true ]]; then
    APP_ARGS+=" --unittest-jobs ${JOBS}"
  fi
else
  : ${LCOV_FILES:="*/src/*"}
  # Don't exclude anything
  : ${LCOV_EXCLUDE_FILES:="LCOV_NO_EXCLUDE"}
fi

if [[ $coverage == true ]]; then
  export PATH=$PATH:$LCOV_ROOT/usr/bin

  # Create baseline coverage data file
  lcov --no-external -c -i -d . -o baseline.info | grep -v "ignoring data for external file"
fi

if [[ ${SKIP_TESTS:-} == true ]]; then
  echo "skipping tests."
  exit
fi

if [[ "${MAX_TIME:-}" == "" ]] ; then
  tcmd=""
else
  tcmd="timeout ${MAX_TIME}"
fi

if [[ ${DEBUGGER:-true} == "true" && -v GDB_ROOT && -x $GDB_ROOT/bin/gdb ]]; then
  $GDB_ROOT/bin/gdb -v
  # Execute unit tests under gdb, printing a call stack
  # if we get a crash.
  export APP_ARGS
  $tcmd $GDB_ROOT/bin/gdb -return-child-result -quiet -batch \
    -ex "set env MALLOC_CHECK_=3" \
    -ex "set print thread-events off" \
    -ex run \
    -ex "thread apply all backtrace full" \
    -ex "quit" \
    --args $APP_PATH $APP_ARGS
else
  $tcmd $APP_PATH $APP_ARGS
fi

if [[ $coverage == true ]]; then
  # Create test coverage data file
  lcov --no-external -c -d . -o tests.info | grep -v "ignoring data for external file"

  # Combine baseline and test coverage data
  lcov -a baseline.info -a tests.info -o lcov-all.info

  # Included files
  lcov -e "lcov-all.info" "${LCOV_FILES}" -o lcov.pre.info

  # Excluded files
  lcov --remove lcov.pre.info "${LCOV_EXCLUDE_FILES}" -o lcov.info

  # Push the results (lcov.info) to codecov
  codecov -X gcov # don't even try and look for .gcov files ;)

  find . -name "*.gcda" | xargs rm -f
fi

@@ -1,29 +0,0 @@
#!/bin/bash -u
# Exit if anything fails. Echo commands to aid debugging.
set -ex

# Target working dir - defaults to current dir.
# Can be set from caller, or in the first parameter
TWD=$( cd ${TWD:-${1:-${PWD:-$( pwd )}}}; pwd )
echo "Target path is: $TWD"
# Override gcc version to $GCC_VER.
# Put an appropriate symlink at the front of the path.
mkdir -pv $HOME/bin
for g in gcc g++ gcov gcc-ar gcc-nm gcc-ranlib
do
  test -x $( type -p ${g}-$GCC_VER )
  ln -sv $(type -p ${g}-$GCC_VER) $HOME/bin/${g}
done

# What versions are we ACTUALLY running?
if [ -x $HOME/bin/g++ ]; then
  $HOME/bin/g++ -v
else
  g++ -v
fi

pip install --user requests==2.13.0
pip install --user https://github.com/codecov/codecov-python/archive/master.zip

bash bin/sh/install-boost.sh

@@ -1,86 +0,0 @@
#!/usr/bin/env bash

rippled_exe=/opt/ripple/bin/rippled
conf_file=/etc/opt/ripple/rippled.cfg

while getopts ":e:c:" opt; do
  case $opt in
    e)
      rippled_exe=${OPTARG}
      ;;
    c)
      conf_file=${OPTARG}
      ;;
    \?)
      echo "Invalid option: -$OPTARG"
  esac
done

tmp_loc=$(mktemp -d --tmpdir ripple_info.XXXX)
cd /tmp
chmod 751 ripple_info.*
cd ~
echo ${tmp_loc}

cleaned_conf=${tmp_loc}/cleaned_rippled_cfg.txt

if [[ -f ${conf_file} ]]
then
  db=$(sed -r -e 's/\<s[a-zA-Z0-9]{28}\>/secretsecretsecretsecretmaybe/g' ${conf_file} |\
    awk -v OUT_FILE=${cleaned_conf} '
    BEGIN {skip=0; db_path="";print > OUT_FILE}
    /^\[validation_seed\]/ {skip=1; next}
    /^\[node_seed\]/ {skip=1; next}
    /^\[validation_manifest\]/ {skip=1; next}
    /^\[validator_token\]/ {skip=1; next}
    /^\[.*\]/ {skip=0}
    skip==1 {next}
    save==1 {save=0;db_path=$0}
    /^\[database_path\]/ {save=1}
    {print >> OUT_FILE}
    END {print db_path}
    ')
fi

echo "database_path: ${db}"
df ${db} > ${tmp_loc}/db_path_df.txt
echo

# Send output from this script to a log file
## this captures any messages
## or errors from the script itself

log_file=${tmp_loc}/get_info.log
exec 3>&1 1>>${log_file} 2>&1

## Send all stdout files to /tmp

if [[ -x ${rippled_exe} ]]
then
  pgrep rippled && \
    ${rippled_exe} --conf ${conf_file} \
      -- server_info > ${tmp_loc}/server_info.txt
fi

df -h > ${tmp_loc}/free_disk_space.txt
cat /proc/meminfo > ${tmp_loc}/amount_mem.txt
cat /proc/swaps > ${tmp_loc}/swap_space.txt
ulimit -a > ${tmp_loc}/reported_current_limits.txt

for dev_path in $(df | awk '$1 ~ /^\/dev\// {print $1}'); do
  # strip numbers from end and remove '/dev/'
  dev=$(basename ${dev_path%%[0-9]})
  if [[ "$(cat /sys/block/${dev}/queue/rotational)" = 0 ]]
  then
    echo "${dev} : SSD" >> ${tmp_loc}/is_ssd.txt
  else
    echo "${dev} : NO SSD" >> ${tmp_loc}/is_ssd.txt
  fi
done

pushd ${tmp_loc}
tar -czvf info-package.tar.gz *.txt *.log
popd

echo "Use the following command on your local machine to download from your rippled instance: scp <remote_rippled_username>@<remote_host>:${tmp_loc}/info-package.tar.gz <path/to/local_machine/directory>"| tee /dev/fd/3

bin/getRippledInfo | 150 (new executable file)
@@ -0,0 +1,150 @@
#!/usr/bin/env bash

# This script generates information about your rippled installation
# and system. It can be used to help debug issues that you may face
# in your installation. While this script endeavors to not display any
# sensitive information, it is recommended that you read the output
# before sharing with any third parties.
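#
# Example invocations (the paths shown are this script's built-in defaults;
# use -e and -c to point at a different executable or config file):
#   bin/getRippledInfo
#   bin/getRippledInfo -e /opt/ripple/bin/rippled -c /etc/opt/ripple/rippled.cfg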

rippled_exe=/opt/ripple/bin/rippled
conf_file=/etc/opt/ripple/rippled.cfg

while getopts ":e:c:" opt; do
  case $opt in
    e)
      rippled_exe=${OPTARG}
      ;;
    c)
      conf_file=${OPTARG}
      ;;
    \?)
      echo "Invalid option: -$OPTARG"
      exit -1
  esac
done

tmp_loc=$(mktemp -d --tmpdir ripple_info.XXXXX)
chmod 751 ${tmp_loc}
awk_prog=${tmp_loc}/cfg.awk
summary_out=${tmp_loc}/rippled_info.md
printf "# rippled report info\n\n> generated at %s\n" "$(date -R)" > ${summary_out}

function log_section {
  printf "\n## %s\n" "$*" >> ${summary_out}

  while read -r l; do
    echo "    $l" >> ${summary_out}
  done </dev/stdin
}

function join_by {
  local IFS="$1"; shift; echo "$*";
}

if [[ -f ${conf_file} ]] ; then
  exclude=( ips ips_fixed node_seed validation_seed validator_token )
  cleaned_conf=${tmp_loc}/cleaned_rippled_cfg.txt
  cat << 'EOP' >> ${awk_prog}
BEGIN {FS="[[:space:]]*=[[:space:]]*"; skip=0; db_path=""; print > OUT_FILE; split(exl,exa,"|")}
/^#/ {next}
save==2 && /^[[:space:]]*$/ {next}
/^\[.+\]$/ {
  section=tolower(gensub(/^\[[[:space:]]*([a-zA-Z_]+)[[:space:]]*\]$/, "\\1", "g"))
  skip = 0
  for (i in exa) {
    if (section == exa[i])
      skip = 1
  }
  if (section == "database_path")
    save = 1
}
skip==1 {next}
save==2 {save=0; db_path=$0}
save==1 {save=2}
$1 ~ /password/ {$0=$1"=<redacted>"}
{print >> OUT_FILE}
END {print db_path}
EOP

  db=$(\
    sed -r -e 's/\<s[[:alnum:]]{28}\>/<redactedsecret>/g;s/^[[:space:]]*//;s/[[:space:]]*$//' ${conf_file} |\
    awk -v OUT_FILE=${cleaned_conf} -v exl="$(join_by '|' "${exclude[@]}")" -f ${awk_prog})
  rm ${awk_prog}
  cat ${cleaned_conf} | log_section "cleaned config file"
  rm ${cleaned_conf}
  echo "${db}" | log_section "database path"
  df ${db} | log_section "df: database"
fi

# Send output from this script to a log file
## this captures any messages
## or errors from the script itself

log_file=${tmp_loc}/get_info.log
exec 3>&1 1>>${log_file} 2>&1

## Send all stdout files to /tmp

if [[ -x ${rippled_exe} ]] ; then
  pgrep rippled && \
    ${rippled_exe} --conf ${conf_file} \
      -- server_info | log_section "server info"
fi

cat /proc/meminfo | log_section "meminfo"
cat /proc/swaps | log_section "swap space"
ulimit -a | log_section "ulimit"

if command -v lshw >/dev/null 2>&1 ; then
  lshw 2>/dev/null | log_section "hardware info"
else
  lscpu > ${tmp_loc}/hw_info.txt
  hwinfo >> ${tmp_loc}/hw_info.txt
  lspci >> ${tmp_loc}/hw_info.txt
  lsblk >> ${tmp_loc}/hw_info.txt
  cat ${tmp_loc}/hw_info.txt | log_section "hardware info"
  rm ${tmp_loc}/hw_info.txt
fi

if command -v iostat >/dev/null 2>&1 ; then
  iostat -t -d -x 2 6 | log_section "iostat"
fi

df -h | log_section "free disk space"
drives=($(df | awk '$1 ~ /^\/dev\// {print $1}' | xargs -n 1 basename))
block_devs=($(ls /sys/block/))
for d in "${drives[@]}"; do
  for dev in "${block_devs[@]}"; do
    #echo "D: [$d], DEV: [$dev]"
    if [[ $d =~ $dev ]]; then
      # this file (if exists) has 0 for SSD and 1 for HDD
      if [[ "$(cat /sys/block/${dev}/queue/rotational 2>/dev/null)" == 0 ]] ; then
        echo "${d} : SSD" >> ${tmp_loc}/is_ssd.txt
      else
        echo "${d} : NO SSD" >> ${tmp_loc}/is_ssd.txt
      fi
    fi
  done
done

if [[ -f ${tmp_loc}/is_ssd.txt ]] ; then
  cat ${tmp_loc}/is_ssd.txt | log_section "SSD"
  rm ${tmp_loc}/is_ssd.txt
fi

cat ${log_file} | log_section "script log"

cat << MSG | tee /dev/fd/3
####################################################
rippled info has been gathered. Please copy the
contents of ${summary_out}
to a github gist at https://gist.github.com/

PLEASE REVIEW THIS FILE FOR ANY SENSITIVE DATA
BEFORE POSTING! We have tried our best to omit
any sensitive information from this file, but you
should verify before posting.
####################################################
MSG

bin/git/setup-upstreams.sh | 86 (new executable file)
@@ -0,0 +1,86 @@
#!/bin/bash

if [[ $# -ne 1 || "$1" == "--help" || "$1" == "-h" ]]
then
  name=$( basename $0 )
  cat <<- USAGE
Usage: $name <username>

Where <username> is the Github username of the upstream repo. e.g. XRPLF
USAGE
  exit 0
fi

# Create upstream remotes based on origin
user="$1"
shift
# NOTE: "_run" is used below but is not defined anywhere in this diff;
# a minimal echo-and-run helper is assumed here so the script is runnable.
_run() { echo "+ $*" >&2 ; "$@" ; }
# Get the origin URL. Expect it to be an SSH-style URL
origin=$( git remote get-url origin )
if [[ "${origin}" == "" ]]
then
  echo Invalid origin remote >&2
  exit 1
fi
# echo "Origin: ${origin}"
# Parse the origin
ifs_orig="${IFS}"
IFS=':' read remote originpath <<< "${origin}"
# echo "Remote: ${remote}, Originpath: ${originpath}"
IFS='@' read sshuser server <<< "${remote}"
# echo "SSHUser: ${sshuser}, Server: ${server}"
IFS='/' read originuser repo <<< "${originpath}"
# echo "Originuser: ${originuser}, Repo: ${repo}"
if [[ "${sshuser}" == "" || "${server}" == "" || "${originuser}" == ""
  || "${repo}" == "" ]]
then
  echo "Can't parse origin URL: ${origin}" >&2
  exit 1
fi
upstream="https://${server}/${user}/${repo}"
upstreampush="${remote}:${user}/${repo}"
upstreamgroup="upstream upstream-push"
current=$( git remote get-url upstream 2>/dev/null )
currentpush=$( git remote get-url upstream-push 2>/dev/null )
currentgroup=$( git config remotes.upstreams )
if [[ "${current}" == "${upstream}" ]]
then
  echo "Upstream already set up correctly. Skip"
elif [[ -n "${current}" && "${current}" != "${upstream}" &&
  "${current}" != "${upstreampush}" ]]
then
  echo "Upstream already set up as: ${current}. Skip"
else
  if [[ "${current}" == "${upstreampush}" ]]
  then
    echo "Upstream set to dangerous push URL. Update."
    _run git remote rename upstream upstream-push || \
      _run git remote remove upstream
    currentpush=$( git remote get-url upstream-push 2>/dev/null )
  fi
  _run git remote add upstream "${upstream}"
fi

if [[ "${currentpush}" == "${upstreampush}" ]]
then
  echo "upstream-push already set up correctly. Skip"
elif [[ -n "${currentpush}" && "${currentpush}" != "${upstreampush}" ]]
then
  echo "upstream-push already set up as: ${currentpush}. Skip"
else
  _run git remote add upstream-push "${upstreampush}"
fi

if [[ "${currentgroup}" == "${upstreamgroup}" ]]
then
  echo "Upstreams group already set up correctly. Skip"
elif [[ -n "${currentgroup}" && "${currentgroup}" != "${upstreamgroup}" ]]
then
  echo "Upstreams group already set up as: ${currentgroup}. Skip"
else
  _run git config --add remotes.upstreams "${upstreamgroup}"
fi

_run git fetch --jobs=$(nproc) upstreams

exit 0

bin/git/squash-branches.sh | 69 (new executable file)
@@ -0,0 +1,69 @@
#!/bin/bash

if [[ $# -lt 3 || "$1" == "--help" || "$1" = "-h" ]]
then
  name=$( basename $0 )
  cat <<- USAGE
Usage: $name workbranch base/branch user/branch [user/branch [...]]

* workbranch will be created locally from base/branch
* base/branch and user/branch may be specified as user:branch to allow
  easy copying from Github PRs
* Remotes for each user must already be set up
USAGE
  exit 0
fi
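
# Example (hypothetical branch names), squashing two PR branches onto develop:
#   bin/git/squash-branches.sh collected-work XRPLF:develop alice:feature-x bob:fix-y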
|
||||
|
||||
work="$1"
|
||||
shift
|
||||
|
||||
branches=( $( echo "${@}" | sed "s/:/\//" ) )
|
||||
base="${branches[0]}"
|
||||
unset branches[0]
|
||||
|
||||
set -e
|
||||
|
||||
users=()
|
||||
for b in "${branches[@]}"
|
||||
do
|
||||
users+=( $( echo $b | cut -d/ -f1 ) )
|
||||
done
|
||||
|
||||
users=( $( printf '%s\n' "${users[@]}" | sort -u ) )
|
||||
|
||||
git fetch --multiple upstreams "${users[@]}"
|
||||
git checkout -B "$work" --no-track "$base"
|
||||
|
||||
for b in "${branches[@]}"
|
||||
do
|
||||
git merge --squash "${b}"
|
||||
git commit -S # Use the commit message provided on the PR
|
||||
done
|
||||
|
||||
# Make sure the commits look right
|
||||
git log --show-signature "$base..HEAD"
|
||||
|
||||
parts=( $( echo $base | sed "s/\// /" ) )
|
||||
repo="${parts[0]}"
|
||||
b="${parts[1]}"
|
||||
push=$repo
|
||||
if [[ "$push" == "upstream" ]]
|
||||
then
|
||||
push="upstream-push"
|
||||
fi
|
||||
if [[ "$repo" == "upstream" ]]
|
||||
then
|
||||
repo="upstreams"
|
||||
fi
|
||||
cat << PUSH
|
||||
|
||||
-------------------------------------------------------------------
|
||||
This script will not push. Verify everything is correct, then push
|
||||
to your repo, and create a PR if necessary. Once the PR is approved,
|
||||
run:
|
||||
|
||||
git push $push HEAD:$b
|
||||
git fetch $repo
|
||||
-------------------------------------------------------------------
|
||||
PUSH
|
||||
|
||||
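An illustrative invocation, following the USAGE text above (the work branch, base branch, and user:branch names are hypothetical):

    bin/git/squash-branches.sh collected-work upstream/develop alice:feature-x bob:feature-y

This would squash each contributor branch into a single signed commit on a new local collected-work branch based on upstream/develop.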
58  bin/git/update-version.sh  Executable file
@@ -0,0 +1,58 @@
#!/bin/bash

if [[ $# -ne 3 || "$1" == "--help" || "$1" == "-h" ]]
then
    name=$( basename $0 )
    cat <<- USAGE
Usage: $name workbranch base/branch version

* workbranch will be created locally from base/branch. If it exists,
  it will be reused, so make sure you don't overwrite any work.
* base/branch may be specified as user:branch to allow easy copying
  from Github PRs.
USAGE
    exit 0
fi

work="$1"
shift

base=$( echo "$1" | sed "s/:/\//" )
shift

version=$1
shift

set -e

git fetch upstreams

git checkout -B "${work}" --no-track "${base}"

push=$( git rev-parse --abbrev-ref --symbolic-full-name '@{push}' \
    2>/dev/null ) || true
if [[ "${push}" != "" ]]
then
    echo "Warning: ${push} may already exist."
fi

build=$( find . -name BuildInfo.cpp )
# Rewrite versionString in BuildInfo.cpp. If the rewritten file is identical
# to the original (diff exits 0), nothing changed, so abort; otherwise move
# the rewritten file into place.
sed 's/\(^.*versionString =\).*$/\1 "'${version}'"/' ${build} > version.cpp && \
    diff "${build}" version.cpp && exit 1 || \
    mv -vi version.cpp ${build}

git diff

git add ${build}

git commit -S -m "Set version to ${version}"

git log --oneline --first-parent ${base}^..

cat << PUSH

-------------------------------------------------------------------
This script will not push. Verify everything is correct, then push
to your repo, and create a PR as described in CONTRIBUTING.md.
-------------------------------------------------------------------
PUSH
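An illustrative invocation, per the USAGE text (the branch name and version number are hypothetical):

    bin/git/update-version.sh version-bump upstream/develop 2.3.0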
218  bin/physical.sh  Executable file
@@ -0,0 +1,218 @@
#!/bin/bash

set -o errexit

marker_base=985c80fbc6131f3a8cedd0da7e8af98dfceb13c7
marker_commit=${1:-${marker_base}}

if [ $(git merge-base ${marker_commit} ${marker_base}) != ${marker_base} ]; then
    echo "first marker commit not an ancestor: ${marker_commit}"
    exit 1
fi

if [ $(git merge-base ${marker_commit} HEAD) != $(git rev-parse --verify ${marker_commit}) ]; then
    echo "given marker commit not an ancestor: ${marker_commit}"
    exit 1
fi

if [ -e Builds/CMake ]; then
    echo move CMake
    git mv Builds/CMake cmake
    git add --update .
    git commit -m 'Move CMake directory' --author 'Pretty Printer <cpp@ripple.com>'
fi

if [ -e src/ripple ]; then

    echo move protocol buffers
    mkdir -p include/xrpl
    if [ -e src/ripple/proto ]; then
        git mv src/ripple/proto include/xrpl
    fi

    # Print the file names between the "BEGIN <name>" and "END <name>"
    # markers in RippledCore.cmake, keeping only lines that mention
    # src/ripple, then strip the src/ripple/ prefix and any trailing
    # punctuation.
    extract_list() {
        git show ${marker_commit}:Builds/CMake/RippledCore.cmake | \
            awk "/END ${1}/ { p = 0 } p && /src\/ripple/; /BEGIN ${1}/ { p = 1 }" | \
            sed -e 's#src/ripple/##' -e 's#[^a-z]\+$##'
    }

    move_files() {
        oldroot="$1"; shift
        newroot="$1"; shift
        detail="$1"; shift
        files=("$@")
        for file in ${files[@]}; do
            if [ ! -e ${oldroot}/${file} ]; then
                continue
            fi
            dir=$(dirname ${file})
            if [ $(basename ${dir}) == 'details' ]; then
                dir=$(dirname ${dir})
            fi
            if [ $(basename ${dir}) == 'impl' ]; then
                dir="$(dirname ${dir})/${detail}"
            fi
            mkdir -p ${newroot}/${dir}
            git mv ${oldroot}/${file} ${newroot}/${dir}
        done
    }

    echo move libxrpl headers
    # ${files} starts as a newline-separated string; appending with +=( )
    # turns it into an array whose first element still word-splits when
    # expanded unquoted below.
    files=$(extract_list 'LIBXRPL HEADERS')
    files+=(
        basics/SlabAllocator.h

        beast/asio/io_latency_probe.h
        beast/container/aged_container.h
        beast/container/aged_container_utility.h
        beast/container/aged_map.h
        beast/container/aged_multimap.h
        beast/container/aged_multiset.h
        beast/container/aged_set.h
        beast/container/aged_unordered_map.h
        beast/container/aged_unordered_multimap.h
        beast/container/aged_unordered_multiset.h
        beast/container/aged_unordered_set.h
        beast/container/detail/aged_associative_container.h
        beast/container/detail/aged_container_iterator.h
        beast/container/detail/aged_ordered_container.h
        beast/container/detail/aged_unordered_container.h
        beast/container/detail/empty_base_optimization.h
        beast/core/LockFreeStack.h
        beast/insight/Collector.h
        beast/insight/Counter.h
        beast/insight/CounterImpl.h
        beast/insight/Event.h
        beast/insight/EventImpl.h
        beast/insight/Gauge.h
        beast/insight/GaugeImpl.h
        beast/insight/Group.h
        beast/insight/Groups.h
        beast/insight/Hook.h
        beast/insight/HookImpl.h
        beast/insight/Insight.h
        beast/insight/Meter.h
        beast/insight/MeterImpl.h
        beast/insight/NullCollector.h
        beast/insight/StatsDCollector.h
        beast/test/fail_counter.h
        beast/test/fail_stream.h
        beast/test/pipe_stream.h
        beast/test/sig_wait.h
        beast/test/string_iostream.h
        beast/test/string_istream.h
        beast/test/string_ostream.h
        beast/test/test_allocator.h
        beast/test/yield_to.h
        beast/utility/hash_pair.h
        beast/utility/maybe_const.h
        beast/utility/temp_dir.h

        # included by only json/impl/json_assert.h
        json/json_errors.h

        protocol/PayChan.h
        protocol/RippleLedgerHash.h
        protocol/messages.h
        protocol/st.h
    )
    files+=(
        basics/README.md
        crypto/README.md
        json/README.md
        protocol/README.md
        resource/README.md
    )
    move_files src/ripple include/xrpl detail ${files[@]}

    echo move libxrpl sources
    files=$(extract_list 'LIBXRPL SOURCES')
    move_files src/ripple src/libxrpl "" ${files[@]}

    echo check leftovers
    dirs=$(cd include/xrpl; ls -d */)
    dirs=$(cd src/ripple; ls -d ${dirs} 2>/dev/null || true)
    files="$(cd src/ripple; find ${dirs} -type f)"
    if [ -n "${files}" ]; then
        echo "leftover files:"
        echo ${files}
        exit
    fi

    echo remove empty directories
    empty_dirs="$(cd src/ripple; find ${dirs} -depth -type d)"
    for dir in ${empty_dirs[@]}; do
        if [ -e ${dir} ]; then
            rmdir ${dir}
        fi
    done

    echo move xrpld sources
    files=$(
        extract_list 'XRPLD SOURCES'
        cd src/ripple
        find * -regex '.*\.\(h\|ipp\|md\|pu\|uml\|png\)'
    )
    move_files src/ripple src/xrpld detail ${files[@]}

    files="$(cd src/ripple; find . -type f)"
    if [ -n "${files}" ]; then
        echo "leftover files:"
        echo ${files}
        exit
    fi

fi

rm -rf src/ripple

echo rename .hpp to .h
find include src -name '*.hpp' -exec bash -c 'f="{}"; git mv "${f}" "${f%hpp}h"' \;

echo move PerfLog.h
if [ -e include/xrpl/basics/PerfLog.h ]; then
    git mv include/xrpl/basics/PerfLog.h src/xrpld/perflog
fi

# Make sure all protobuf includes have the correct prefix.
protobuf_replace='s:^#include\s*["<].*org/xrpl\([^">]\+\)[">]:#include <xrpl/proto/org/xrpl\1>:'
# Make sure first-party includes use angle brackets and .h extension.
ripple_replace='s:include\s*["<]ripple/\(.*\)\.h\(pp\)\?[">]:include <ripple/\1.h>:'
beast_replace='s:include\s*<beast/:include <xrpl/beast/:'
# Rename impl directories to detail.
impl_rename='s:\(<xrpl.*\)/impl\(/details\)\?/:\1/detail/:'

echo rewrite includes in libxrpl
find include/xrpl src/libxrpl -type f -exec sed -i \
    -e "${protobuf_replace}" \
    -e "${ripple_replace}" \
    -e "${beast_replace}" \
    -e 's:^#include <ripple/:#include <xrpl/:' \
    -e "${impl_rename}" \
    {} +

echo rewrite includes in xrpld
# https://www.baeldung.com/linux/join-multiple-lines
libxrpl_dirs="$(cd include/xrpl; ls -d1 */ | sed 's:/$::')"
# libxrpl_dirs='a\nb\nc\n'
readarray -t libxrpl_dirs <<< "${libxrpl_dirs}"
# libxrpl_dirs=(a b c)
libxrpl_dirs=$(printf -v txt '%s\\|' "${libxrpl_dirs[@]}"; echo "${txt%\\|}")
# libxrpl_dirs='a\|b\|c'
find src/xrpld src/test -type f -exec sed -i \
    -e "${protobuf_replace}" \
    -e "${ripple_replace}" \
    -e "${beast_replace}" \
    -e "s:^#include <ripple/basics/PerfLog.h>:#include <xrpld/perflog/PerfLog.h>:" \
    -e "s:^#include <ripple/\(${libxrpl_dirs}\)/:#include <xrpl/\1/:" \
    -e 's:^#include <ripple/:#include <xrpld/:' \
    -e "${impl_rename}" \
    {} +

git commit -m 'Rearrange sources' --author 'Pretty Printer <cpp@ripple.com>'
find include src -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format-10 -i {} +
git add --update .
git commit -m 'Rewrite includes' --author 'Pretty Printer <cpp@ripple.com>'
./Builds/levelization/levelization.sh
git add --update .
git commit -m 'Recompute loops' --author 'Pretty Printer <cpp@ripple.com>'
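A sketch of the marker convention extract_list relies on: any line containing "BEGIN <name>" starts capture, any line containing "END <name>" stops it, and only captured lines mentioning src/ripple are kept. Assuming a RippledCore.cmake fragment like the following (the specific paths are illustrative, not taken from the repository):

    # BEGIN LIBXRPL HEADERS
    src/ripple/basics/Log.h
    src/ripple/protocol/st.h
    # END LIBXRPL HEADERS

extract_list 'LIBXRPL HEADERS' would print "basics/Log.h" and "protocol/st.h", with the src/ripple/ prefix and trailing non-letter characters stripped by the sed stage.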
@@ -1,32 +0,0 @@
#!/bin/sh
# Assumptions:
#  1) BOOST_ROOT and BOOST_URL are already defined,
#     and contain valid values.
#  2) The last namepart of BOOST_ROOT matches the
#     folder name internal to boost's .tar.gz
# When testing you can force a boost build by clearing travis caches:
# https://travis-ci.org/ripple/rippled/caches
set -e

if [ -x /usr/bin/time ] ; then
    : ${TIME:="Duration: %E"}
    export TIME
    time=/usr/bin/time
else
    time=
fi

if [ ! -d "$BOOST_ROOT/lib" ]
then
    wget $BOOST_URL -O /tmp/boost.tar.gz
    cd `dirname $BOOST_ROOT`
    rm -fr ${BOOST_ROOT}
    tar xzf /tmp/boost.tar.gz
    cd $BOOST_ROOT && \
        $time ./bootstrap.sh --prefix=$BOOST_ROOT && \
        $time ./b2 -d1 define=_GLIBCXX_USE_CXX11_ABI=0 -j$((2*${NUM_PROCESSORS:-2})) && \
        $time ./b2 -d0 define=_GLIBCXX_USE_CXX11_ABI=0 install
else
    echo "Using cached boost at $BOOST_ROOT"
fi
51  bin/sh/install-vcpkg.sh  Executable file
@@ -0,0 +1,51 @@
#!/usr/bin/env bash
set -exu

: ${TRAVIS_BUILD_DIR:=""}
: ${VCPKG_DIR:=".vcpkg"}
export VCPKG_ROOT=${VCPKG_DIR}
: ${VCPKG_DEFAULT_TRIPLET:="x64-windows-static"}

export VCPKG_DEFAULT_TRIPLET

# COMSPEC is set on Windows, where the vcpkg binary is vcpkg.exe.
EXE="vcpkg"
if [[ -n ${COMSPEC:-} ]]; then
    EXE="${EXE}.exe"
fi

if [[ -d "${VCPKG_DIR}" && -x "${VCPKG_DIR}/${EXE}" && -d "${VCPKG_DIR}/installed" ]] ; then
    echo "Using cached vcpkg at ${VCPKG_DIR}"
    ${VCPKG_DIR}/${EXE} list
else
    if [[ -d "${VCPKG_DIR}" ]] ; then
        rm -rf "${VCPKG_DIR}"
    fi
    git clone --branch 2021.04.30 https://github.com/Microsoft/vcpkg.git ${VCPKG_DIR}
    pushd ${VCPKG_DIR}
    BSARGS=()
    if [[ "$(uname)" == "Darwin" ]] ; then
        BSARGS+=(--allowAppleClang)
    fi
    if [[ -z ${COMSPEC:-} ]]; then
        chmod +x ./bootstrap-vcpkg.sh
        time ./bootstrap-vcpkg.sh "${BSARGS[@]}"
    else
        time ./bootstrap-vcpkg.bat
    fi
    popd
fi

# TODO: bring boost in this way as well ?
# NOTE: can pin specific ports to a commit/version like this:
#   git checkout <SOME COMMIT HASH> ports/boost
if [ $# -eq 0 ]; then
    echo "No extra packages specified..."
    PKGS=()
else
    PKGS=( "$@" )
fi
for LIB in "${PKGS[@]}"; do
    time ${VCPKG_DIR}/${EXE} --clean-after-build install ${LIB}
done
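An illustrative invocation, assuming the script is run from the repository root (the package names are hypothetical; any arguments are passed straight to "vcpkg install"):

    bin/sh/install-vcpkg.sh openssl grpc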
40  bin/sh/setup-msvc.sh  Executable file
@@ -0,0 +1,40 @@
# NOTE: must be sourced from a shell so it can export vars

# getenv.bat CALLs the given vcvarsall.bat, then dumps the resulting
# environment with SET so the loop below can grep values out of it.
cat << BATCH > ./getenv.bat
CALL %*
SET
BATCH

while read line ; do
    IFS='"' read x path arg <<<"${line}"
    if [ -f "${path}" ] ; then
        echo "FOUND: $path"
        export VCINSTALLDIR=$(./getenv.bat "${path}" ${arg} | grep "^VCINSTALLDIR=" | sed -E "s/^VCINSTALLDIR=//g")
        if [ "${VCINSTALLDIR}" != "" ] ; then
            echo "USING ${VCINSTALLDIR}"
            export LIB=$(./getenv.bat "${path}" ${arg} | grep "^LIB=" | sed -E "s/^LIB=//g")
            export LIBPATH=$(./getenv.bat "${path}" ${arg} | grep "^LIBPATH=" | sed -E "s/^LIBPATH=//g")
            export INCLUDE=$(./getenv.bat "${path}" ${arg} | grep "^INCLUDE=" | sed -E "s/^INCLUDE=//g")
            ADDPATH=$(./getenv.bat "${path}" ${arg} | grep "^PATH=" | sed -E "s/^PATH=//g")
            export PATH="${ADDPATH}:${PATH}"
            break
        fi
    fi
done <<EOL
"C:/Program Files (x86)/Microsoft Visual Studio/2019/BuildTools/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio/2019/Community/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio/2017/BuildTools/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio 15.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 13.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 12.0/VC/vcvarsall.bat" amd64
EOL
# TODO: update the list above as needed to support newer versions of msvc tools

rm -f getenv.bat

if [ "${VCINSTALLDIR}" = "" ] ; then
    echo "No compatible visual studio found!"
fi
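Because the script exports LIB, LIBPATH, INCLUDE, and PATH, it only takes effect when sourced into the current shell, as the NOTE above says. A sketch of the intended use from git-bash:

    . bin/sh/setup-msvc.sh
    echo "${VCINSTALLDIR}"   # non-empty if a Visual Studio install was found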
246  bin/start_sync_stop.py  Normal file
@@ -0,0 +1,246 @@
#!/usr/bin/env python
"""A script to test rippled in an infinite loop of start-sync-stop.

- Requires Python 3.7+.
- Can be stopped with SIGINT.
- Has no dependencies outside the standard library.
"""

import sys

assert sys.version_info.major == 3 and sys.version_info.minor >= 7

import argparse
import asyncio
import configparser
import contextlib
import json
import logging
import os
from pathlib import Path
import platform
import subprocess
import time
import urllib.error
import urllib.request

# Enable asynchronous subprocesses on Windows. The default changed in 3.8.
# https://docs.python.org/3.7/library/asyncio-platforms.html#subprocess-support-on-windows
if (platform.system() == 'Windows' and sys.version_info.major == 3
        and sys.version_info.minor < 8):
    asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())

DEFAULT_EXE = 'rippled'
DEFAULT_CONFIGURATION_FILE = 'rippled.cfg'
# Number of seconds to wait before forcefully terminating.
PATIENCE = 120
# Number of contiguous seconds in a sync state to be considered synced.
DEFAULT_SYNC_DURATION = 60
# Number of seconds between polls of state.
DEFAULT_POLL_INTERVAL = 5
SYNC_STATES = ('full', 'validating', 'proposing')


def read_config(config_file):
    # strict = False: Allow duplicate keys, e.g. [rpc_startup].
    # allow_no_value = True: Allow keys with no values. Generally, these
    # instances use the "key" as the value, and the section name is the key,
    # e.g. [debug_logfile].
    # delimiters = ('='): Allow ':' as a character in Windows paths. Some of
    # our "keys" are actually values, and we don't want to split them on ':'.
    config = configparser.ConfigParser(
        strict=False,
        allow_no_value=True,
        delimiters=('='),
    )
    config.read(config_file)
    return config


def to_list(value, separator=','):
    """Parse a list from a delimited string value."""
    return [s.strip() for s in value.split(separator) if s]


def find_log_file(config_file):
    """Try to figure out what log file the user has chosen. Raises all kinds
    of exceptions if there is any possibility of ambiguity."""
    config = read_config(config_file)
    values = list(config['debug_logfile'].keys())
    if len(values) < 1:
        raise ValueError(
            f'no [debug_logfile] in configuration file: {config_file}')
    if len(values) > 1:
        raise ValueError(
            f'too many [debug_logfile] in configuration file: {config_file}')
    return values[0]


def find_http_port(config_file):
    config = read_config(config_file)
    names = list(config['server'].keys())
    for name in names:
        server = config[name]
        if 'http' in to_list(server.get('protocol', '')):
            return int(server['port'])
    raise ValueError('no server in [server] for "http" protocol')


@contextlib.asynccontextmanager
async def rippled(exe=DEFAULT_EXE, config_file=DEFAULT_CONFIGURATION_FILE):
    """A context manager for a rippled process."""
    # Start the server.
    process = await asyncio.create_subprocess_exec(
        str(exe),
        '--conf',
        str(config_file),
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    logging.info(f'rippled started with pid {process.pid}')
    try:
        yield process
    finally:
        # Ask it to stop.
        logging.info(f'asking rippled (pid: {process.pid}) to stop')
        start = time.time()
        process.terminate()

        # Wait nicely.
        try:
            await asyncio.wait_for(process.wait(), PATIENCE)
        except asyncio.TimeoutError:
            # Ask the operating system to kill it.
            logging.warning(f'killing rippled ({process.pid})')
            try:
                process.kill()
            except ProcessLookupError:
                pass

        code = await process.wait()
        end = time.time()
        logging.info(
            f'rippled stopped after {end - start:.1f} seconds with code {code}'
        )


async def sync(
        port,
        *,
        duration=DEFAULT_SYNC_DURATION,
        interval=DEFAULT_POLL_INTERVAL,
):
    """Poll rippled on an interval until it has been synced for a duration."""
    start = time.perf_counter()
    while (time.perf_counter() - start) < duration:
        await asyncio.sleep(interval)

        request = urllib.request.Request(
            f'http://127.0.0.1:{port}',
            data=json.dumps({
                'method': 'server_state'
            }).encode(),
            headers={'Content-Type': 'application/json'},
        )
        with urllib.request.urlopen(request) as response:
            try:
                body = json.loads(response.read())
            except json.JSONDecodeError as cause:
                # A malformed body raises JSONDecodeError, not HTTPError.
                logging.warning(f'server_state returned not JSON: {cause}')
                start = time.perf_counter()
                continue

        try:
            state = body['result']['state']['server_state']
        except KeyError as cause:
            # KeyError carries the missing key in its arguments.
            logging.warning(f'server_state response missing key: {cause}')
            start = time.perf_counter()
            continue
        logging.info(f'server_state: {state}')
        if state not in SYNC_STATES:
            # Require a contiguous sync state.
            start = time.perf_counter()


async def loop(test,
               *,
               exe=DEFAULT_EXE,
               config_file=DEFAULT_CONFIGURATION_FILE):
    """
    Start-test-stop rippled in an infinite loop.

    Moves log to a different file after each iteration.
    """
    log_file = find_log_file(config_file)
    id = 0
    while True:
        logging.info(f'iteration: {id}')
        async with rippled(exe, config_file) as process:
            start = time.perf_counter()
            exited = asyncio.create_task(process.wait())
            tested = asyncio.create_task(test())
            # Try to sync as long as the process is running.
            done, pending = await asyncio.wait(
                {exited, tested},
                return_when=asyncio.FIRST_COMPLETED,
            )
            if done == {exited}:
                code = exited.result()
                logging.warning(
                    f'server halted for unknown reason with code {code}')
            else:
                assert done == {tested}
                assert tested.exception() is None
                end = time.perf_counter()
                logging.info(f'synced after {end - start:.0f} seconds')
        os.replace(log_file, f'debug.{id}.log')
        id += 1


logging.basicConfig(
    format='%(asctime)s %(levelname)-8s %(message)s',
    level=logging.INFO,
    datefmt='%Y-%m-%d %H:%M:%S',
)

parser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
    'rippled',
    type=Path,
    nargs='?',
    default=DEFAULT_EXE,
    help='Path to rippled.',
)
parser.add_argument(
    '--conf',
    type=Path,
    default=DEFAULT_CONFIGURATION_FILE,
    help='Path to configuration file.',
)
parser.add_argument(
    '--duration',
    type=int,
    default=DEFAULT_SYNC_DURATION,
    help='Number of contiguous seconds required in a synchronized state.',
)
parser.add_argument(
    '--interval',
    type=int,
    default=DEFAULT_POLL_INTERVAL,
    help='Number of seconds to wait between polls of state.',
)
args = parser.parse_args()

port = find_http_port(args.conf)


def test():
    return sync(port, duration=args.duration, interval=args.interval)


try:
    asyncio.run(loop(test, exe=args.rippled, config_file=args.conf))
except KeyboardInterrupt:
    # Squelch the message. This is a normal mode of exit.
    pass
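An illustrative run, assuming a local build and configuration (the paths are hypothetical):

    python3 bin/start_sync_stop.py build/rippled --conf rippled.cfg --duration 60 --interval 5

Each iteration starts the server, polls server_state over the configured HTTP port until it reports full/validating/proposing for the requested duration, stops the server, and rotates the debug log to debug.<n>.log.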
@@ -1,6 +1,5 @@
#-------------------------------------------------------------------------------
#
#   Rippled Server Instance Configuration Example
#
#-------------------------------------------------------------------------------
#
@@ -14,13 +13,17 @@
#
#   4. HTTPS Client
#
#   5. Database
#   5. <vacated>
#
#   6. Diagnostics
#   6. Database
#
#   7. Voting
#   7. Diagnostics
#
#   8. Example Settings
#   8. Voting
#
#   9. Misc Settings
#
#   10. Example Settings
#
#-------------------------------------------------------------------------------
#
@@ -32,14 +35,13 @@
#
#   rippled.cfg
#
# For more information on where the rippled server instance searches for
# the file please visit the Ripple wiki. Specifically, the section explaining
# the --conf command line option:
# For more information on where the rippled server instance searches for the
# file, visit:
#
#   https://ripple.com/wiki/Rippled#--conf.3Dpath
#   https://xrpl.org/commandline-usage.html#generic-options
#
# This file should be named rippled.cfg. This file is UTF-8 with Dos, UNIX,
# or Mac style end of lines. Blank lines and lines beginning with '#' are
# This file should be named rippled.cfg. This file is UTF-8 with DOS, UNIX,
# or Mac style end of lines. Blank lines and lines beginning with '#' are
# ignored. Undefined sections are reserved. No escapes are currently defined.
#
# Notation
@@ -47,8 +49,8 @@
# In this document a simple BNF notation is used. Angle brackets denote
# required elements, square brackets denote optional elements, and single
# quotes indicate string literals. A vertical bar separating 1 or more
# elements is a logical "or"; Any one of the elements may be chosen.
# Parenthesis are notational only, and used to group elements, they are not
# elements is a logical "or"; any one of the elements may be chosen.
# Parentheses are notational only, and used to group elements; they are not
# part of the syntax unless they appear in quotes. White space may always
# appear between elements, it has no effect on values.
#
@@ -124,13 +126,13 @@
#   port = 80
#
#   [port_public]
#   ip=0.0.0.0
#   ip = 0.0.0.0
#   port = 443
#   protocol=peer,https
#   protocol = peer,https
#
#   [port_private]
#   ip=127.0.0.1
#   protocol=http
#   ip = 127.0.0.1
#   protocol = http
#
# When rippled is used as a command line client (for example, issuing a
# server stop command), the first port advertising the http or https
@@ -146,7 +148,11 @@
#   ip = <IP-address>
#
#      Required. Determines the IP address of the network interface to bind
#      to. To bind to all available interfaces, uses 0.0.0.0
#      to. To bind to all available IPv4 interfaces, use 0.0.0.0
#      To bind to all IPv4 and IPv6 interfaces, use ::
#
#      NOTE if the ip value is ::, then any incoming IPv4 connections will
#      be made as mapped IPv4 addresses.
#
#   port = <number>
#
@@ -194,15 +200,31 @@
#
#   admin = [ IP, IP, IP, ... ]
#
#      A comma-separated list of IP addresses.
#      A comma-separated list of IP addresses or subnets. Subnets
#      should be represented in "slash" notation, such as:
#         10.0.0.0/8
#         172.16.0.0/12
#         192.168.0.0/16
#      Those examples are ipv4, but ipv6 is also supported.
#      When configuring subnets, the address must match the
#      underlying network address. Otherwise, the desired IP range is
#      ambiguous. For example, 10.1.2.3/24 has a network address of
#      10.1.2.0. Therefore, that subnet should be configured as
#      10.1.2.0/24.
#
#      When set, grants administrative command access to the specified IP
#      When set, grants administrative command access to the specified
#      addresses. These commands may be issued over http, https, ws, or wss
#      if configured on the port. If unspecified, the default is to not allow
#      if configured on the port. If not provided, the default is to not allow
#      administrative commands.
#
#      NOTE A common configuration value for the admin field is "localhost".
#      If you are listening on all IPv4/IPv6 addresses by specifying
#      ip = :: then you can use admin = ::ffff:127.0.0.1,::1 to allow
#      administrative access from both IPv4 and IPv6 localhost
#      connections.
#
#      *SECURITY WARNING*
#      0.0.0.0 may be specified to allow access from any IP address. It must
#      0.0.0.0 or :: may be used to allow access from any IP address. It must
#      be the only address specified and cannot be combined with other IPs.
#      Use of this address can compromise server security, please consider its
#      use carefully.
@@ -221,9 +243,10 @@
#
#   secure_gateway = [ IP, IP, IP, ... ]
#
#      A comma-separated list of IP addresses.
#      A comma-separated list of IP addresses or subnets. See the
#      details for the "admin" option above.
#
#      When set, allows the specified IP addresses to pass HTTP headers
#      When set, allows the specified addresses to pass HTTP headers
#      containing username and remote IP address for each session. If a
#      non-empty username is passed in this way, then resource controls
#      such as often resulting in "tooBusy" errors will be lifted. However,
@@ -238,9 +261,9 @@
#      proxies. Since rippled trusts these hosts, they must be
#      responsible for properly authenticating the remote user.
#
#      The same IP address cannot be used in both "admin" and "secure_gateway"
#      lists for the same port. In this case, rippled will abort with an error
#      message to the console shortly after startup
#      If some IP addresses are included for both "admin" and
#      "secure_gateway" connections, then they will be treated as
#      "admin" addresses.
#
#   ssl_key = <filename>
#   ssl_cert = <filename>
@@ -260,12 +283,14 @@
#   ssl_cert
#
#      Specifies the path to the SSL certificate file in PEM format.
#      This is not needed if the chain includes it.
#      This is not needed if the chain includes it. Use ssl_chain if
#      your certificate includes one or more intermediates.
#
#   ssl_chain
#
#      If you need a certificate chain, specify the path to the
#      certificate chain here. The chain may include the end certificate.
#      This must be used if the certificate includes intermediates.
#
#   ssl_ciphers = <cipherlist>
#
@@ -280,7 +305,7 @@
#      keep rippled from connecting to other instances of rippled or
#      prevent RPC and WebSocket clients from connecting.
#
#   send_queue_limit = = [1..65535]
#   send_queue_limit = [1..65535]
#
#      A Websocket will disconnect when its send queue exceeds this limit.
#      The default is 100. A larger value may help with erratic disconnects but
@@ -341,6 +366,14 @@
#      connection is no longer available.
#
#
#   [server_domain]
#
#      domain name
#
#      The domain under which a TOML file applicable to this server can be
#      found. A server may lie about its domain so the TOML should contain
#      a reference to this server by pubkey in the [nodes] array.
#
#
#-------------------------------------------------------------------------------
#
@@ -356,28 +389,46 @@
#
#
#
# [compression]
#
#   true or false
#
#   true - enables compression
#   false - disables compression [default].
#
#   The rippled server can save bandwidth by compressing its peer-to-peer communications,
#   at a cost of greater CPU usage. If you enable link compression,
#   the server automatically compresses communications with peer servers
#   that also have link compression enabled.
#   https://xrpl.org/enable-link-compression.html
#
#
#
# [ips]
#
#   List of hostnames or ips where the Ripple protocol is served. For a starter
#   list, you can either copy entries from: https://ripple.com/ripple.txt or if
#   you prefer you can specify r.ripple.com 51235
#   List of hostnames or ips where the Ripple protocol is served. A default
#   starter list is included in the code and used if no other hostnames are
#   available.
#
#   One IPv4 address or domain names per line is allowed. A port may must be
#   specified after adding a space to the address. By convention, if known,
#   IPs are listed in from most to least trusted.
#   One address or domain name per line is allowed. A port may be specified
#   after adding a space to the address. If a port is not specified, the default
#   port of 2459 will be used. Many servers still use the legacy port of 51235.
#   To connect to such servers, you must specify the port number. The ordering
#   of entries does not generally matter.
#
#   The default list of entries is:
#   - r.ripple.com 51235
#   - sahyadri.isrdc.in 51235
#   - hubs.xrpkuwait.com 51235
#   - hub.xrpl-commons.org 51235
#
#   Examples:
#    192.168.0.1
#    192.168.0.1 3939
#    r.ripple.com 51235
#
#   This will give you a good, up-to-date list of addresses:
#
#   [ips]
#   192.168.0.1
#   192.168.0.1 2459
#   r.ripple.com 51235
#
#   The default is: [ips_fixed] addresses (if present) or r.ripple.com 51235
#
#
# [ips_fixed]
#
@@ -387,8 +438,8 @@
#   Ripple network through a public-facing server, or for building a set
#   of cluster peers.
#
#   One IPv4 address or domain names per line is allowed. A port must be
#   specified after adding a space to the address.
#   One address or domain name per line is allowed. A port must be specified
#   after adding a space to the address.
#
#
#
@@ -430,17 +481,11 @@
#
#
#
# [sntp_servers]
# [max_transactions]
#
#   IP address or domain of NTP servers to use for time synchronization.
#
#   These NTP servers are suitable for rippled servers located in the United
#   States:
#      time.windows.com
#      time.apple.com
#      time.nist.gov
#      pool.ntp.org
#   Configure the maximum number of transactions to have in the job queue
#
#   Must be a number between 100 and 1000, defaults to 250
#
#
# [overlay]
@@ -464,6 +509,23 @@
#   single host from consuming all inbound slots. If the value is not
#   present the server will autoconfigure an appropriate limit.
#
#   max_unknown_time = <number>
#
#       The maximum amount of time, in seconds, that an outbound connection
#       is allowed to stay in the "unknown" tracking state. This option can
#       take any value between 300 and 1800 seconds, inclusive. If the option
#       is not present the server will autoconfigure an appropriate limit.
#
#       The current default (which is subject to change) is 600 seconds.
#
#   max_diverged_time = <number>
#
#       The maximum amount of time, in seconds, that an outbound connection
#       is allowed to stay in the "diverged" tracking state. The option can
#       take any value between 60 and 900 seconds, inclusive. If the option
#       is not present the server will autoconfigure an appropriate limit.
#
#       The current default (which is subject to change) is 300 seconds.
#
#
# [transaction_queue] EXPERIMENTAL
@@ -496,14 +558,6 @@
#      than the original transaction's fee, or meet the current open
#      ledger fee to be considered. Default: 25.
#
#   multi_txn_percent = <number>
#
#      If a client submits multiple transactions (different sequence
#      numbers), later transactions must pay a fee at least <number>
#      percent higher than the transaction with the previous sequence
#      number.
#      Default: -90.
#
#   minimum_escalation_multiplier = <number>
#
#      At ledger close time, the median fee level of the transactions
@@ -538,6 +592,20 @@
#      into the ledger at the minimum required fee before the required
#      fee escalates. Default: no maximum.
#
#   normal_consensus_increase_percent = <number>
#
#      (Optional) When the ledger has more transactions than "expected",
#      and performance is humming along nicely, the expected ledger size
#      is updated to the previous ledger size plus this percentage.
#      Default: 20
#
#   slow_consensus_decrease_percent = <number>
#
#      (Optional) When consensus takes longer than appropriate, the
#      expected ledger size is updated to the minimum of the previous
#      ledger size or the "expected" ledger size minus this percentage.
#      Default: 50
#
#   maximum_txn_per_account = <number>
#
#      Maximum number of transactions that one account can have in the
@@ -559,22 +627,43 @@
#
#-------------------------------------------------------------------------------
#
# 3. Ripple Protocol
# 3. Protocol
#
#-------------------
#
# These settings affect the behavior of the server instance with respect
# to Ripple payment protocol level activities such as validating and
# closing ledgers or adjusting fees in response to server overloads.
# to protocol level activities such as validating and closing ledgers
# or adjusting fees in response to server overloads.
#
#
#
# [node_size]
#
#   Tunes the servers based on the expected load and available memory. Legal
#   sizes are "tiny", "small", "medium", "large", and "huge". We recommend
#   you start at the default and raise the setting if you have extra memory.
#   The default is "tiny".
# [relay_proposals]
#
#   Controls the relay and processing behavior for proposals received by this
#   server that are issued by validators that are not on the server's UNL.
#
#   Legal values are:
#      "all"             - Relay and process all incoming proposals
#      "trusted"         - Relay only trusted proposals, but locally process all
#      "drop_untrusted"  - Relay only trusted proposals, do not process untrusted
#
#   The default is "trusted".
#
#
# [relay_validations]
#
#   Controls the relay and processing behavior for validations received by this
#   server that are issued by validators that are not on the server's UNL.
#
#   Legal values are:
#      "all"             - Relay and process all incoming validations
#      "trusted"         - Relay only trusted validations, but locally process all
#      "drop_untrusted"  - Relay only trusted validations, do not process untrusted
#
#   The default is "all".
#
#
#
#
#
@@ -676,7 +765,9 @@
#   When searching for paths, the default search aggressiveness. This can take
#   exponentially more resources as the size is increased.
#
#   The default is: 7
#   The recommended value to support advanced pathfinding is: 7
#
#   The default is: 2
#
# [path_search_fast]
# [path_search_max]
@@ -685,12 +776,19 @@
#   If you do not need pathfinding, you can set path_search_max to zero to
#   disable it and avoid some expensive bookkeeping.
#
#   The default for 'path_search_fast' is 2. The default for 'path_search_max' is 10.
#   To support advanced pathfinding the recommended value for
#   'path_search_fast' is 2, and for 'path_search_max' is 10.
#
#   The default for 'path_search_fast' is 2. The default for 'path_search_max' is 3.
#
# [path_search_old]
#
#   For clients that use the legacy path finding interfaces, the search
#   aggressiveness to use. The default is 7.
#   aggressiveness to use.
#
#   The recommended value to support advanced pathfinding is: 7.
#
#   The default is: 2
#
#
#
@@ -704,10 +802,48 @@
# [workers]
#
#   Configures the number of threads for processing work submitted by peers
#   and clients. If not specified, then the value is automatically determined
#   by factors including the number of system processors and whether this
#   node is a validator.
#   and clients. If not specified, then the value is automatically set to the
#   number of processor threads plus 2 for networked nodes. Nodes running in
#   stand alone mode default to 1 worker.
#
# [io_workers]
#
#   Configures the number of threads for processing raw inbound and outbound IO.
#
# [prefetch_workers]
#
#   Configures the number of threads for performing nodestore prefetching.
#
#
#
# [network_id]
#
#   Specify the network which this server is configured to connect to and
#   track. If set, the server will not establish connections with servers
#   that are explicitly configured to track another network.
#
#   Network identifiers are usually unsigned integers in the range 0 to
#   4294967295 inclusive. The server also maps the following well-known
#   names to the corresponding numerical identifier:
#
#      main -> 0
#      testnet -> 1
#      devnet -> 2
#
#   If this value is not specified the server is not explicitly configured
#   to track a particular network.
#
#
# [ledger_replay]
#
#   0 or 1.
#
#   0: Disable the ledger replay feature [default]
#   1: Enable the ledger replay feature. With this feature enabled, when
#      acquiring a ledger from the network, a rippled node only downloads
#      the ledger header and the transactions instead of the whole ledger.
#      And the ledger is built by applying the transactions to the parent
#      ledger.
#
#-------------------------------------------------------------------------------
#
@@ -749,11 +885,9 @@
#   certificates that the server will accept for verifying HTTP servers.
#   Used only for outbound HTTPS client connections.
#
#
#
#-------------------------------------------------------------------------------
#
# 5. Database
# 6. Database
#
#------------
#
@@ -800,33 +934,86 @@
# RocksDB is an alternative backend for systems that don't use solid-state
# drives. Because RocksDB's performance degrades as it stores more data,
# keeping full history is not advised, and using online delete is
# recommended. RocksDB is not available on Windows.
# recommended.
#
# The RocksDB backend also provides these optional parameters:
# Required keys for NuDB and RocksDB:
#
#   compression         0 for none, 1 for Snappy compression
#   path                Location to store the database
#
# Optional keys
#
#   cache_size          Size of cache for database records. Default is 16384.
#                       Setting this value to 0 will use the default value.
#
# Required keys:
#   path                Location to store the database (all types)
#   cache_age           Length of time in minutes to keep database records
#                       cached. Default is 5 minutes. Setting this value to
#                       0 will use the default value.
#
# Optional keys:
#                       Note: if neither cache_size nor cache_age is
#                       specified, the cache for database records will not
#                       be created. If only one of cache_size or cache_age
#                       is specified, the cache will be created using the
#                       default value for the unspecified parameter.
#
# These keys are possible for any type of backend:
#                       Note: the cache will not be created if online_delete
#                       is specified.
#
#   fast_load           Boolean. If set, load the last persisted ledger
#                       from disk upon process start before syncing to
#                       the network. This is likely to improve performance
#                       if sufficient IOPS capacity is available.
#                       Default 0.
#
# Optional keys for NuDB or RocksDB:
#
#   earliest_seq        The default is 32570 to match the XRP ledger
#                       network's earliest allowed sequence. Alternate
#                       networks may set this value. Minimum value of 1.
#
#   online_delete       Minimum value of 256. Enable automatic purging
#                       of older ledger information. Maintain at least this
#                       number of ledger records online. Must be greater
#                       than or equal to ledger_history.
#
#   advisory_delete     0 for disabled, 1 for enabled. If set, then
#                       require administrative RPC call "can_delete"
#                       to enable online deletion of ledger records.
# These keys modify the behavior of online_delete, and thus are only
# relevant if online_delete is defined and non-zero:
#
#   earliest_seq        The default is 32570 to match the XRP ledger
#                       network's earliest allowed sequence. Alternate
#                       networks may set this value. Minimum value of 1.
#   advisory_delete     0 for disabled, 1 for enabled. If set, the
#                       administrative RPC call "can_delete" is required
#                       to enable online deletion of ledger records.
#                       Online deletion does not run automatically if
#                       non-zero and the last deletion was on a ledger
#                       greater than the current "can_delete" setting.
#                       Default is 0.
#
#   delete_batch        When automatically purging, SQLite database
#                       records are deleted in batches. This value
#                       controls the maximum size of each batch. Larger
#                       batches keep the databases locked for more time,
#                       which may cause other functions to fall behind,
#                       and thus cause the node to lose sync.
#                       Default is 100.
#
#   back_off_milliseconds
#                       Number of milliseconds to wait between
#                       online_delete batches to allow other functions
#                       to catch up.
#                       Default is 100.
#
#   age_threshold_seconds
#                       The online delete process will only run if the
#                       latest validated ledger is younger than this
#                       number of seconds.
#                       Default is 60.
#
#   recovery_wait_seconds
#                       The online delete process checks periodically
#                       that rippled is still in sync with the network,
#                       and that the validated ledger is less than
#                       'age_threshold_seconds' old. If not, then continue
#                       sleeping for this number of seconds and
#                       checking until healthy.
#                       Default is 5.
#
# Notes:
# The 'node_db' entry configures the primary, persistent storage.
@@ -838,46 +1025,108 @@
# [import_db]      Settings for performing a one-time import (optional)
# [database_path]  Path to the book-keeping databases.
#
# [shard_db]       Settings for the Shard Database (optional)
# The server creates and maintains 4 to 5 bookkeeping SQLite databases in
# the 'database_path' location. If you omit this configuration setting,
# the server creates a directory called "db" located in the same place as
# your rippled.cfg file.
# Partial pathnames are relative to the location of the rippled executable.
#
# [sqlite]         Tuning settings for the SQLite databases (optional)
#
# Format (without spaces):
#   One or more lines of case-insensitive key / value pairs:
#   <key> '=' <value>
#   ...
#
# Example:
#   type=nudb
#   path=db/shards/nudb
# Example 1:
#   safety_level=low
#
# The "type" field must be present and controls the choice of backend:
# Example 2:
#   journal_mode=off
#   synchronous=off
#
# type = NuDB
#   NuDB is recommended for shards.
# WARNING: These settings can have significant effects on data integrity,
# particularly in systemic failure scenarios. It is strongly recommended
# that they be left at their defaults unless the server is having
# performance issues during normal operation or during automatic purging
# (online_delete) operations. A warning will be logged on startup if
# 'ledger_history' is configured to store more than 10,000,000 ledgers and
# any of these settings are less safe than the default. This is due to the
# inordinate amount of time and bandwidth it will take to safely rebuild a
# corrupted database of that size from other peers.
#
# type = RocksDB
# Optional keys:
#
# The RocksDB backend also provides these optional parameters:
#   safety_level   Valid values: high, low
#                  The default is "high", which tunes the SQLite
#                  databases in the most reliable mode, and is
#                  equivalent to:
#                      journal_mode=wal
#                      synchronous=normal
#                      temp_store=file
#                  "low" is equivalent to:
#                      journal_mode=memory
#                      synchronous=off
#                      temp_store=memory
#                  These "low" settings trade speed and reduced I/O
#                  for a higher risk of data loss. See the
#                  individual settings below for more information.
#                  This setting may not be combined with any of the
#                  other tuning settings: "journal_mode",
#                  "synchronous", or "temp_store".
#
#   compression    0 for none, 1 for Snappy compression
#   journal_mode   Valid values: delete, truncate, persist, memory, wal, off
#                  The default is "wal", which uses a write-ahead
#                  log to implement database transactions.
#                  Alternately, "memory" saves disk I/O, but if
#                  rippled crashes during a transaction, the
#                  database is likely to be corrupted.
#                  See https://www.sqlite.org/pragma.html#pragma_journal_mode
#                  for more details about the available options.
#                  This setting may not be combined with the
#                  "safety_level" setting.
#
# Required keys:
#   path           Location to store the database (all types)
#   synchronous    Valid values: off, normal, full, extra
#                  The default is "normal", which works well with
#                  the "wal" journal mode. Alternatively, "off"
#                  allows rippled to continue as soon as data is
#                  passed to the OS, which can significantly
#                  increase speed, but risks data corruption if
#                  the host computer crashes before writing that
#                  data to disk.
#                  See https://www.sqlite.org/pragma.html#pragma_synchronous
#                  for more details about the available options.
#                  This setting may not be combined with the
#                  "safety_level" setting.
#
#   max_size_gb    Maximum disk space the database will utilize (in gigabytes)
#
#
# There are 4 bookkeeping SQLite database that the server creates and
# maintains. If you omit this configuration setting, it will default to
# creating a directory called "db" located in the same place as your
# rippled.cfg file. Partial pathnames will be considered relative to
# the location of the rippled executable.
#   temp_store     Valid values: default, file, memory
#                  The default is "file", which will use files
#                  for temporary database tables and indices.
#                  Alternatively, "memory" may save I/O, but
#                  rippled does not currently use many, if any,
#                  of these temporary objects.
#                  See https://www.sqlite.org/pragma.html#pragma_temp_store
#                  for more details about the available options.
#                  This setting may not be combined with the
#                  "safety_level" setting.
#
#   page_size      Valid values: integer (MUST be power of 2 between 512 and 65536)
#                  The default is 4096 bytes. This setting determines
#                  the size of a page in the transaction.db file.
#                  See https://www.sqlite.org/pragma.html#pragma_page_size
#                  for more details about the available options.
#
#   journal_size_limit
#                  Valid values: integer
#                  The default is 1582080. This setting limits the size of
#                  the journal for the transaction.db file. When the limit
#                  is reached, older entries will be deleted.
#                  See https://www.sqlite.org/pragma.html#pragma_journal_size_limit
#                  for more details about the available options.
#
#
#-------------------------------------------------------------------------------
#
# 6. Diagnostics
# 7. Diagnostics
#
#---------------
#
@@ -951,7 +1200,7 @@
#
#-------------------------------------------------------------------------------
#
# 7. Voting
# 8. Voting
#
#----------
#
@@ -986,7 +1235,7 @@
#      default. Don't change this without understanding the consequences.
#
#   Example:
#      account_reserve = 20000000      # 20 XRP
#      account_reserve = 10000000      # 10 XRP
#
#   owner_reserve = <drops>
#
@@ -998,13 +1247,33 @@
#      default. Don't change this without understanding the consequences.
#
#   Example:
#      owner_reserve = 5000000         # 5 XRP
#      owner_reserve = 2000000         # 2 XRP
#
#-------------------------------------------------------------------------------
#
# 8. Misc Settings
# 9. Misc Settings
#
#----------
#-----------------
#
# [node_size]
#
#   Tunes the servers based on the expected load and available memory. Legal
#   sizes are "tiny", "small", "medium", "large", and "huge". We recommend
#   you start at the default and raise the setting if you have extra memory.
#
#   The code attempts to automatically determine the appropriate size for
#   this parameter based on the amount of RAM and the number of execution
#   cores available to the server. The current decision matrix is:
#
#   |         |          Cores         |
#   |---------|------------------------|
#   | RAM     | 1    | 2 or 3 | >= 4   |
#   |---------|------|--------|--------|
#   | < ~8GB  | tiny | tiny   | tiny   |
#   | < ~12GB | tiny | small  | small  |
#   | < ~16GB | tiny | small  | medium |
#   | < ~24GB | tiny | small  | large  |
#   | < ~32GB | tiny | small  | huge   |
#
# [signing_support]
#
@@ -1025,9 +1294,72 @@
#   [signing_support]
#   true
#
# [crawl]
#
#   List of options to control what data is reported through the /crawl endpoint
#   See https://xrpl.org/peer-crawler.html
#
#   <flag>
#
#      Enable or disable access to /crawl requests. Default is '1' which
#      enables access.
#
#   overlay = <flag>
#
#      Report information about peers this server is connected to, similar
#      to the "peers" RPC API. Default is '1' which means to report peer
#      overlay info.
#
#   server = <flag>
#
#      Report information about the local server, similar to the "server_state"
#      RPC API. Default is '1' which means to report local server info.
#
#   counts = <flag>
#
#      Report information about the local server health counters, similar to
#      the "get_counts" RPC API. Default is '0' which means not to report
#      server counts.
#
#   unl = <flag>
#
#      Report information about the local server's validator lists, similar to
#      the "validators" and "validator_list_sites" RPC APIs. Default is '1'
#      which means to report server validator lists.
#
#   Examples:
#
#      [crawl]
#      0
#
#      [crawl]
#      overlay = 1
#      server = 1
#      counts = 0
#      unl = 1
#
# [vl]
#
#   Options to control what data is reported through the /vl endpoint
#   See [...]
#
#   enable = <flag>
#
#      Enable or disable access to /vl requests. Default is '1' which
#      enables access.
#
# [beta_rpc_api]
#
#   0 or 1.
#
#   0: Disable the beta API version for JSON-RPC and WebSocket [default]
#   1: Enable the beta API version for testing. The beta API version
#      contains breaking changes that require a new API version number.
#      They are not ready for public consumption.
#
#-------------------------------------------------------------------------------
#
# 9. Example Settings
# 10. Example Settings
#
#--------------------
#
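A quick way to see the effect of the [crawl] flags documented above is to query the endpoint directly on the peer port (the host and port here are illustrative; the peer port serves /crawl over HTTPS with a self-signed certificate, hence --insecure):

    curl --insecure https://localhost:51235/crawl | python3 -m json.tool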
@@ -1055,6 +1387,12 @@
|
||||
# Admin level API commands over Secure Websockets, when originating
|
||||
# from the same machine (via the loopback adapter at 127.0.0.1).
|
||||
#
|
||||
# "grpc"
|
||||
#
|
||||
# ETL commands for Clio. We recommend setting secure_gateway
|
||||
# in this section to a comma-separated list of the addresses
|
||||
# of your Clio servers, in order to bypass rippled's rate limiting.
|
||||
#
|
||||
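#       For instance, with Clio servers at the (illustrative) addresses
#       10.0.0.5 and 10.0.0.6:
#
#         secure_gateway = 10.0.0.5, 10.0.0.6
#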
#   This port is commented out but can be enabled by removing
#   the '#' from each corresponding line including the entry under [server]
#
@@ -1088,8 +1426,13 @@ admin = 127.0.0.1
protocol = http

[port_peer]
# Many servers still use the legacy port of 51235, so for backward-compatibility
# we maintain that port number here. However, for new servers we recommend
# changing this to the default port of 2459.
port = 51235
ip = 0.0.0.0
# alternatively, to accept connections on IPv4 + IPv6, use:
#ip = ::
protocol = peer

[port_ws_admin_local]
@@ -1097,62 +1440,52 @@ port = 6006
ip = 127.0.0.1
admin = 127.0.0.1
protocol = ws
send_queue_limit = 500

[port_grpc]
port = 50051
ip = 127.0.0.1
secure_gateway = 127.0.0.1

#[port_ws_public]
#port = 5005
#port = 6005
#ip = 127.0.0.1
#protocol = wss
#send_queue_limit = 500

#-------------------------------------------------------------------------------

[node_size]
medium

# This is the primary persistent datastore for rippled. This includes transaction
# metadata, account states, and ledger headers. Helpful information can be
# found here: https://ripple.com/wiki/NodeBackEnd
# delete old ledgers while maintaining at least 2000. Do not require an
# external administrative command to initiate deletion.
# found at https://xrpl.org/capacity-planning.html#node-db-type
# type=NuDB is recommended for non-validators with fast SSDs. Validators or
# slow / spinning disks should use RocksDB. Caution: Spinning disks are
# not recommended. They do not perform well enough to consistently remain
# synced to the network.
# online_delete=512 is recommended to delete old ledgers while maintaining at
# least 512.
# advisory_delete=0 allows the online delete process to run automatically
# when the node has approximately two times the "online_delete" value of
# ledgers. No external administrative command is required to initiate
# deletion.
[node_db]
type=RocksDB
path=/var/lib/rippled/db/rocksdb
open_files=2000
filter_bits=12
cache_mb=256
file_size_mb=8
file_size_mult=2
online_delete=2000
type=NuDB
path=/var/lib/rippled/db/nudb
online_delete=512
advisory_delete=0

# This is the persistent datastore for shards. It is important for the health
# of the ripple network that rippled operators shard as much as practical.
# NuDB requires SSD storage. Helpful information can be found here
# https://ripple.com/build/history-sharding
#[shard_db]
#type=NuDB
#path=/var/lib/rippled/db/shards/nudb
#max_size_gb=500

[database_path]
/var/lib/rippled/db


# This needs to be an absolute directory reference, not a relative one.
# Modify this value as required.
[debug_logfile]
/var/log/rippled/debug.log

[sntp_servers]
time.windows.com
time.apple.com
time.nist.gov
pool.ntp.org

# Where to find some other servers speaking the Ripple protocol.
[ips]
r.ripple.com 51235

# To use the XRP test network (see https://ripple.com/build/xrp-test-net/),
# use the following [ips] section instead:
# To use the XRP test network
# (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
# use the following [ips] section:
# [ips]
# r.altnet.rippletest.net 51235


@@ -1,10 +0,0 @@
[Unit]
Description=Ripple Peer-to-Peer Network Daemon

[Service]
Type=simple
User=nobody
ExecStart=/usr/bin/rippled --conf=/etc/rippled/rippled.cfg

[Install]
WantedBy=multi-user.target

114  cfg/rippled.init

@@ -1,114 +0,0 @@
#!/bin/sh

### BEGIN INIT INFO
# Provides:          ripple
# Required-Start:    $local_fs $remote_fs $network $syslog
# Required-Stop:     $local_fs $remote_fs $network $syslog
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: starts the ripple network node
# Description:       starts rippled using start-stop-daemon
### END INIT INFO

set -e

NAME=rippled
USER="rippled"
GROUP="rippled"
PIDFILE=/var/run/$NAME.pid
DAEMON=/usr/local/sbin/rippled
DAEMON_OPTS="--conf /etc/ripple/rippled.cfg"
NET_OPTS="--net $DAEMON_OPTS"
LOGDIR="/var/log/rippled"
DBDIR="/var/db/rippled/db/hyperldb"

export PATH="${PATH:+$PATH:}/usr/sbin:/sbin"

# I wish it didn't come down to this, but this is the easiest way to ensure
# sanity of an install.
if [ ! -d $LOGDIR ]; then
  mkdir -p $LOGDIR
  chown $USER:$GROUP $LOGDIR
fi
if [ ! -d $DBDIR ]; then
  mkdir -p $DBDIR
  chown -R $USER:$GROUP $DBDIR
fi

case "$1" in
  start)
    echo -n "Starting daemon: "$NAME
    start-stop-daemon --start --quiet --background -m --pidfile $PIDFILE \
        --exec $DAEMON --chuid $USER --group $GROUP --verbose -- $NET_OPTS
    echo "."
    ;;

  stop)
    echo -n "Stopping daemon: "$NAME
    $DAEMON $DAEMON_OPTS stop
    rm -f $PIDFILE
    echo "."
    ;;

  restart)
    echo -n "Restarting daemon: "$NAME
    $DAEMON $DAEMON_OPTS stop
    rm -f $PIDFILE
    start-stop-daemon --start --quiet --background -m --pidfile $PIDFILE \
        --exec $DAEMON --chuid $USER --group $GROUP -- $NET_OPTS
    echo "."
    ;;

  status)
    echo "Status of $NAME:"
    echo -n "PID of $NAME: "
    if [ -f "$PIDFILE" ]; then
      cat $PIDFILE
      $DAEMON $DAEMON_OPTS server_info
    else
      echo "$NAME not running."
    fi
    echo "."
    ;;

  fetch)
    echo "$NAME ledger fetching info:"
    $DAEMON $DAEMON_OPTS fetch_info
    echo "."
    ;;

  uptime)
    echo "$NAME uptime:"
    $DAEMON $DAEMON_OPTS get_counts
    echo "."
    ;;

  startconfig)
    echo "$NAME is being started with the following command line:"
    echo "$DAEMON $NET_OPTS"
    echo "."
    ;;

  command)
    # Truncate the script's argument vector by one position to get rid of
    # this entry.
    shift

    # Pass the remainder of the argument vector to rippled.
    $DAEMON $DAEMON_OPTS "$@"
    echo "."
    ;;

  test)
    $DAEMON $DAEMON_OPTS ping
    echo "."
    ;;

  *)
    echo "Usage: $0 {start|stop|restart|status|fetch|uptime|startconfig|"
    echo "          command|test}"
    exit 1
esac

exit 0

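Typical invocations of this (now-removed) init script, assuming it was
installed as /etc/init.d/rippled:

  /etc/init.d/rippled start
  /etc/init.d/rippled status
  /etc/init.d/rippled command server_info   # forwards arguments to rippled
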
@@ -1,12 +1,11 @@
#
# Default validators.txt
#
# A list of domains to bootstrap a node's UNLs or for clients to indirectly
# locate IPs to contact the Ripple network.
# This file is located in the same folder as your rippled.cfg file
# and defines which validators your server trusts not to collude.
#
# This file is UTF-8 with Dos, UNIX, or Mac style end of lines.
# This file is UTF-8 with DOS, UNIX, or Mac style line endings.
# Blank lines and lines starting with a '#' are ignored.
# All other lines should be hankos or domain names.
#
#
#
@@ -14,10 +13,8 @@
#
# List of the validation public keys of nodes to always accept as validators.
#
# The latest list of recommended validators can be obtained from
# https://ripple.com/ripple.txt
#
# See also https://wiki.ripple.com/Ripple.txt
# Manually listing validator keys is not recommended for production networks.
# See validator_list_sites and validator_list_keys below.
#
# Examples:
#  n9KorY8QtTdRx7TVDpwnG9NvyxsDwHUKUEeDLY3AkiGncVaSXZi5
@@ -29,7 +26,9 @@
#
# Examples:
#  https://vl.ripple.com
#  https://unl.xrplf.org
#  http://127.0.0.1:8000
#  file:///etc/opt/ripple/vl.txt
#
# [validator_list_keys]
#
@@ -40,9 +39,8 @@
# Validator list keys should be hex-encoded.
#
# Examples:
#  ed499d732bded01504a7407c224412ef550cc1ade638a4de4eb88af7c36cb8b282
#  0202d3f36a801349f3be534e3f64cfa77dede6e1b6310a0b48f40f20f955cec945
#  02dd8b7075f64d77d9d2bdb88da364f29fcd975f9ea6f21894abcc7564efda8054
#  ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
#  ED307A760EE34F2D0CAA103377B1969117C38B8AA0AA1E2A24DAC1F32FC97087ED
#

# The default validator list publishers that the rippled instance
@@ -56,11 +54,15 @@

[validator_list_sites]
https://vl.ripple.com
https://unl.xrplf.org

[validator_list_keys]
#vl.ripple.com
ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
#unl.xrplf.org
ED42AEC58B701EEBB77356FFFEC26F83C1F0407263530F068C7C73D392C7E06FD1

# To use the XRP test network (see https://ripple.com/build/xrp-test-net/),
# To use the test network (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
# use the following configuration instead:
#
# [validator_list_sites]
@@ -68,3 +70,21 @@ ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
#
# [validator_list_keys]
# ED264807102805220DA0F312E71FC2C69E1552C9C5790F6C25E3729DEB573D5860


# [validator_list_threshold]
#
# Minimum number of validator lists on which a validator must be listed in
# order to be used.
#
# This can be set explicitly to any positive integer number not greater than
# the size of [validator_list_keys]. If it is not set, or set to 0, the
# value will be calculated at startup from the size of [validator_list_keys],
# where the calculation is:
#
# threshold = size(validator_list_keys) < 3
#                 ? 1
#                 : floor(size(validator_list_keys) / 2) + 1
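#
# For example: with the two [validator_list_keys] entries configured above,
# size(validator_list_keys) = 2 < 3, so the computed threshold is 1; with
# five keys it would be floor(5 / 2) + 1 = 3.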

[validator_list_threshold]
0

48  cmake/CMakeFuncs.cmake  Normal file

@@ -0,0 +1,48 @@
macro(group_sources_in source_dir curdir)
  file(GLOB children RELATIVE ${source_dir}/${curdir}
    ${source_dir}/${curdir}/*)
  foreach (child ${children})
    if (IS_DIRECTORY ${source_dir}/${curdir}/${child})
      group_sources_in(${source_dir} ${curdir}/${child})
    else()
      string(REPLACE "/" "\\" groupname ${curdir})
      source_group(${groupname} FILES
        ${source_dir}/${curdir}/${child})
    endif()
  endforeach()
endmacro()

macro(group_sources curdir)
  group_sources_in(${PROJECT_SOURCE_DIR} ${curdir})
endmacro()

macro (exclude_from_default target_)
  set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_ALL ON)
  set_target_properties (${target_} PROPERTIES EXCLUDE_FROM_DEFAULT_BUILD ON)
endmacro ()

macro (exclude_if_included target_)
  get_directory_property(has_parent PARENT_DIRECTORY)
  if (has_parent)
    exclude_from_default (${target_})
  endif ()
endmacro ()

find_package(Git)

function (git_branch branch_val)
  if (NOT GIT_FOUND)
    return ()
  endif ()
  set (_branch "")
  execute_process (COMMAND ${GIT_EXECUTABLE} "rev-parse" "--abbrev-ref" "HEAD"
    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
    RESULT_VARIABLE _git_exit_code
    OUTPUT_VARIABLE _temp_branch
    OUTPUT_STRIP_TRAILING_WHITESPACE
    ERROR_QUIET)
  if (_git_exit_code EQUAL 0)
    set (_branch ${_temp_branch})
  endif ()
  set (${branch_val} "${_branch}" PARENT_SCOPE)
endfunction ()
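# Illustrative usage from a CMakeLists.txt (the "src" layout is hypothetical):
#   group_sources(src)                 # mirror src/ as IDE source groups
#   git_branch(current_branch)
#   message(STATUS "building from branch: ${current_branch}")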
468  cmake/CodeCoverage.cmake  Normal file

@@ -0,0 +1,468 @@
# Copyright (c) 2012 - 2017, Lars Bilke
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
#    list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
#    may be used to endorse or promote products derived from this software without
#    specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# CHANGES:
#
# 2012-01-31, Lars Bilke
# - Enable Code Coverage
#
# 2013-09-17, Joakim Söderberg
# - Added support for Clang.
# - Some additional usage instructions.
#
# 2016-02-03, Lars Bilke
# - Refactored functions to use named parameters
#
# 2017-06-02, Lars Bilke
# - Merged with modified version from github.com/ufz/ogs
#
# 2019-05-06, Anatolii Kurotych
# - Remove unnecessary --coverage flag
#
# 2019-12-13, FeRD (Frank Dana)
# - Deprecate COVERAGE_LCOVR_EXCLUDES and COVERAGE_GCOVR_EXCLUDES lists in favor
#   of tool-agnostic COVERAGE_EXCLUDES variable, or EXCLUDE setup arguments.
# - CMake 3.4+: All excludes can be specified relative to BASE_DIRECTORY
# - All setup functions: accept BASE_DIRECTORY, EXCLUDE list
# - Set lcov basedir with -b argument
# - Add automatic --demangle-cpp in lcovr, if 'c++filt' is available (can be
#   overridden with NO_DEMANGLE option in setup_target_for_coverage_lcovr().)
# - Delete output dir, .info file on 'make clean'
# - Remove Python detection, since version mismatches will break gcovr
# - Minor cleanup (lowercase function names, update examples...)
#
# 2019-12-19, FeRD (Frank Dana)
# - Rename Lcov outputs, make filtered file canonical, fix cleanup for targets
#
# 2020-01-19, Bob Apthorpe
# - Added gfortran support
#
# 2020-02-17, FeRD (Frank Dana)
# - Make all add_custom_target()s VERBATIM to auto-escape wildcard characters
#   in EXCLUDEs, and remove manual escaping from gcovr targets
#
# 2021-01-19, Robin Mueller
# - Add CODE_COVERAGE_VERBOSE option which will allow to print out commands which are run
# - Added the option for users to set the GCOVR_ADDITIONAL_ARGS variable to supply additional
#   flags to the gcovr command
#
# 2020-05-04, Mihchael Davis
# - Add -fprofile-abs-path to make gcno files contain absolute paths
# - Fix BASE_DIRECTORY not working when defined
# - Change BYPRODUCT from folder to index.html to stop ninja from complaining about double defines
#
# 2021-05-10, Martin Stump
# - Check if the generator is multi-config before warning about non-Debug builds
#
# 2022-02-22, Marko Wehle
# - Change gcovr output from -o <filename> for --xml <filename> and --html <filename> output respectively.
#   This will allow for Multiple Output Formats at the same time by making use of GCOVR_ADDITIONAL_ARGS, e.g. GCOVR_ADDITIONAL_ARGS "--txt".
#
# 2022-09-28, Sebastian Mueller
# - fix append_coverage_compiler_flags_to_target to correctly add flags
# - replace "-fprofile-arcs -ftest-coverage" with "--coverage" (equivalent)
#
# 2024-01-04, Bronek Kozicki
# - remove setup_target_for_coverage_lcov (slow) and setup_target_for_coverage_fastcov (no support for Clang)
# - fix Clang support by adding find_program( ... llvm-cov )
# - add Apple Clang support by adding execute_process( COMMAND xcrun -f llvm-cov ... )
# - add CODE_COVERAGE_GCOV_TOOL to explicitly select gcov tool and disable find_program
# - replace both functions setup_target_for_coverage_gcovr_* with a single setup_target_for_coverage_gcovr
# - add support for all gcovr output formats
#
# 2024-04-03, Bronek Kozicki
# - add support for output formats: jacoco, clover, lcov
#
# 2025-05-12, Jingchen Wu
# - add -fprofile-update=atomic to ensure atomic profile generation
#
# USAGE:
#
# 1. Copy this file into your cmake modules path.
#
# 2. Add the following line to your CMakeLists.txt (best inside an if-condition
#    using a CMake option() to enable it just optionally):
#      include(CodeCoverage)
#
# 3. Append necessary compiler flags for all supported source files:
#      append_coverage_compiler_flags()
#    Or for specific target:
#      append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
#
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
#
# 4. If you need to exclude additional directories from the report, specify them
#    using full paths in the COVERAGE_EXCLUDES variable before calling
#    setup_target_for_coverage_*().
#    Example:
#      set(COVERAGE_EXCLUDES
#          '${PROJECT_SOURCE_DIR}/src/dir1/*'
#          '/path/to/my/src/dir2/*')
#    Or, use the EXCLUDE argument to setup_target_for_coverage_*().
#    Example:
#      setup_target_for_coverage_gcovr(
#          NAME coverage
#          EXECUTABLE testrunner
#          EXCLUDE "${PROJECT_SOURCE_DIR}/src/dir1/*" "/path/to/my/src/dir2/*")
#
# 4.a NOTE: With CMake 3.4+, COVERAGE_EXCLUDES or EXCLUDE can also be set
#     relative to the BASE_DIRECTORY (default: PROJECT_SOURCE_DIR)
#     Example:
#       set(COVERAGE_EXCLUDES "dir1/*")
#       setup_target_for_coverage_gcovr(
#           NAME coverage
#           EXECUTABLE testrunner
#           FORMAT html-details
#           BASE_DIRECTORY "${PROJECT_SOURCE_DIR}/src"
#           EXCLUDE "dir2/*")
#
# 4.b If you need to pass specific options to gcovr, specify them in
#     GCOVR_ADDITIONAL_ARGS variable.
#     Example:
#       set (GCOVR_ADDITIONAL_ARGS --exclude-throw-branches --exclude-noncode-lines -s)
#       setup_target_for_coverage_gcovr(
#           NAME coverage
#           EXECUTABLE testrunner
#           EXCLUDE "src/dir1" "src/dir2")
#
# 5. Use the functions described below to create a custom make target which
#    runs your test executable and produces a code coverage report.
#
# 6. Build a Debug build:
#      cmake -DCMAKE_BUILD_TYPE=Debug ..
#      make
#      make my_coverage_target

include(CMakeParseArguments)

option(CODE_COVERAGE_VERBOSE "Verbose information" FALSE)

# Check prereqs
find_program( GCOVR_PATH gcovr PATHS ${CMAKE_SOURCE_DIR}/scripts/test)

if(DEFINED CODE_COVERAGE_GCOV_TOOL)
  set(GCOV_TOOL "${CODE_COVERAGE_GCOV_TOOL}")
elseif(DEFINED ENV{CODE_COVERAGE_GCOV_TOOL})
  set(GCOV_TOOL "$ENV{CODE_COVERAGE_GCOV_TOOL}")
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
  if(APPLE)
    execute_process( COMMAND xcrun -f llvm-cov
      OUTPUT_VARIABLE LLVMCOV_PATH
      OUTPUT_STRIP_TRAILING_WHITESPACE
    )
  else()
    find_program( LLVMCOV_PATH llvm-cov )
  endif()
  if(LLVMCOV_PATH)
    set(GCOV_TOOL "${LLVMCOV_PATH} gcov")
  endif()
elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
  find_program( GCOV_PATH gcov )
  set(GCOV_TOOL "${GCOV_PATH}")
endif()
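# For example (an assumed invocation), the gcov wrapper can be pinned at
# configure time instead of relying on the find_program() fallbacks above:
#   cmake -DCODE_COVERAGE_GCOV_TOOL="llvm-cov gcov" ..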

# Check supported compiler (Clang, GNU and Flang)
get_property(LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES)
foreach(LANG ${LANGUAGES})
  if("${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(Apple)?[Cc]lang")
    if("${CMAKE_${LANG}_COMPILER_VERSION}" VERSION_LESS 3)
      message(FATAL_ERROR "Clang version must be 3.0.0 or greater! Aborting...")
    endif()
  elseif(NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "GNU"
         AND NOT "${CMAKE_${LANG}_COMPILER_ID}" MATCHES "(LLVM)?[Ff]lang")
    message(FATAL_ERROR "Compiler is not GNU or Flang! Aborting...")
  endif()
endforeach()

set(COVERAGE_COMPILER_FLAGS "-g --coverage"
    CACHE INTERNAL "")
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
  include(CheckCXXCompilerFlag)
  include(CheckCCompilerFlag)

  check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
  if(HAVE_cxx_fprofile_abs_path)
    set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
  endif()

  check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
  if(HAVE_c_fprofile_abs_path)
    set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
  endif()

  check_cxx_compiler_flag(-fprofile-update HAVE_cxx_fprofile_update)
  if(HAVE_cxx_fprofile_update)
    set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
  endif()

  check_c_compiler_flag(-fprofile-update HAVE_c_fprofile_update)
  if(HAVE_c_fprofile_update)
    set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
  endif()
endif()

set(CMAKE_Fortran_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the Fortran compiler during coverage builds."
    FORCE )
set(CMAKE_CXX_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the C++ compiler during coverage builds."
    FORCE )
set(CMAKE_C_FLAGS_COVERAGE
    ${COVERAGE_COMPILER_FLAGS}
    CACHE STRING "Flags used by the C compiler during coverage builds."
    FORCE )
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
    ""
    CACHE STRING "Flags used for linking binaries during coverage builds."
    FORCE )
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
    ""
    CACHE STRING "Flags used by the shared libraries linker during coverage builds."
    FORCE )
mark_as_advanced(
  CMAKE_Fortran_FLAGS_COVERAGE
  CMAKE_CXX_FLAGS_COVERAGE
  CMAKE_C_FLAGS_COVERAGE
  CMAKE_EXE_LINKER_FLAGS_COVERAGE
  CMAKE_SHARED_LINKER_FLAGS_COVERAGE )

get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
  message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)

if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
  link_libraries(gcov)
endif()

# Defines a target for running and collection code coverage information
# Builds dependencies, runs the given executable and outputs reports.
# NOTE! The executable should always have a ZERO as exit code otherwise
# the coverage generation will not complete.
#
# setup_target_for_coverage_gcovr(
#     NAME ctest_coverage                    # New target name
#     EXECUTABLE ctest -j ${PROCESSOR_COUNT} # Executable in PROJECT_BINARY_DIR
#     DEPENDENCIES executable_target         # Dependencies to build first
#     BASE_DIRECTORY "../"                   # Base directory for report
#                                            #  (defaults to PROJECT_SOURCE_DIR)
#     FORMAT "cobertura"                     # Output format, one of:
#                                            #  xml cobertura sonarqube jacoco clover
#                                            #  json-summary json-details coveralls csv
#                                            #  txt html-single html-nested html-details
#                                            #  lcov (xml is an alias to cobertura;
#                                            #  if no format is set, defaults to xml)
#     EXCLUDE "src/dir1/*" "src/dir2/*"      # Patterns to exclude (can be relative
#                                            #  to BASE_DIRECTORY, with CMake 3.4+)
# )
# The user can set the variable GCOVR_ADDITIONAL_ARGS to supply additional flags to the
# GCOVR command.
function(setup_target_for_coverage_gcovr)
  set(options NONE)
  set(oneValueArgs BASE_DIRECTORY NAME FORMAT)
  set(multiValueArgs EXCLUDE EXECUTABLE EXECUTABLE_ARGS DEPENDENCIES)
  cmake_parse_arguments(Coverage "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})

  if(NOT GCOV_TOOL)
    message(FATAL_ERROR "Could not find gcov or llvm-cov tool! Aborting...")
  endif()

  if(NOT GCOVR_PATH)
    message(FATAL_ERROR "Could not find gcovr tool! Aborting...")
  endif()

  # Set base directory (as absolute path), or default to PROJECT_SOURCE_DIR
  if(DEFINED Coverage_BASE_DIRECTORY)
    get_filename_component(BASEDIR ${Coverage_BASE_DIRECTORY} ABSOLUTE)
  else()
    set(BASEDIR ${PROJECT_SOURCE_DIR})
  endif()

  if(NOT DEFINED Coverage_FORMAT)
    set(Coverage_FORMAT xml)
  endif()

  if("--output" IN_LIST GCOVR_ADDITIONAL_ARGS)
    message(FATAL_ERROR "Unsupported --output option detected in GCOVR_ADDITIONAL_ARGS! Aborting...")
  else()
    if((Coverage_FORMAT STREQUAL "html-details")
       OR (Coverage_FORMAT STREQUAL "html-nested"))
      set(GCOVR_OUTPUT_FILE ${PROJECT_BINARY_DIR}/${Coverage_NAME}/index.html)
      set(GCOVR_CREATE_FOLDER ${PROJECT_BINARY_DIR}/${Coverage_NAME})
    elseif(Coverage_FORMAT STREQUAL "html-single")
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.html)
    elseif((Coverage_FORMAT STREQUAL "json-summary")
           OR (Coverage_FORMAT STREQUAL "json-details")
           OR (Coverage_FORMAT STREQUAL "coveralls"))
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.json)
    elseif(Coverage_FORMAT STREQUAL "txt")
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.txt)
    elseif(Coverage_FORMAT STREQUAL "csv")
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.csv)
    elseif(Coverage_FORMAT STREQUAL "lcov")
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.lcov)
    else()
      set(GCOVR_OUTPUT_FILE ${Coverage_NAME}.xml)
    endif()
  endif()

  if((Coverage_FORMAT STREQUAL "cobertura")
     OR (Coverage_FORMAT STREQUAL "xml"))
    list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --cobertura-pretty )
    set(Coverage_FORMAT cobertura) # overwrite xml
  elseif(Coverage_FORMAT STREQUAL "sonarqube")
    list(APPEND GCOVR_ADDITIONAL_ARGS --sonarqube "${GCOVR_OUTPUT_FILE}" )
  elseif(Coverage_FORMAT STREQUAL "jacoco")
    list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --jacoco-pretty )
  elseif(Coverage_FORMAT STREQUAL "clover")
    list(APPEND GCOVR_ADDITIONAL_ARGS --clover "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --clover-pretty )
  elseif(Coverage_FORMAT STREQUAL "lcov")
    list(APPEND GCOVR_ADDITIONAL_ARGS --lcov "${GCOVR_OUTPUT_FILE}" )
  elseif(Coverage_FORMAT STREQUAL "json-summary")
    list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --json-summary-pretty)
  elseif(Coverage_FORMAT STREQUAL "json-details")
    list(APPEND GCOVR_ADDITIONAL_ARGS --json "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --json-pretty)
  elseif(Coverage_FORMAT STREQUAL "coveralls")
    list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --coveralls-pretty)
  elseif(Coverage_FORMAT STREQUAL "csv")
    list(APPEND GCOVR_ADDITIONAL_ARGS --csv "${GCOVR_OUTPUT_FILE}" )
  elseif(Coverage_FORMAT STREQUAL "txt")
    list(APPEND GCOVR_ADDITIONAL_ARGS --txt "${GCOVR_OUTPUT_FILE}" )
  elseif(Coverage_FORMAT STREQUAL "html-single")
    list(APPEND GCOVR_ADDITIONAL_ARGS --html "${GCOVR_OUTPUT_FILE}" )
    list(APPEND GCOVR_ADDITIONAL_ARGS --html-self-contained)
  elseif(Coverage_FORMAT STREQUAL "html-nested")
    list(APPEND GCOVR_ADDITIONAL_ARGS --html-nested "${GCOVR_OUTPUT_FILE}" )
  elseif(Coverage_FORMAT STREQUAL "html-details")
    list(APPEND GCOVR_ADDITIONAL_ARGS --html-details "${GCOVR_OUTPUT_FILE}" )
  else()
    message(FATAL_ERROR "Unsupported output style ${Coverage_FORMAT}! Aborting...")
  endif()

  # Collect excludes (CMake 3.4+: Also compute absolute paths)
  set(GCOVR_EXCLUDES "")
  foreach(EXCLUDE ${Coverage_EXCLUDE} ${COVERAGE_EXCLUDES} ${COVERAGE_GCOVR_EXCLUDES})
    if(CMAKE_VERSION VERSION_GREATER 3.4)
      get_filename_component(EXCLUDE ${EXCLUDE} ABSOLUTE BASE_DIR ${BASEDIR})
    endif()
    list(APPEND GCOVR_EXCLUDES "${EXCLUDE}")
  endforeach()
  list(REMOVE_DUPLICATES GCOVR_EXCLUDES)

  # Combine excludes to several -e arguments
  set(GCOVR_EXCLUDE_ARGS "")
  foreach(EXCLUDE ${GCOVR_EXCLUDES})
    list(APPEND GCOVR_EXCLUDE_ARGS "-e")
    list(APPEND GCOVR_EXCLUDE_ARGS "${EXCLUDE}")
  endforeach()

  # Set up commands which will be run to generate coverage data
  # Run tests
  set(GCOVR_EXEC_TESTS_CMD
      ${Coverage_EXECUTABLE} ${Coverage_EXECUTABLE_ARGS}
  )

  # Create folder
  if(DEFINED GCOVR_CREATE_FOLDER)
    set(GCOVR_FOLDER_CMD
        ${CMAKE_COMMAND} -E make_directory ${GCOVR_CREATE_FOLDER})
  else()
    set(GCOVR_FOLDER_CMD echo) # dummy
  endif()

  # Running gcovr
  set(GCOVR_CMD
      ${GCOVR_PATH}
      --gcov-executable ${GCOV_TOOL}
      --gcov-ignore-parse-errors=negative_hits.warn_once_per_file
      -r ${BASEDIR}
      ${GCOVR_ADDITIONAL_ARGS}
      ${GCOVR_EXCLUDE_ARGS}
      --object-directory=${PROJECT_BINARY_DIR}
  )

  if(CODE_COVERAGE_VERBOSE)
    message(STATUS "Executed command report")

    message(STATUS "Command to run tests: ")
    string(REPLACE ";" " " GCOVR_EXEC_TESTS_CMD_SPACED "${GCOVR_EXEC_TESTS_CMD}")
    message(STATUS "${GCOVR_EXEC_TESTS_CMD_SPACED}")

    if(NOT GCOVR_FOLDER_CMD STREQUAL "echo")
      message(STATUS "Command to create a folder: ")
      string(REPLACE ";" " " GCOVR_FOLDER_CMD_SPACED "${GCOVR_FOLDER_CMD}")
      message(STATUS "${GCOVR_FOLDER_CMD_SPACED}")
    endif()

    message(STATUS "Command to generate gcovr coverage data: ")
    string(REPLACE ";" " " GCOVR_CMD_SPACED "${GCOVR_CMD}")
    message(STATUS "${GCOVR_CMD_SPACED}")
  endif()

  add_custom_target(${Coverage_NAME}
    COMMAND ${GCOVR_EXEC_TESTS_CMD}
    COMMAND ${GCOVR_FOLDER_CMD}
    COMMAND ${GCOVR_CMD}

    BYPRODUCTS ${GCOVR_OUTPUT_FILE}
    WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
    DEPENDS ${Coverage_DEPENDENCIES}
    VERBATIM # Protect arguments to commands
    COMMENT "Running gcovr to produce code coverage report."
  )

  # Show info where to find the report
  add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
    COMMAND ;
    COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
  )
endfunction() # setup_target_for_coverage_gcovr

function(append_coverage_compiler_flags)
  set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
  message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
endfunction() # append_coverage_compiler_flags

# Setup coverage for specific library
function(append_coverage_compiler_flags_to_target name)
  separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
  target_compile_options(${name} PRIVATE ${_flag_list})
  if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
    target_link_libraries(${name} PRIVATE gcov)
  endif()
endfunction()
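# Illustrative per-target usage of the helper above (target name assumed):
#   append_coverage_compiler_flags_to_target(rippled)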
56  cmake/RippleConfig.cmake  Normal file

@@ -0,0 +1,56 @@
include (CMakeFindDependencyMacro)
# need to represent system dependencies of the lib here
#[=========================================================[
   Boost
#]=========================================================]
if (static OR APPLE OR MSVC)
  set (Boost_USE_STATIC_LIBS ON)
endif ()
set (Boost_USE_MULTITHREADED ON)
if (static OR MSVC)
  set (Boost_USE_STATIC_RUNTIME ON)
else ()
  set (Boost_USE_STATIC_RUNTIME OFF)
endif ()
find_dependency (Boost 1.70
  COMPONENTS
    chrono
    container
    context
    coroutine
    date_time
    filesystem
    program_options
    regex
    system
    thread)
#[=========================================================[
   OpenSSL
#]=========================================================]
if (NOT DEFINED OPENSSL_ROOT_DIR)
  if (DEFINED ENV{OPENSSL_ROOT})
    set (OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT})
  elseif (APPLE)
    find_program (homebrew brew)
    if (homebrew)
      execute_process (COMMAND ${homebrew} --prefix openssl
        OUTPUT_VARIABLE OPENSSL_ROOT_DIR
        OUTPUT_STRIP_TRAILING_WHITESPACE)
    endif ()
  endif ()
  file (TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR)
endif ()

if (static OR APPLE OR MSVC)
  set (OPENSSL_USE_STATIC_LIBS ON)
endif ()
set (OPENSSL_MSVC_STATIC_RT ON)
find_dependency (OpenSSL 1.1.1 REQUIRED)
find_dependency (ZLIB)
find_dependency (date)
if (TARGET ZLIB::ZLIB)
  set_target_properties(OpenSSL::Crypto PROPERTIES
    INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
endif ()

include ("${CMAKE_CURRENT_LIST_DIR}/RippleTargets.cmake")
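# Illustrative downstream consumption of this package config (the exported
# target name is assumed; RippleTargets.cmake defines the Ripple:: namespace):
#   find_package(Ripple REQUIRED)
#   target_link_libraries(my_app PRIVATE Ripple::xrpl.libxrpl)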
189  cmake/RippledCompiler.cmake  Normal file

@@ -0,0 +1,189 @@
#[===================================================================[
   setup project-wide compiler settings
#]===================================================================]

#[=========================================================[
   TODO some/most of these common settings belong in a
   toolchain file, especially the ABI-impacting ones
#]=========================================================]
add_library (common INTERFACE)
add_library (Ripple::common ALIAS common)
# add a single global dependency on this interface lib
link_libraries (Ripple::common)
set_target_properties (common
  PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ON)
set(CMAKE_CXX_EXTENSIONS OFF)
target_compile_definitions (common
  INTERFACE
    $<$<CONFIG:Debug>:DEBUG _DEBUG>
    $<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>)
# ^^^^ NOTE: CMAKE release builds already have NDEBUG
# defined, so no need to add it explicitly except for
# this special case of (profile ON) and (assert OFF)
# -- presumably this is because we don't want profile
# builds asserting unless asserts were specifically
# requested

if (MSVC)
  # remove existing exception flag since we set it to -EHa
  string (REGEX REPLACE "[-/]EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")

  foreach (var_
      CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
      CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)

    # also remove dynamic runtime
    string (REGEX REPLACE "[-/]MD[d]*" " " ${var_} "${${var_}}")

    # /ZI (Edit & Continue debugging information) is incompatible with Gy-
    string (REPLACE "/ZI" "/Zi" ${var_} "${${var_}}")

    # omit debug info completely under CI (not needed)
    if (is_ci)
      string (REPLACE "/Zi" " " ${var_} "${${var_}}")
    endif ()
  endforeach ()

  target_compile_options (common
    INTERFACE
    -bigobj            # Increase object file max size
    -fp:precise        # Floating point behavior
    -Gd                # __cdecl calling convention
    -Gm-               # Minimal rebuild: disabled
    -Gy-               # Function level linking: disabled
    -MP                # Multiprocessor compilation
    -openmp-           # pragma omp: disabled
    -errorReport:none  # No error reporting to Internet
    -nologo            # Suppress startup banner
    -wd4018            # Disable signed/unsigned comparison warnings
    -wd4244            # Disable float to int possible loss of data warnings
    -wd4267            # Disable size_t to T possible loss of data warnings
    -wd4800            # Disable C4800(int to bool performance)
    -wd4503            # Decorated name length exceeded, name was truncated
    $<$<COMPILE_LANGUAGE:CXX>:
      -EHa
      -GR
    >
    $<$<CONFIG:Release>:-Ox>
    $<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:
      -GS
      -Zc:forScope
    >
    # static runtime
    $<$<CONFIG:Debug>:-MTd>
    $<$<NOT:$<CONFIG:Debug>>:-MT>
    $<$<BOOL:${werr}>:-WX>
  )
  target_compile_definitions (common
    INTERFACE
    _WIN32_WINNT=0x6000
    _SCL_SECURE_NO_WARNINGS
    _CRT_SECURE_NO_WARNINGS
    WIN32_CONSOLE
    WIN32_LEAN_AND_MEAN
    NOMINMAX
    # TODO: Resolve these warnings, don't just silence them
    _SILENCE_ALL_CXX17_DEPRECATION_WARNINGS
    $<$<AND:$<COMPILE_LANGUAGE:CXX>,$<CONFIG:Debug>>:_CRTDBG_MAP_ALLOC>)
  target_link_libraries (common
    INTERFACE
    -errorreport:none
    -machine:X64)
else ()
  target_compile_options (common
    INTERFACE
    -Wall
    -Wdeprecated
    $<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
    $<$<BOOL:${werr}>:-Werror>
    -fstack-protector
    -Wno-sign-compare
    -Wno-unused-but-set-variable
    $<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
    # tweak gcc optimization for debug
    $<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>
    # Add debug symbols to release config
    $<$<CONFIG:Release>:-g>)
  target_link_libraries (common
    INTERFACE
    -rdynamic
    $<$<BOOL:${is_linux}>:-Wl,-z,relro,-z,now,--build-id>
    # link to static libc/c++ iff:
    #   * static option set and
    #   * NOT APPLE (AppleClang does not support static libc/c++) and
    #   * NOT san (sanitizers typically don't work with static libc/c++)
    $<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${san}>>>:
      -static-libstdc++
      -static-libgcc
    >)
endif ()

# Antithesis instrumentation will only be built and deployed using machines running Linux.
if (voidstar)
  if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
    message(FATAL_ERROR "Antithesis instrumentation requires Debug build type, aborting...")
  elseif (NOT is_linux)
    message(FATAL_ERROR "Antithesis instrumentation requires Linux, aborting...")
  elseif (NOT (is_clang AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 16.0))
    message(FATAL_ERROR "Antithesis instrumentation requires Clang version 16 or later, aborting...")
  endif ()
endif ()

if (use_mold)
  # use mold linker if available
  execute_process (
    COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=mold -Wl,--version
    ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
  if ("${LD_VERSION}" MATCHES "mold")
    target_link_libraries (common INTERFACE -fuse-ld=mold)
  endif ()
  unset (LD_VERSION)
elseif (use_gold AND is_gcc)
  # use gold linker if available
  execute_process (
    COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=gold -Wl,--version
    ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
  #[=========================================================[
     NOTE: THE gold linker inserts -rpath as DT_RUNPATH by
     default instead of DT_RPATH, so you might have slightly
     unexpected runtime ld behavior if you were expecting
     DT_RPATH. Specify --disable-new-dtags to gold if you do
     not want the default DT_RUNPATH behavior. This rpath
     treatment as well as static/dynamic selection means that
     gold does not currently have ideal default behavior when
     we are using jemalloc. Thus for simplicity we don't use
     it when jemalloc is requested. An alternative to
     disabling would be to figure out all the settings
     required to make gold play nicely with jemalloc.
  #]=========================================================]
  if (("${LD_VERSION}" MATCHES "GNU gold") AND (NOT jemalloc))
    target_link_libraries (common
      INTERFACE
      -fuse-ld=gold
      -Wl,--no-as-needed
      #[=========================================================[
         see https://bugs.launchpad.net/ubuntu/+source/eglibc/+bug/1253638/comments/5
         DT_RUNPATH does not work great for transitive
         dependencies (of which boost has a few) - so just
         switch to DT_RPATH if doing dynamic linking with gold
      #]=========================================================]
      $<$<NOT:$<BOOL:${static}>>:-Wl,--disable-new-dtags>)
  endif ()
  unset (LD_VERSION)
elseif (use_lld)
  # use lld linker if available
  execute_process (
    COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version
    ERROR_QUIET OUTPUT_VARIABLE LD_VERSION)
  if ("${LD_VERSION}" MATCHES "LLD")
    target_link_libraries (common INTERFACE -fuse-ld=lld)
  endif ()
  unset (LD_VERSION)
endif()

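# Illustrative configure lines exercising the linker selection above (the
# cache variables are the ones this file tests; only one applies per build):
#   cmake -Duse_mold=ON ..           # prefer mold when installed
#   cmake -Duse_gold=ON ..           # gcc + GNU gold (skipped with jemalloc)
#   cmake -Duse_lld=ON ..            # clang + lld
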
if (assert)
  foreach (var_ CMAKE_C_FLAGS_RELEASE CMAKE_CXX_FLAGS_RELEASE)
    STRING (REGEX REPLACE "[-/]DNDEBUG" "" ${var_} "${${var_}}")
  endforeach ()
endif ()
194  cmake/RippledCore.cmake  Normal file

@@ -0,0 +1,194 @@
#[===================================================================[
   Exported targets.
#]===================================================================]

include(target_protobuf_sources)

# Protocol buffers cannot participate in a unity build,
# because all the generated sources
# define a bunch of `static const` variables with the same names,
# so we just build them as a separate library.
add_library(xrpl.libpb)
set_target_properties(xrpl.libpb PROPERTIES UNITY_BUILD OFF)
target_protobuf_sources(xrpl.libpb xrpl/proto
  LANGUAGE cpp
  IMPORT_DIRS include/xrpl/proto
  PROTOS include/xrpl/proto/ripple.proto
)

file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
target_protobuf_sources(xrpl.libpb xrpl/proto
  LANGUAGE cpp
  IMPORT_DIRS include/xrpl/proto
  PROTOS "${protos}"
)
target_protobuf_sources(xrpl.libpb xrpl/proto
  LANGUAGE grpc
  IMPORT_DIRS include/xrpl/proto
  PROTOS "${protos}"
  PLUGIN protoc-gen-grpc=$<TARGET_FILE:gRPC::grpc_cpp_plugin>
  GENERATE_EXTENSIONS .grpc.pb.h .grpc.pb.cc
)

target_compile_options(xrpl.libpb
  PUBLIC
    $<$<BOOL:${MSVC}>:-wd4996>
    $<$<BOOL:${XCODE}>:
      --system-header-prefix="google/protobuf"
      -Wno-deprecated-dynamic-exception-spec
    >
  PRIVATE
    $<$<BOOL:${MSVC}>:-wd4065>
    $<$<NOT:$<BOOL:${MSVC}>>:-Wno-deprecated-declarations>
)

target_link_libraries(xrpl.libpb
  PUBLIC
    protobuf::libprotobuf
    gRPC::grpc++
)

# TODO: Clean up the number of library targets later.
add_library(xrpl.imports.main INTERFACE)

target_link_libraries(xrpl.imports.main
  INTERFACE
    LibArchive::LibArchive
    OpenSSL::Crypto
    Ripple::boost
    Ripple::opts
    Ripple::syslibs
    absl::random_random
    date::date
    ed25519::ed25519
    secp256k1::secp256k1
    xrpl.libpb
    xxHash::xxhash
    $<$<BOOL:${voidstar}>:antithesis-sdk-cpp>
)

include(add_module)
include(target_link_modules)

# Level 01
add_module(xrpl beast)
target_link_libraries(xrpl.libxrpl.beast PUBLIC
  xrpl.imports.main
  xrpl.libpb
)

# Level 02
add_module(xrpl basics)
target_link_libraries(xrpl.libxrpl.basics PUBLIC xrpl.libxrpl.beast)

# Level 03
add_module(xrpl json)
target_link_libraries(xrpl.libxrpl.json PUBLIC xrpl.libxrpl.basics)

add_module(xrpl crypto)
target_link_libraries(xrpl.libxrpl.crypto PUBLIC xrpl.libxrpl.basics)

# Level 04
add_module(xrpl protocol)
target_link_libraries(xrpl.libxrpl.protocol PUBLIC
  xrpl.libxrpl.crypto
  xrpl.libxrpl.json
)

# Level 05
add_module(xrpl resource)
target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)

add_module(xrpl server)
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)


add_library(xrpl.libxrpl)
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)

add_library(xrpl::libxrpl ALIAS xrpl.libxrpl)

file(GLOB_RECURSE sources CONFIGURE_DEPENDS
  "${CMAKE_CURRENT_SOURCE_DIR}/src/libxrpl/*.cpp"
)
target_sources(xrpl.libxrpl PRIVATE ${sources})

target_link_modules(xrpl PUBLIC
  basics
  beast
  crypto
  json
  protocol
  resource
  server
)

# All headers in libxrpl are in modules.
# Uncomment this stanza if you have not yet moved new headers into a module.
# target_include_directories(xrpl.libxrpl
#   PRIVATE
#     $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
#   PUBLIC
#     $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
#     $<INSTALL_INTERFACE:include>)

if(xrpld)
  add_executable(rippled)
  if(tests)
    target_compile_definitions(rippled PUBLIC ENABLE_TESTS)
    target_compile_definitions(rippled PRIVATE
      UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}
    )
  endif()
  target_include_directories(rippled
    PRIVATE
      $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
  )

  file(GLOB_RECURSE sources CONFIGURE_DEPENDS
    "${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp"
  )
  target_sources(rippled PRIVATE ${sources})

  if(tests)
    file(GLOB_RECURSE sources CONFIGURE_DEPENDS
      "${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp"
    )
    target_sources(rippled PRIVATE ${sources})
  endif()

  target_link_libraries(rippled
    Ripple::boost
    Ripple::opts
    Ripple::libs
    xrpl.libxrpl
  )
  exclude_if_included(rippled)
  # define a macro for tests that might need to
  # be excluded or run differently in CI environment
  if(is_ci)
    target_compile_definitions(rippled PRIVATE RIPPLED_RUNNING_IN_CI)
  endif ()

  if(voidstar)
    target_compile_options(rippled
      PRIVATE
        -fsanitize-coverage=trace-pc-guard
    )
    # rippled requires access to antithesis-sdk-cpp implementation file
    # antithesis_instrumentation.h, which is not exported as INTERFACE
    target_include_directories(rippled
      PRIVATE
        ${CMAKE_SOURCE_DIR}/external/antithesis-sdk
    )
  endif()

  # any files that don't play well with unity should be added here
  if(tests)
    set_source_files_properties(
      # these two seem to produce conflicts in beast teardown template methods
      src/test/rpc/ValidatorRPC_test.cpp
      src/test/ledger/Invariants_test.cpp
      PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE)
  endif()
endif()
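# Illustrative configure line enabling the targets above (option names as
# used in this file; defaults are set by the top-level CMakeLists.txt):
#   cmake -Dxrpld=ON -Dtests=ON ..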
38  cmake/RippledCov.cmake  Normal file

@@ -0,0 +1,38 @@
#[===================================================================[
   coverage report target
#]===================================================================]

if(NOT coverage)
  message(FATAL_ERROR "Code coverage not enabled! Aborting ...")
endif()

if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
  message(WARNING "Code coverage on Windows is not supported, ignoring 'coverage' flag")
  return()
endif()

include(CodeCoverage)

# The instructions for these commands come from the `CodeCoverage` module,
# which was copied from https://github.com/bilke/cmake-modules, commit fb7d2a3,
# then locally changed (see CHANGES: section in `CodeCoverage.cmake`)

set(GCOVR_ADDITIONAL_ARGS ${coverage_extra_args})
if(NOT GCOVR_ADDITIONAL_ARGS STREQUAL "")
  separate_arguments(GCOVR_ADDITIONAL_ARGS)
endif()

list(APPEND GCOVR_ADDITIONAL_ARGS
  --exclude-throw-branches
  --exclude-noncode-lines
  --exclude-unreachable-branches -s
  -j ${coverage_test_parallelism})

setup_target_for_coverage_gcovr(
  NAME coverage
  FORMAT ${coverage_format}
  EXECUTABLE rippled
  EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
  EXCLUDE "src/test" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
  DEPENDENCIES rippled
)
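# Illustrative build driving the target above (variable names as used in this
# file; the format value is one example of those accepted by CodeCoverage):
#   cmake -Dcoverage=ON -Dcoverage_format=html-details -DCMAKE_BUILD_TYPE=Debug ..
#   cmake --build . --target coverage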
85  cmake/RippledDocs.cmake  Normal file

@@ -0,0 +1,85 @@
#[===================================================================[
   docs target (optional)
#]===================================================================]

option(with_docs "Include the docs target?" FALSE)

if(NOT (with_docs OR only_docs))
  return()
endif()

find_package(Doxygen)
if(NOT TARGET Doxygen::doxygen)
  message(STATUS "doxygen executable not found -- skipping docs target")
  return()
endif()

set(doxygen_output_directory "${CMAKE_BINARY_DIR}/docs")
set(doxygen_include_path "${CMAKE_CURRENT_SOURCE_DIR}/src")
set(doxygen_index_file "${doxygen_output_directory}/html/index.html")
set(doxyfile "${CMAKE_CURRENT_SOURCE_DIR}/docs/Doxyfile")

file(GLOB_RECURSE doxygen_input
  docs/*.md
  include/*.h
  include/*.cpp
  include/*.md
  src/*.h
  src/*.cpp
  src/*.md
  Builds/*.md
  *.md)
list(APPEND doxygen_input
  external/README.md
)
set(dependencies "${doxygen_input}" "${doxyfile}")

function(verbose_find_path variable name)
  # find_path sets a CACHE variable, so don't try using a "local" variable.
  find_path(${variable} "${name}" ${ARGN})
  if(NOT ${variable})
    message(NOTICE "could not find ${name}")
  else()
    message(STATUS "found ${name}: ${${variable}}/${name}")
  endif()
endfunction()

verbose_find_path(doxygen_plantuml_jar_path plantuml.jar PATH_SUFFIXES share/plantuml)
verbose_find_path(doxygen_dot_path dot)

# https://en.cppreference.com/w/Cppreference:Archives
# https://stackoverflow.com/questions/60822559/how-to-move-a-file-download-from-configure-step-to-build-step
set(download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake")
file(WRITE
  "${download_script}"
  "file(DOWNLOAD \
    https://github.com/PeterFeicht/cppreference-doc/releases/download/v20250209/html-book-20250209.zip \
    ${CMAKE_BINARY_DIR}/docs/cppreference.zip \
    EXPECTED_HASH MD5=bda585f72fbca4b817b29a3d5746567b \
  )\n \
  execute_process( \
    COMMAND \"${CMAKE_COMMAND}\" -E tar -xf cppreference.zip \
  )\n"
)
set(tagfile "${CMAKE_BINARY_DIR}/docs/cppreference-doxygen-web.tag.xml")
add_custom_command(
  OUTPUT "${tagfile}"
  COMMAND "${CMAKE_COMMAND}" -P "${download_script}"
  WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/docs"
)
set(doxygen_tagfiles "${tagfile}=http://en.cppreference.com/w/")

add_custom_command(
  OUTPUT "${doxygen_index_file}"
  COMMAND "${CMAKE_COMMAND}" -E env
    "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}"
    "DOXYGEN_INCLUDE_PATH=${doxygen_include_path}"
    "DOXYGEN_TAGFILES=${doxygen_tagfiles}"
    "DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}"
    "DOXYGEN_DOT_PATH=${doxygen_dot_path}"
    "${DOXYGEN_EXECUTABLE}" "${doxyfile}"
  WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
  DEPENDS "${dependencies}" "${tagfile}")
add_custom_target(docs
  DEPENDS "${doxygen_index_file}"
  SOURCES "${dependencies}")
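# Illustrative use of the optional target above:
#   cmake -Dwith_docs=ON ..
#   cmake --build . --target docs   # HTML lands in ${CMAKE_BINARY_DIR}/docs/html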
81  cmake/RippledInstall.cmake  Normal file

@@ -0,0 +1,81 @@
#[===================================================================[
   install stuff
#]===================================================================]

include(create_symbolic_link)

install (
  TARGETS
    common
    opts
    ripple_syslibs
    ripple_boost
    xrpl.imports.main
    xrpl.libpb
    xrpl.libxrpl.basics
    xrpl.libxrpl.beast
    xrpl.libxrpl.crypto
    xrpl.libxrpl.json
    xrpl.libxrpl.protocol
    xrpl.libxrpl.resource
    xrpl.libxrpl.server
    xrpl.libxrpl
    antithesis-sdk-cpp
  EXPORT RippleExports
  LIBRARY DESTINATION lib
  ARCHIVE DESTINATION lib
  RUNTIME DESTINATION bin
  INCLUDES DESTINATION include)

install(
  DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/xrpl"
  DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}"
)

install(CODE "
  set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
  include(create_symbolic_link)
  create_symbolic_link(xrpl \
    \${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
")

install (EXPORT RippleExports
  FILE RippleTargets.cmake
  NAMESPACE Ripple::
  DESTINATION lib/cmake/ripple)
include (CMakePackageConfigHelpers)
write_basic_package_version_file (
  RippleConfigVersion.cmake
  VERSION ${rippled_version}
  COMPATIBILITY SameMajorVersion)

if (is_root_project AND TARGET rippled)
  install (TARGETS rippled RUNTIME DESTINATION bin)
  set_target_properties(rippled PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
  # sample configs should not overwrite existing files
  # install if-not-exists workaround as suggested by
  # https://cmake.org/Bug/view.php?id=12646
  install(CODE "
    macro (copy_if_not_exists SRC DEST NEWNAME)
      if (NOT EXISTS \"\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\")
        file (INSTALL FILE_PERMISSIONS OWNER_READ OWNER_WRITE DESTINATION \"\${CMAKE_INSTALL_PREFIX}/\${DEST}\" FILES \"\${SRC}\" RENAME \"\${NEWNAME}\")
      else ()
        message (\"-- Skipping : \$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/\${DEST}/\${NEWNAME}\")
      endif ()
    endmacro()
    copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/rippled-example.cfg\" etc rippled.cfg)
    copy_if_not_exists(\"${CMAKE_CURRENT_SOURCE_DIR}/cfg/validators-example.txt\" etc validators.txt)
  ")
  install(CODE "
    set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
    include(create_symbolic_link)
    create_symbolic_link(rippled${suffix} \
      \${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
  ")
endif ()

install (
  FILES
    ${CMAKE_CURRENT_SOURCE_DIR}/cmake/RippleConfig.cmake
    ${CMAKE_CURRENT_BINARY_DIR}/RippleConfigVersion.cmake
  DESTINATION lib/cmake/ripple)
cmake/RippledInterface.cmake (new file, 101 lines)
@@ -0,0 +1,101 @@
#[===================================================================[
   rippled compile options/settings via an interface library
#]===================================================================]

add_library (opts INTERFACE)
add_library (Ripple::opts ALIAS opts)
target_compile_definitions (opts
  INTERFACE
    BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
    BOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT
    BOOST_CONTAINER_FWD_BAD_DEQUE
    HAS_UNCAUGHT_EXCEPTIONS=1
    $<$<BOOL:${boost_show_deprecated}>:
      BOOST_ASIO_NO_DEPRECATED
      BOOST_FILESYSTEM_NO_DEPRECATED
    >
    $<$<NOT:$<BOOL:${boost_show_deprecated}>>:
      BOOST_COROUTINES_NO_DEPRECATION_WARNING
      BOOST_BEAST_ALLOW_DEPRECATED
      BOOST_FILESYSTEM_DEPRECATED
    >
    $<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
    $<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
    $<$<BOOL:${single_io_service_thread}>:RIPPLE_SINGLE_IO_SERVICE_THREAD=1>
    $<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
target_compile_options (opts
  INTERFACE
    $<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
    $<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
    $<$<BOOL:${perf}>:-fno-omit-frame-pointer>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
    $<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
    $<$<BOOL:${profile}>:-pg>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

target_link_libraries (opts
  INTERFACE
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
    $<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
    $<$<BOOL:${profile}>:-pg>
    $<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)

if(jemalloc)
  find_package(jemalloc REQUIRED)
  target_compile_definitions(opts INTERFACE PROFILE_JEMALLOC)
  target_link_libraries(opts INTERFACE jemalloc::jemalloc)
endif ()

if (san)
  target_compile_options (opts
    INTERFACE
      # sanitizers recommend minimum of -O1 for reasonable performance
      $<$<CONFIG:Debug>:-O1>
      ${SAN_FLAG}
      -fno-omit-frame-pointer)
  target_compile_definitions (opts
    INTERFACE
      $<$<STREQUAL:${san},address>:SANITIZER=ASAN>
      $<$<STREQUAL:${san},thread>:SANITIZER=TSAN>
      $<$<STREQUAL:${san},memory>:SANITIZER=MSAN>
      $<$<STREQUAL:${san},undefined>:SANITIZER=UBSAN>)
  target_link_libraries (opts INTERFACE ${SAN_FLAG} ${SAN_LIB})
endif ()

#[===================================================================[
   rippled transitive library deps via an interface library
#]===================================================================]

add_library (ripple_syslibs INTERFACE)
add_library (Ripple::syslibs ALIAS ripple_syslibs)
target_link_libraries (ripple_syslibs
  INTERFACE
    $<$<BOOL:${MSVC}>:
      legacy_stdio_definitions.lib
      Shlwapi
      kernel32
      user32
      gdi32
      winspool
      comdlg32
      advapi32
      shell32
      ole32
      oleaut32
      uuid
      odbc32
      odbccp32
      crypt32
    >
    $<$<NOT:$<BOOL:${MSVC}>>:dl>
    $<$<NOT:$<OR:$<BOOL:${MSVC}>,$<BOOL:${APPLE}>>>:rt>)

if (NOT MSVC)
  set (THREADS_PREFER_PTHREAD_FLAG ON)
  find_package (Threads)
  target_link_libraries (ripple_syslibs INTERFACE Threads::Threads)
endif ()

add_library (ripple_libs INTERFACE)
add_library (Ripple::libs ALIAS ripple_libs)
target_link_libraries (ripple_libs INTERFACE Ripple::syslibs)
cmake/RippledSanity.cmake (new file, 70 lines)
@@ -0,0 +1,70 @@
#[===================================================================[
   convenience variables and sanity checks
#]===================================================================]

get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)

set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)
if (NOT is_multiconfig)
  if (NOT CMAKE_BUILD_TYPE)
    message (STATUS "Build type not specified - defaulting to Release")
    set (CMAKE_BUILD_TYPE Release CACHE STRING "build type" FORCE)
  elseif (NOT (CMAKE_BUILD_TYPE STREQUAL Debug OR CMAKE_BUILD_TYPE STREQUAL Release))
    # for simplicity, these are the only two config types we care about. Limiting
    # the build types simplifies dealing with external project builds especially
    message (FATAL_ERROR " *** Only Debug or Release build types are currently supported ***")
  endif ()
endif ()

get_directory_property(has_parent PARENT_DIRECTORY)
if (has_parent)
  set (is_root_project OFF)
else ()
  set (is_root_project ON)
endif ()

if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang") # both Clang and AppleClang
  set (is_clang TRUE)
  if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" AND
      CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
    message (FATAL_ERROR "This project requires clang 8 or later")
  endif ()
  # TODO min AppleClang version check ?
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
  set (is_gcc TRUE)
  if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8.0)
    message (FATAL_ERROR "This project requires GCC 8 or later")
  endif ()
endif ()

if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
  set (is_linux TRUE)
else ()
  set (is_linux FALSE)
endif ()

if ("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
  set (is_ci TRUE)
else ()
  set (is_ci FALSE)
endif ()

# check for in-source build and fail
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
  message (FATAL_ERROR "Builds (in-source) are not allowed in "
    "${CMAKE_CURRENT_SOURCE_DIR}. Please remove CMakeCache.txt and the CMakeFiles "
    "directory from ${CMAKE_CURRENT_SOURCE_DIR} and try building in a separate directory.")
endif ()

if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
  message (FATAL_ERROR "Visual Studio 32-bit build is not supported.")
endif ()

if (NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
  message (FATAL_ERROR "Rippled requires a 64 bit target architecture.\n"
    "The most likely cause of this warning is trying to build rippled with a 32-bit OS.")
endif ()

if (APPLE AND NOT HOMEBREW)
  find_program (HOMEBREW brew)
endif ()
cmake/RippledSettings.cmake (new file, 140 lines)
@@ -0,0 +1,140 @@
#[===================================================================[
   declare user options/settings
#]===================================================================]

include(ProcessorCount)

ProcessorCount(PROCESSOR_COUNT)

option(assert "Enables asserts, even in release builds" OFF)

option(xrpld "Build xrpld" ON)

option(tests "Build tests" ON)
if(tests)
  # This setting allows making a separate workflow to test fees other than default 10
  if(NOT UNIT_TEST_REFERENCE_FEE)
    set(UNIT_TEST_REFERENCE_FEE "10" CACHE STRING "")
  endif()
endif()

option(unity "Creates a build using UNITY support in cmake." OFF)
if(unity)
  if(NOT is_ci)
    set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
  endif()
  set(CMAKE_UNITY_BUILD ON CACHE BOOL "Do a unity build")
endif()
if(is_clang AND is_linux)
  option(voidstar "Enable Antithesis instrumentation." OFF)
endif()
if(is_gcc OR is_clang)
  option(coverage "Generates coverage info." OFF)
  option(profile "Add profiling flags" OFF)
  set(coverage_test_parallelism "${PROCESSOR_COUNT}" CACHE STRING
    "Unit tests parallelism for the purpose of coverage report.")
  set(coverage_format "html-details" CACHE STRING
    "Output format of the coverage report.")
  set(coverage_extra_args "" CACHE STRING
    "Additional arguments to pass to gcovr.")
  set(coverage_test "" CACHE STRING
    "On gcc & clang, the specific unit test(s) to run for coverage. Default is all tests.")
  if(coverage_test AND NOT coverage)
    set(coverage ON CACHE BOOL "gcc/clang only" FORCE)
  endif()
  option(wextra "compile with extra gcc/clang warnings enabled" ON)
else()
  set(profile OFF CACHE BOOL "gcc/clang only" FORCE)
  set(coverage OFF CACHE BOOL "gcc/clang only" FORCE)
  set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
endif()
if(is_linux)
  option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
  option(static "link protobuf, openssl, libc++, and boost statically" ON)
  option(perf "Enables flags that assist with perf recording" OFF)
  option(use_gold "enables detection of gold (binutils) linker" ON)
  option(use_mold "enables detection of mold (binutils) linker" ON)
else()
  # we are not ready to allow shared-libs on windows because it would require
  # export declarations. On macos it's more feasible, but static openssl
  # produces odd linker errors, thus we disable shared lib builds for now.
  set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
  set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
  set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
  set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
  set(use_mold OFF CACHE BOOL "mold linker, linux only" FORCE)
endif()
if(is_clang)
  option(use_lld "enables detection of lld linker" ON)
else()
  set(use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif()
option(jemalloc "Enables jemalloc for heap profiling" OFF)
option(werr "treat warnings as errors" OFF)
option(local_protobuf
  "Force a local build of protobuf instead of looking for an installed version." OFF)
option(local_grpc
  "Force a local build of gRPC instead of looking for an installed version." OFF)

# this one is a string and therefore can't be an option
set(san "" CACHE STRING "On gcc & clang, add sanitizer instrumentation")
set_property(CACHE san PROPERTY STRINGS ";undefined;memory;address;thread")
if(san)
  string(TOLOWER ${san} san)
  set(SAN_FLAG "-fsanitize=${san}")
  set(SAN_LIB "")
  if(is_gcc)
    if(san STREQUAL "address")
      set(SAN_LIB "asan")
    elseif(san STREQUAL "thread")
      set(SAN_LIB "tsan")
    elseif(san STREQUAL "memory")
      set(SAN_LIB "msan")
    elseif(san STREQUAL "undefined")
      set(SAN_LIB "ubsan")
    endif()
  endif()
  set(_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
  set(CMAKE_REQUIRED_LIBRARIES "${SAN_FLAG};${SAN_LIB}")
  check_cxx_compiler_flag(${SAN_FLAG} COMPILER_SUPPORTS_SAN)
  set(CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
  if(NOT COMPILER_SUPPORTS_SAN)
    message(FATAL_ERROR "${san} sanitizer does not seem to be supported by your compiler")
  endif()
endif()
set(container_label "" CACHE STRING "tag to use for package building containers")
option(packages_only
  "ONLY generate package building targets. This is special use-case and almost \
certainly not what you want. Use with caution as you won't be able to build \
any compiled targets locally." OFF)
option(have_package_container
  "Sometimes you already have the tagged container you want to use for package \
building and you don't want docker to rebuild it. This flag will detach the \
dependency of the package build from the container build. It's an advanced \
use case and most likely you should not be touching this flag." OFF)

# the remaining options are obscure and rarely used
option(beast_no_unit_test_inline
  "Prevents unit test definitions from being inserted into global table"
  OFF)
option(single_io_service_thread
  "Restricts the number of threads calling io_service::run to one. \
This can be useful when debugging."
  OFF)
option(boost_show_deprecated
  "Allow boost to fail on deprecated usage. Only useful if you're trying\
to find deprecated calls."
  OFF)
option(beast_hashers
  "Use local implementations for sha/ripemd hashes (experimental, not recommended)"
  OFF)

if(WIN32)
  option(beast_disable_autolink "Disables autolinking of system libraries on WIN32" OFF)
else()
  set(beast_disable_autolink OFF CACHE BOOL "WIN32 only" FORCE)
endif()
if(coverage)
  message(STATUS "coverage build requested - forcing Debug build")
  set(CMAKE_BUILD_TYPE Debug CACHE STRING "build type" FORCE)
endif()
cmake/RippledValidatorKeys.cmake (new file, 22 lines)
@@ -0,0 +1,22 @@
option (validator_keys "Enables building of validator-keys-tool as a separate target (imported via FetchContent)" OFF)

if (validator_keys)
  git_branch (current_branch)
  # default to tracking VK master branch unless we are on release
  if (NOT (current_branch STREQUAL "release"))
    set (current_branch "master")
  endif ()
  message (STATUS "tracking ValidatorKeys branch: ${current_branch}")

  FetchContent_Declare (
    validator_keys_src
    GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
    GIT_TAG "${current_branch}"
  )
  FetchContent_GetProperties (validator_keys_src)
  if (NOT validator_keys_src_POPULATED)
    message (STATUS "Pausing to download ValidatorKeys...")
    FetchContent_Populate (validator_keys_src)
  endif ()
  add_subdirectory (${validator_keys_src_SOURCE_DIR} ${CMAKE_BINARY_DIR}/validator-keys)
endif ()
cmake/RippledVersion.cmake (new file, 15 lines)
@@ -0,0 +1,15 @@
#[===================================================================[
   read version from source
#]===================================================================]

file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO)
foreach(line_ ${BUILD_INFO})
  if(line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
    set(rippled_version ${CMAKE_MATCH_1})
  endif()
endforeach()
if(rippled_version)
  message(STATUS "rippled version: ${rippled_version}")
else()
  message(FATAL_ERROR "unable to determine rippled version")
endif()
cmake/add_module.cmake (new file, 37 lines)
@@ -0,0 +1,37 @@
include(isolate_headers)

# Create an OBJECT library target named
#
#   ${PROJECT_NAME}.lib${parent}.${name}
#
# with sources in src/lib${parent}/${name}
# and headers in include/${parent}/${name}
# that cannot include headers from other directories in include/
# unless they come through linked libraries.
#
# add_module(parent a)
# add_module(parent b)
# target_link_libraries(project.libparent.b PUBLIC project.libparent.a)
function(add_module parent name)
  set(target ${PROJECT_NAME}.lib${parent}.${name})
  add_library(${target} OBJECT)
  file(GLOB_RECURSE sources CONFIGURE_DEPENDS
    "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}/*.cpp"
  )
  target_sources(${target} PRIVATE ${sources})
  target_include_directories(${target} PUBLIC
    "$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>"
  )
  isolate_headers(
    ${target}
    "${CMAKE_CURRENT_SOURCE_DIR}/include"
    "${CMAKE_CURRENT_SOURCE_DIR}/include/${parent}/${name}"
    PUBLIC
  )
  isolate_headers(
    ${target}
    "${CMAKE_CURRENT_SOURCE_DIR}/src"
    "${CMAKE_CURRENT_SOURCE_DIR}/src/lib${parent}/${name}"
    PRIVATE
  )
endfunction()
cmake/create_symbolic_link.cmake (new file, 20 lines)
@@ -0,0 +1,20 @@
# file(CREATE_SYMLINK) only works on Windows with administrator privileges.
# https://stackoverflow.com/a/61244115/618906
function(create_symbolic_link target link)
  if(WIN32)
    if(NOT IS_SYMLINK "${link}")
      if(NOT IS_ABSOLUTE "${target}")
        # Relative links do not work on Windows.
        set(target "${link}/../${target}")
      endif()
      file(TO_NATIVE_PATH "${target}" target)
      file(TO_NATIVE_PATH "${link}" link)
      execute_process(COMMAND cmd.exe /c mklink /J "${link}" "${target}")
    endif()
  else()
    file(CREATE_LINK "${target}" "${link}" SYMBOLIC)
  endif()
  if(NOT IS_SYMLINK "${link}")
    message(SEND_ERROR "failed to create symlink: <${link}>")
  endif()
endfunction()
cmake/deps/Boost.cmake (new file, 52 lines)
@@ -0,0 +1,52 @@
find_package(Boost 1.82 REQUIRED
  COMPONENTS
    chrono
    container
    coroutine
    date_time
    filesystem
    json
    program_options
    regex
    system
    thread
)

add_library(ripple_boost INTERFACE)
add_library(Ripple::boost ALIAS ripple_boost)
if(XCODE)
  target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
  target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
else()
  target_include_directories(ripple_boost SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
endif()

target_link_libraries(ripple_boost
  INTERFACE
    Boost::headers
    Boost::chrono
    Boost::container
    Boost::coroutine
    Boost::date_time
    Boost::filesystem
    Boost::json
    Boost::program_options
    Boost::regex
    Boost::system
    Boost::thread)
if(Boost_COMPILER)
  target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
endif()
if(san AND is_clang)
  # TODO: gcc does not support -fsanitize-blacklist...can we do something else
  # for gcc ?
  if(NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
    get_target_property(Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
  endif()
  message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
  file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
  target_compile_options(opts
    INTERFACE
      # ignore boost headers for sanitizing
      -fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
endif()
cmake/isolate_headers.cmake (new file, 48 lines)
@@ -0,0 +1,48 @@
include(create_symbolic_link)

# Consider include directory B nested under prefix A:
#
#   /path/to/A/then/to/B/...
#
# Call C the relative path from A to B.
# C is what we want to write in `#include` directives:
#
#   #include <then/to/B/...>
#
# Examples, all from the `jobqueue` module:
#
# - Library public headers:
#   B = /include/xrpl/jobqueue
#   A = /include/
#   C = xrpl/jobqueue
#
# - Library private headers:
#   B = /src/libxrpl/jobqueue
#   A = /src/
#   C = libxrpl/jobqueue
#
# - Test private headers:
#   B = /tests/jobqueue
#   A = /
#   C = tests/jobqueue
#
# To isolate headers from each other,
# we want to create a symlink Y that points to B,
# within a subdirectory X of the `CMAKE_BINARY_DIR`,
# that has the same relative path C between X and Y,
# and then add X as an include directory of the target,
# sometimes `PUBLIC` and sometimes `PRIVATE`.
# The Cs are all guaranteed to be unique.
# We can guarantee a unique X per target by using
# `${CMAKE_CURRENT_BINARY_DIR}/modules/${target}`.
#
# isolate_headers(target A B scope)
function(isolate_headers target A B scope)
  file(RELATIVE_PATH C "${A}" "${B}")
  set(X "${CMAKE_CURRENT_BINARY_DIR}/modules/${target}")
  set(Y "${X}/${C}")
  cmake_path(GET Y PARENT_PATH parent)
  file(MAKE_DIRECTORY "${parent}")
  create_symbolic_link("${B}" "${Y}")
  target_include_directories(${target} ${scope} "$<BUILD_INTERFACE:${X}>")
endfunction()
cmake/target_link_modules.cmake (new file, 24 lines)
@@ -0,0 +1,24 @@
# Link a library to its modules (see: `add_module`)
# and remove the module sources from the library's sources.
#
# add_module(parent a)
# add_module(parent b)
# target_link_libraries(project.libparent.b PUBLIC project.libparent.a)
# add_library(project.libparent)
# target_link_modules(parent PUBLIC a b)
function(target_link_modules parent scope)
  set(library ${PROJECT_NAME}.lib${parent})
  foreach(name ${ARGN})
    set(module ${library}.${name})
    get_target_property(sources ${library} SOURCES)
    list(LENGTH sources before)
    get_target_property(dupes ${module} SOURCES)
    list(LENGTH dupes expected)
    list(REMOVE_ITEM sources ${dupes})
    list(LENGTH sources after)
    math(EXPR actual "${before} - ${after}")
    message(STATUS "${module} with ${expected} sources took ${actual} sources from ${library}")
    set_target_properties(${library} PROPERTIES SOURCES "${sources}")
    target_link_libraries(${library} ${scope} ${module})
  endforeach()
endfunction()
cmake/target_protobuf_sources.cmake (new file, 62 lines)
@@ -0,0 +1,62 @@
find_package(Protobuf REQUIRED)

# .proto files import each other like this:
#
#   import "path/to/file.proto";
#
# For the protobuf compiler to find these imports,
# the parent directory of "path" must be in the import path.
#
# When generating C++,
# it turns into an include statement like this:
#
#   #include "path/to/file.pb.h"
#
# and the header is generated at a path relative to the output directory
# that matches the given .proto path relative to the source directory
# minus the first matching prefix on the import path.
#
# In other words, a file `include/package/path/to/file.proto`
# with import path [`include/package`, `include`]
# will generate files `output/path/to/file.pb.{h,cc}`
# with includes like `#include "path/to/file.pb.h"`.
#
# During build, the generated files can find each other if the output
# directory is an include directory, but we want to install that directory
# under our package's include directory (`include/package`), not as a sibling.
# After install, they can find each other if that subdirectory is an include
# directory.

# Add protocol buffer sources to an existing library target.
# target:
#   The name of the library target.
# prefix:
#   The install prefix for headers relative to `CMAKE_INSTALL_INCLUDEDIR`.
#   This prefix should appear at the start of all your consumer includes.
# ARGN:
#   A list of .proto files.
function(target_protobuf_sources target prefix)
  set(dir "${CMAKE_CURRENT_BINARY_DIR}/pb-${target}")
  file(MAKE_DIRECTORY "${dir}/${prefix}")

  protobuf_generate(
    TARGET ${target}
    PROTOC_OUT_DIR "${dir}/${prefix}"
    "${ARGN}"
  )
  target_include_directories(${target} SYSTEM PUBLIC
    # Allows #include <package/path/to/file.proto> used by consumer files.
    $<BUILD_INTERFACE:${dir}>
    # Allows #include "path/to/file.proto" used by generated files.
    $<BUILD_INTERFACE:${dir}/${prefix}>
    # Allows #include <package/path/to/file.proto> used by consumer files.
    $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
    # Allows #include "path/to/file.proto" used by generated files.
    $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/${prefix}>
  )
  install(
    DIRECTORY ${dir}/
    DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
    FILES_MATCHING PATTERN "*.h"
  )
endfunction()
cmake/xrpl_add_test.cmake (new file, 41 lines)
@@ -0,0 +1,41 @@
include(isolate_headers)

function(xrpl_add_test name)
  set(target ${PROJECT_NAME}.test.${name})

  file(GLOB_RECURSE sources CONFIGURE_DEPENDS
    "${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
    "${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
  )
  add_executable(${target} EXCLUDE_FROM_ALL ${ARGN} ${sources})

  isolate_headers(
    ${target}
    "${CMAKE_SOURCE_DIR}"
    "${CMAKE_SOURCE_DIR}/tests/${name}"
    PRIVATE
  )

  # Make sure the test isn't optimized away in unity builds
  set_target_properties(${target} PROPERTIES
    UNITY_BUILD_MODE GROUP
    UNITY_BUILD_BATCH_SIZE 0)  # Adjust as needed

  add_test(NAME ${target} COMMAND ${target})
  set_tests_properties(
    ${target} PROPERTIES
    FIXTURES_REQUIRED ${target}_fixture
  )

  add_test(
    NAME ${target}.build
    COMMAND
      ${CMAKE_COMMAND}
      --build ${CMAKE_BINARY_DIR}
      --config $<CONFIG>
      --target ${target}
  )
  set_tests_properties(${target}.build PROPERTIES
    FIXTURES_SETUP ${target}_fixture
  )
endfunction()
conan/profiles/default (new file, 34 lines)
@@ -0,0 +1,34 @@
{% set os = detect_api.detect_os() %}
{% set arch = detect_api.detect_arch() %}
{% set compiler, version, compiler_exe = detect_api.detect_default_compiler() %}
{% set compiler_version = version %}
{% if os == "Linux" %}
{% set compiler_version = detect_api.default_compiler_version(compiler, version) %}
{% endif %}

[settings]
os={{ os }}
arch={{ arch }}
build_type=Debug
compiler={{compiler}}
compiler.version={{ compiler_version }}
compiler.cppstd=20
{% if os == "Windows" %}
compiler.runtime=static
{% else %}
compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}
{% endif %}

[conf]
{% if compiler == "clang" and compiler_version >= 19 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "apple-clang" and compiler_version >= 17 %}
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
{% endif %}
{% if compiler == "gcc" and compiler_version < 13 %}
tools.build:cxxflags=['-Wno-restrict']
{% endif %}

[tool_requires]
!cmake/*: cmake/[>=3 <4]
conanfile.py (new file, 195 lines)
@@ -0,0 +1,195 @@
from conan import ConanFile, __version__ as conan_version
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
import re

class Xrpl(ConanFile):
    name = 'xrpl'

    license = 'ISC'
    author = 'John Freeman <jfreeman@ripple.com>'
    url = 'https://github.com/xrplf/rippled'
    description = 'The XRP Ledger'
    settings = 'os', 'compiler', 'build_type', 'arch'
    options = {
        'assertions': [True, False],
        'coverage': [True, False],
        'fPIC': [True, False],
        'jemalloc': [True, False],
        'rocksdb': [True, False],
        'shared': [True, False],
        'static': [True, False],
        'tests': [True, False],
        'unity': [True, False],
        'xrpld': [True, False],
    }

    requires = [
        'grpc/1.50.1',
        'libarchive/3.8.1',
        'nudb/2.0.9',
        'openssl/1.1.1w',
        'soci/4.0.3',
        'zlib/1.3.1',
    ]

    test_requires = [
        'doctest/2.4.11',
    ]

    tool_requires = [
        'protobuf/3.21.12',
    ]

    default_options = {
        'assertions': False,
        'coverage': False,
        'fPIC': True,
        'jemalloc': False,
        'rocksdb': True,
        'shared': False,
        'static': True,
        'tests': False,
        'unity': False,
        'xrpld': False,

        'date/*:header_only': True,
        'grpc/*:shared': False,
        'grpc/*:secure': True,
        'libarchive/*:shared': False,
        'libarchive/*:with_acl': False,
        'libarchive/*:with_bzip2': False,
        'libarchive/*:with_cng': False,
        'libarchive/*:with_expat': False,
        'libarchive/*:with_iconv': False,
        'libarchive/*:with_libxml2': False,
        'libarchive/*:with_lz4': True,
        'libarchive/*:with_lzma': False,
        'libarchive/*:with_lzo': False,
        'libarchive/*:with_nettle': False,
        'libarchive/*:with_openssl': False,
        'libarchive/*:with_pcreposix': False,
        'libarchive/*:with_xattr': False,
        'libarchive/*:with_zlib': False,
        'lz4/*:shared': False,
        'openssl/*:shared': False,
        'protobuf/*:shared': False,
        'protobuf/*:with_zlib': True,
        'rocksdb/*:enable_sse': False,
        'rocksdb/*:lite': False,
        'rocksdb/*:shared': False,
        'rocksdb/*:use_rtti': True,
        'rocksdb/*:with_jemalloc': False,
        'rocksdb/*:with_lz4': True,
        'rocksdb/*:with_snappy': True,
        'snappy/*:shared': False,
        'soci/*:shared': False,
        'soci/*:with_sqlite3': True,
        'soci/*:with_boost': True,
        'xxhash/*:shared': False,
    }

    def set_version(self):
        if self.version is None:
            path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
            regex = r'versionString\s?=\s?\"(.*)\"'
            with open(path, encoding='utf-8') as file:
                matches = (re.search(regex, line) for line in file)
                match = next(m for m in matches if m)
            self.version = match.group(1)

    def configure(self):
        if self.settings.compiler == 'apple-clang':
            self.options['boost'].visibility = 'global'

    def requirements(self):
        # Conan 2 requires transitive headers to be specified
        transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
        self.requires('boost/1.86.0', force=True, **transitive_headers_opt)
        self.requires('date/3.0.4', **transitive_headers_opt)
        self.requires('lz4/1.10.0', force=True)
        self.requires('protobuf/3.21.12', force=True)
        self.requires('sqlite3/3.49.1', force=True)
        if self.options.jemalloc:
            self.requires('jemalloc/5.3.0')
        if self.options.rocksdb:
            self.requires('rocksdb/10.0.1')
        self.requires('xxhash/0.8.3', **transitive_headers_opt)

    exports_sources = (
        'CMakeLists.txt',
        'bin/getRippledInfo',
        'cfg/*',
        'cmake/*',
        'external/*',
        'include/*',
        'src/*',
    )

    def layout(self):
        cmake_layout(self)
        # Fix this setting to follow the default introduced in Conan 1.48
        # to align with our build instructions.
        self.folders.generators = 'build/generators'

    generators = 'CMakeDeps'

    def generate(self):
        tc = CMakeToolchain(self)
        tc.variables['tests'] = self.options.tests
        tc.variables['assert'] = self.options.assertions
        tc.variables['coverage'] = self.options.coverage
        tc.variables['jemalloc'] = self.options.jemalloc
        tc.variables['rocksdb'] = self.options.rocksdb
        tc.variables['BUILD_SHARED_LIBS'] = self.options.shared
        tc.variables['static'] = self.options.static
        tc.variables['unity'] = self.options.unity
        tc.variables['xrpld'] = self.options.xrpld
        tc.generate()

    def build(self):
        cmake = CMake(self)
        cmake.verbose = True
        cmake.configure()
        cmake.build()

    def package(self):
        cmake = CMake(self)
        cmake.verbose = True
        cmake.install()

    def package_info(self):
        libxrpl = self.cpp_info.components['libxrpl']
        libxrpl.libs = [
            'xrpl',
            'xrpl.libpb',
            'ed25519',
            'secp256k1',
        ]
        # TODO: Fix the protobufs to include each other relative to
        # `include/`, not `include/ripple/proto/`.
        libxrpl.includedirs = ['include', 'include/ripple/proto']
        libxrpl.requires = [
            'boost::headers',
            'boost::chrono',
            'boost::container',
            'boost::coroutine',
            'boost::date_time',
            'boost::filesystem',
            'boost::json',
            'boost::program_options',
            'boost::regex',
            'boost::system',
            'boost::thread',
            'date::date',
            'grpc::grpc++',
            'libarchive::libarchive',
            'lz4::lz4',
            'nudb::nudb',
            'openssl::crypto',
            'protobuf::libprotobuf',
            'soci::soci',
            'sqlite3::sqlite',
            'xxhash::xxhash',
            'zlib::zlib',
        ]
        if self.options.rocksdb:
            libxrpl.requires.append('rocksdb::librocksdb')
docs/0001-negative-unl/README.md (new file, 597 lines)
@@ -0,0 +1,597 @@
# Negative UNL Engineering Spec

## The Problem Statement

The moment-to-moment health of the XRP Ledger network depends on the health and
connectivity of a small number of computers (nodes). The most important nodes
are validators, specifically ones listed on the unique node list
([UNL](#Question-What-are-UNLs)). Ripple publishes a recommended UNL that most
network nodes use to determine which peers in the network are trusted. Although
most validators use the same list, they are not required to. The XRP Ledger
network progresses to the next ledger when enough validators reach agreement
(above the minimum quorum of 80%) about what transactions to include in the
next ledger.

As an example, if there are 10 validators on the UNL, at least 8 validators have
to agree with the latest ledger for it to become validated. But what if enough
of those validators are offline to drop the network below the 80% quorum? The
XRP Ledger network favors safety/correctness over advancing the ledger, which
means that if enough validators are offline, the network will not be able to
validate ledgers.

Unfortunately, validators can go offline at any time for many different reasons.
Power outages, network connectivity issues, and hardware failures are just a few
scenarios where a validator would appear "offline". Given that most of these
events are temporary, it would make sense to temporarily remove that validator
from the UNL. But the UNL is updated infrequently and not every node uses the
same UNL. So instead of removing the unreliable validator from the Ripple
recommended UNL, we can create a second negative UNL which is stored directly on
the ledger (so the entire network has the same view). This will help the network
see which validators are **currently** unreliable, and adjust their quorum
calculation accordingly.

*Improving the liveness of the network is the main motivation for the negative UNL.*

### Targeted Faults

In order to determine which validators are unreliable, we need to clearly define
what kind of faults to measure and analyze. We want to deal with the faults we
frequently observe in the production network. Hence we will only monitor for
validators that do not reliably respond to network messages or that send out
validations disagreeing with the locally generated validations. We will not
target other byzantine faults.

To track whether or not a validator is responding to the network, we could
monitor them with a “heartbeat” protocol. Instead of creating a new heartbeat
protocol, we can leverage some existing protocol messages to mimic the
heartbeat. We picked validation messages because validators should send one and
only one validation message per ledger. In addition, we only count the
validation messages that agree with the local node's validations.

With the negative UNL, the network could keep making forward progress safely
even if the number of remaining validators gets to 60%. Say we have a network
with 10 validators on the UNL and everything is operating correctly. The quorum
required for this network would be 8 (80% of 10). When validators fail, the
quorum required would be as low as 6 (60% of 10), which is the absolute
***minimum quorum***. We need the absolute minimum quorum to be strictly greater
than 50% of the original UNL so that there cannot be two partitions of
well-behaved nodes headed in different directions. We arbitrarily choose 60% as
the minimum quorum to give a margin of safety.

Consider these events in the absence of the negative UNL:

1. 1:00pm - validator1 fails, votes vs. quorum: 9 >= 8, we have quorum
1. 3:00pm - validator2 fails, votes vs. quorum: 8 >= 8, we have quorum
1. 5:00pm - validator3 fails, votes vs. quorum: 7 < 8, we don’t have quorum
   * **network cannot validate new ledgers with 3 failed validators**

We're below 80% agreement, so new ledgers cannot be validated. This is how the
XRP Ledger operates today, but if the negative UNL were enabled, the events
would happen as follows. (Please note that the events below are from a
simplified version of our protocol.)
1. 1:00pm - validator1 fails, votes vs. quorum: 9 >= 8, we have quorum
1. 1:40pm - network adds validator1 to negative UNL, quorum changes to ceil(9 * 0.8), or 8
1. 3:00pm - validator2 fails, votes vs. quorum: 8 >= 8, we have quorum
1. 3:40pm - network adds validator2 to negative UNL, quorum changes to ceil(8 * 0.8), or 7
1. 5:00pm - validator3 fails, votes vs. quorum: 7 >= 7, we have quorum
1. 5:40pm - network adds validator3 to negative UNL, quorum changes to ceil(7 * 0.8), or 6
1. 7:00pm - validator4 fails, votes vs. quorum: 6 >= 6, we have quorum
   * **network can still validate new ledgers with 4 failed validators**

## External Interactions

### Message Format Changes
This proposal will:
1. add a new pseudo-transaction type
1. add the negative UNL to the ledger data structure.

Any tools or systems that rely on the format of this data will have to be
updated.

### Amendment
This feature **will** need an amendment to activate.

## Design

This section discusses the following topics about the Negative UNL design:

* [Negative UNL protocol overview](#Negative-UNL-Protocol-Overview)
* [Validator reliability measurement](#Validator-Reliability-Measurement)
* [Format Changes](#Format-Changes)
* [Negative UNL maintenance](#Negative-UNL-Maintenance)
* [Quorum size calculation](#Quorum-Size-Calculation)
* [Filter validation messages](#Filter-Validation-Messages)
* [High level sequence diagram of code changes](#High-Level-Sequence-Diagram-of-Code-Changes)

### Negative UNL Protocol Overview

Every ledger stores a list of zero or more unreliable validators. Updates to the
list must be approved by the validators using the consensus mechanism that
validators use to agree on the set of transactions. The list is used only when
checking if a ledger is fully validated. If a validator V is in the list, nodes
with V in their UNL adjust the quorum and V’s validation message is not counted
when verifying if a ledger is fully validated. V’s flow of messages and network
interactions, however, will remain the same.

We define the ***effective UNL*** = *original UNL - negative UNL*, and the
***effective quorum*** as the quorum of the *effective UNL*. And we set
*effective quorum = Ceiling(80% * effective UNL)*.
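
To make the arithmetic concrete, here is a minimal sketch of the
effective-quorum computation (Python; the function and variable names are
illustrative, not rippled's API). The refined formula with the 60% floor
appears later under Quorum Size Calculation.

```python
import math

def effective_quorum(unl: set, negative_unl: set) -> int:
    """Quorum is 80% of the effective UNL, rounded up (sketch only)."""
    effective_unl = unl - negative_unl
    return math.ceil(0.8 * len(effective_unl))

# Example: 10 trusted validators, 2 of them on the negative UNL.
# The effective UNL has 8 members, so the quorum is ceil(0.8 * 8) = 7,
# matching the timeline above.
```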

### Validator Reliability Measurement

A node only measures the reliability of validators on its own UNL, and only
proposes based on local observations. There are many metrics that a node can
measure about its validators, but we have chosen ledger validation messages.
This is because every validator shall send one and only one signed validation
message per ledger. This keeps the measurement simple and removes
timing/clock-sync issues. A node will measure the percentage of agreeing
validation messages (*PAV*) received from each validator on the node's UNL. Note
that the node will only count the validation messages that agree with its own
validations.

We define the **PAV** as the **P**ercentage of **A**greed **V**alidation
messages received for the last N ledgers, where N = 256 by default.

When the PAV drops below the ***low-water mark***, the validator is considered
unreliable, and is a candidate to be disabled by being added to the negative
UNL. A validator must have a PAV higher than the ***high-water mark*** to be
re-enabled. The validator is re-enabled by removing it from the negative UNL. In
the implementation, we plan to set the low-water mark as 50% and the high-water
mark as 80%.
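
A sketch of how such a measurement could be kept, assuming `record` is called
once per ledger with whether an agreeing validation arrived (illustrative
Python, not rippled's implementation):

```python
from collections import deque

WINDOW = 256        # N, the measurement window in ledgers
LOW_WATER = 0.5     # below this PAV: candidate for the negative UNL
HIGH_WATER = 0.8    # above this PAV: candidate for re-enabling

class PavTracker:
    """Tracks one validator's Percentage of Agreed Validations (PAV)."""

    def __init__(self):
        # One boolean per ledger: did an agreeing validation arrive?
        self.history = deque(maxlen=WINDOW)

    def record(self, agreed: bool) -> None:
        self.history.append(agreed)

    @property
    def pav(self) -> float:
        return sum(self.history) / max(len(self.history), 1)

    def disable_candidate(self) -> bool:
        return self.pav < LOW_WATER

    def reenable_candidate(self) -> bool:
        return self.pav > HIGH_WATER
```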

### Format Changes

The negative UNL component in a ledger contains three fields:
* ***NegativeUNL***: The current negative UNL, a list of unreliable validators.
* ***ToDisable***: The validator to be added to the negative UNL on the next
  flag ledger.
* ***ToReEnable***: The validator to be removed from the negative UNL on the
  next flag ledger.

All three fields are optional. When the *ToReEnable* field exists, the
*NegativeUNL* field cannot be empty.

A new pseudo-transaction, ***UNLModify***, is added. It has three fields:
* ***Disabling***: A flag indicating whether the modification is to disable or
  to re-enable a validator.
* ***Seq***: The ledger sequence number.
* ***Validator***: The validator to be disabled or re-enabled.

There would be at most one *disable* `UNLModify` and one *re-enable* `UNLModify`
transaction per flag ledger. The full machinery is described further on.
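
The shapes of these records can be sketched as follows (field names follow this
spec; the actual on-ledger serialization differs):

```python
from dataclasses import dataclass, field
from typing import Optional, Set

@dataclass
class NegativeUnlLedgerComponent:
    negative_unl: Set[str] = field(default_factory=set)  # NegativeUNL
    to_disable: Optional[str] = None    # ToDisable, applied at next flag ledger
    to_re_enable: Optional[str] = None  # ToReEnable; requires non-empty NegativeUNL

@dataclass
class UnlModify:
    disabling: bool  # True to disable, False to re-enable
    seq: int         # ledger sequence number
    validator: str   # public key of the affected validator
```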

### Negative UNL Maintenance

The negative UNL can only be modified on the flag ledgers. If a validator's
reliability status changes, it takes two flag ledgers to modify the negative
UNL. Let's see an example of the algorithm:

* Ledger seq = 100: A validator V goes offline.
* Ledger seq = 256: This is a flag ledger, and V's reliability measurement *PAV*
  is lower than the low-water mark. Other validators add `UNLModify`
  pseudo-transactions `{true, 256, V}` to the transaction set which goes through
  the consensus. Then the pseudo-transaction is applied to the negative UNL
  ledger component by setting `ToDisable = V`.
* Ledger seq = 257 ~ 511: The negative UNL ledger component is copied from the
  parent ledger.
* Ledger seq = 512: This is a flag ledger, and the negative UNL is updated
  `NegativeUNL = NegativeUNL + ToDisable`.

The negative UNL may have up to `MaxNegativeListed = floor(original UNL * 25%)`
validators. The 25% is because of 75% * 80% = 60%, where 75% = 100% - 25%, 80%
is the quorum of the effective UNL, and 60% is the absolute minimum quorum of
the original UNL. Adding more than 25% validators to the negative UNL does not
improve the liveness of the network, because adding more validators to the
negative UNL cannot lower the effective quorum.

The following is the detailed algorithm (a condensed sketch in code follows the
list):

* **If** the ledger seq = x is a flag ledger

   1. Compute `NegativeUNL = NegativeUNL + ToDisable - ToReEnable` if they
      exist in the parent ledger

   1. Try to find a candidate to disable if `sizeof NegativeUNL < MaxNegativeListed`

      1. Find a validator V that has a *PAV* lower than the low-water
         mark, but is not in `NegativeUNL`.

      1. If two or more are found, their public keys are XORed with the hash
         of the parent ledger and the one with the lowest XOR result is chosen.

      1. If V is found, create a `UNLModify` pseudo-transaction
         `TxDisableValidator = {true, x, V}`

   1. Try to find a candidate to re-enable if `sizeof NegativeUNL > 0`:

      1. Find a validator U that is in `NegativeUNL` and has a *PAV* higher
         than the high-water mark.

      1. If U is not found, try to find one in `NegativeUNL` but not in the
         local *UNL*.

      1. If two or more are found, their public keys are XORed with the hash
         of the parent ledger and the one with the lowest XOR result is chosen.

      1. If U is found, create a `UNLModify` pseudo-transaction
         `TxReEnableValidator = {false, x, U}`

   1. If any `UNLModify` pseudo-transactions are created, add them to the
      transaction set. The transaction set goes through the consensus algorithm.

   1. If they have enough support, the `UNLModify` pseudo-transactions remain in
      the transaction set agreed by the validators. Then the pseudo-transactions
      are applied to the ledger:

      1. If there is a `TxDisableValidator`, set `ToDisable=TxDisableValidator.V`.
         Else clear `ToDisable`.

      1. If there is a `TxReEnableValidator`, set
         `ToReEnable=TxReEnableValidator.U`. Else clear `ToReEnable`.

* **Else** (not a flag ledger)

   1. Copy the negative UNL ledger component from the parent ledger
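
The condensed sketch (Python; names such as `pav`, `state`, and `local_unl` are
illustrative assumptions, and public keys are treated as same-length byte
strings for the XOR tie-break):

```python
LOW_WATER, HIGH_WATER = 0.5, 0.8

def lowest_xor(candidates, parent_hash: bytes):
    """Deterministic tie-break: XOR each public key with the parent ledger
    hash and pick the smallest result."""
    return min(candidates,
               key=lambda k: bytes(a ^ b for a, b in zip(k, parent_hash)))

def flag_ledger_proposals(state, pav, local_unl, parent_hash, max_listed, x):
    """state: dict with 'negative_unl' (set), 'to_disable', 'to_re_enable'."""
    # 1. Apply the votes recorded at the previous flag ledger.
    if state['to_disable'] is not None:
        state['negative_unl'].add(state['to_disable'])
    state['negative_unl'].discard(state['to_re_enable'])
    state['to_disable'] = state['to_re_enable'] = None
    proposals = []
    # 2. Candidate to disable: PAV below the low-water mark, not yet listed.
    if len(state['negative_unl']) < max_listed:
        cands = [v for v in local_unl
                 if pav[v] < LOW_WATER and v not in state['negative_unl']]
        if cands:
            proposals.append(('disable', x, lowest_xor(cands, parent_hash)))
    # 3. Candidate to re-enable: listed and recovered, or no longer trusted.
    if state['negative_unl']:
        cands = [v for v in state['negative_unl']
                 if v in local_unl and pav[v] > HIGH_WATER]
        cands = cands or [v for v in state['negative_unl']
                          if v not in local_unl]
        if cands:
            proposals.append(('re-enable', x, lowest_xor(cands, parent_hash)))
    return proposals  # consensus decides whether these survive
```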

The negative UNL is stored on each ledger because we don't know when a validator
may reconnect to the network. If the negative UNL were stored only on every flag
ledger, then a new validator would have to wait until it acquires the latest
flag ledger to know the negative UNL. So any new ledgers created that are not
flag ledgers copy the negative UNL from the parent ledger.

Note that when we have a validator to disable and a validator to re-enable at
the same flag ledger, we create two separate `UNLModify` pseudo-transactions. We
want either one or the other or both to make it into the ledger on their own
merits.

Readers may have noticed that we defined several rules for creating the
`UNLModify` pseudo-transactions but did not describe how to enforce the rules.
The rules are actually enforced by the existing consensus algorithm. Unless
enough validators propose the same pseudo-transaction, it will not be included
in the transaction set of the ledger.

### Quorum Size Calculation

The effective quorum is 80% of the effective UNL. Note that because at most 25%
of the original UNL can be on the negative UNL, the quorum should not be lower
than the absolute minimum quorum (i.e. 60%) of the original UNL. However,
considering that different nodes may have different UNLs, to be safe we compute
`quorum = Ceiling(max(60% * original UNL, 80% * effective UNL))`.
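
As a sketch, this is a direct transcription of the formula (Python; names are
illustrative):

```python
import math

def quorum(original_unl_size: int, negative_unl_size: int) -> int:
    """quorum = Ceiling(max(60% * original UNL, 80% * effective UNL))"""
    effective_unl_size = original_unl_size - negative_unl_size
    return math.ceil(max(0.6 * original_unl_size, 0.8 * effective_unl_size))

# Example: 10 validators with 2 disabled -> max(6.0, 6.4) -> quorum 7.
# Example: 20 validators with the maximum 5 disabled -> max(12.0, 12.0)
#          -> quorum 12, i.e. exactly the 60% floor.
```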

### Filter Validation Messages

If a validator V is in the negative UNL, it still participates in consensus
sessions in the same way, i.e. V still follows the protocol and publishes
proposal and validation messages. The messages from V are still stored the same
way by everyone, used to calculate the new PAV for V, and could be used in
future consensus sessions if needed. However, V's ledger validation message is
not counted when checking if the ledger is fully validated.
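
A sketch of the fully-validated check with this filter in place (illustrative
Python; `local_unl` and `negative_unl` are assumed to be sets of validator
keys, and `validations` maps validator keys to the ledger hash they validated):

```python
import math

def is_fully_validated(ledger_hash, validations, local_unl, negative_unl):
    """Count agreeing validations from trusted validators that are not on
    the negative UNL, then compare against the quorum (sketch only)."""
    counted = sum(1 for v, h in validations.items()
                  if h == ledger_hash and v in local_unl
                  and v not in negative_unl)
    q = math.ceil(max(0.6 * len(local_unl),
                      0.8 * len(local_unl - negative_unl)))
    return counted >= q
```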

### High Level Sequence Diagram of Code Changes

The diagram below is the sequence of one round of consensus. Classes and
components with non-trivial changes are colored green.

* The `ValidatorList` class is modified to compute the quorum of the effective
  UNL.

* The `Validations` class provides an interface for querying the validation
  messages from trusted validators.

* The `ConsensusAdaptor` component:

  * The `RCLConsensus::Adaptor` class is modified for creating `UNLModify`
    Pseudo-Transactions.

  * The `Change` class is modified for applying `UNLModify`
    Pseudo-Transactions.

* The `Ledger` class is modified for creating and adjusting the negative UNL
  ledger component.

* The `LedgerMaster` class is modified for filtering out validation messages
  from negative UNL validators when verifying if a ledger is fully
  validated.

![Sequence diagram](./negativeUNL_highLevel_sequence.png?raw=true "Negative UNL
Changes")

## Roads Not Taken

### Use a Mechanism Like Fee Voting to Process UNLModify Pseudo-Transactions

The previous version of the negative UNL specification used the same mechanism
as [fee voting](https://xrpl.org/fee-voting.html#voting-process) for creating
the negative UNL, and used the negative UNL as soon as the ledger was fully
validated. However, the timing of full validation can differ among nodes, so
different negative UNLs could be used, resulting in different effective UNLs and
different quorums for the same ledger. As a result, the network's safety is
impacted.

This updated version does not impact safety, though it operates a bit more
slowly. The negative UNL modifications in the *UNLModify* pseudo-transaction
approved by the consensus will take effect at the next flag ledger. The extra
time of the 256 ledgers should be enough for nodes to be in sync on the negative
UNL modifications.

### Use an Expiration Approach to Re-enable Validators

After a validator disabled by the negative UNL becomes reliable, other
validators explicitly vote for re-enabling it. An alternative approach to
re-enable a validator is the expiration approach, which was considered in the
previous version of the specification. In the expiration approach, every entry
in the negative UNL has a fixed expiration time. One flag ledger interval was
chosen as the expiration interval. Once expired, the other validators must
continue voting to keep the unreliable validator on the negative UNL. The
advantage of this approach is its simplicity. But it has a requirement: the
negative UNL protocol must be able to vote to disable multiple unreliable
validators at the same flag ledger. In this version of the specification,
however, only one unreliable validator can be disabled at a flag ledger. So the
expiration approach cannot be simply applied.

### Validator Reliability Measurement and Flag Ledger Frequency

If the ledger time is about 4.5 seconds and the low-water mark is 50%, then in
the worst case, it takes 48 minutes *((0.5 * 256 + 256 + 256) * 4.5 / 60 = 48)*
to put an offline validator on the negative UNL. We considered lowering the flag
ledger frequency so that the negative UNL can be more responsive. We also
considered decoupling the reliability measurement and flag ledger frequency to
be more flexible. In practice, however, their benefits are not clear.

## New Attack Vectors

A group of malicious validators may try to frame a reliable validator and put it
on the negative UNL. But they cannot succeed, because:

1. A reliable validator sends a signed validation message every ledger. A
sufficiently connected peer-to-peer network will propagate the validation
messages to other validators. The validators will decide if another validator is
reliable or not only by their local observation of the validation messages
received. So an honest validator’s vote on another validator’s reliability is
accurate.

1. Given the votes are accurate, and one vote per validator, an honest validator
will not create a `UNLModify` transaction for a reliable validator.

1. A validator can be added to the negative UNL only through a `UNLModify`
transaction.

Assuming the group of malicious validators is smaller than the quorum, they
cannot frame a reliable validator.

## Summary

The bullet points below briefly summarize the current proposal:

* The motivation of the negative UNL is to improve the liveness of the network.

* The targeted faults are the ones frequently observed in the production
  network.

* Validators propose negative UNL candidates based on their local measurements.

* The absolute minimum quorum is 60% of the original UNL.

* The format of the ledger is changed, and a new *UNLModify* pseudo-transaction
  is added. Any tools or systems that rely on the format of these data will have
  to be updated.

* The negative UNL can only be modified on the flag ledgers.

* At most one validator can be added to the negative UNL at a flag ledger.

* At most one validator can be removed from the negative UNL at a flag ledger.

* If a validator's reliability status changes, it takes two flag ledgers to
  modify the negative UNL.

* The quorum is the larger of 80% of the effective UNL and 60% of the original
  UNL.

* If a validator is on the negative UNL, its validation messages are ignored
  when the local node verifies if a ledger is fully validated.

## FAQ
|
||||
|
||||
### Question: What are UNLs?
|
||||
|
||||
Quote from the [Technical FAQ](https://xrpl.org/technical-faq.html): "They are
|
||||
the lists of transaction validators a given participant believes will not
|
||||
conspire to defraud them."

### Question: How does the negative UNL proposal affect network liveness?

The network can make forward progress when more than a quorum of the trusted
validators agree with the progress. The lower the quorum size, the easier it is
for the network to progress. If the quorum is too low, however, the network is
not safe because nodes may have different results. So the quorum size used in
the consensus protocol is a balance between the safety and the liveness of the
network. The negative UNL reduces the size of the effective UNL, resulting in a
lower quorum size while keeping the network safe.

<h3> Question: How does a validator get into the negative UNL? How is a
validator removed from the negative UNL? </h3>

A validator’s reliability is measured by other validators. If a validator
becomes unreliable, then at a flag ledger, other validators propose *UNLModify*
pseudo-transactions during the consensus session, voting to add the validator
to the negative UNL. If agreed, the validator is added to the negative UNL at
the next flag ledger. The mechanism for removing a validator from the negative
UNL is the same.
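
The sketch below illustrates that voting step for a single peer. The 50%
low-water mark is quoted in this document; the 80% high-water mark for
re-enabling and all of the names are illustrative assumptions, not the
implementation:

```cpp
#include <optional>

enum class UNLModifyVote { Disable, ReEnable };

// Illustrative sketch: what a validator might propose for one peer at a
// flag ledger. `agreement` is the fraction of the last 256 ledgers for
// which a matching validation from that peer was seen locally.
std::optional<UNLModifyVote>
proposeFor(double agreement, bool onNegativeUnl)
{
    double const lowWater = 0.5;   // quoted in this document
    double const highWater = 0.8;  // assumed here for illustration

    if (!onNegativeUnl && agreement < lowWater)
        return UNLModifyVote::Disable;   // vote to add to the negative UNL
    if (onNegativeUnl && agreement > highWater)
        return UNLModifyVote::ReEnable;  // vote to remove from it
    return std::nullopt;                 // propose no UNLModify for this peer
}
```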

### Question: Given a negative UNL, what happens if the UNL changes?

Answer: Let’s consider the cases:

1. A validator is added to the UNL, and it is already in the negative UNL. This
case could happen when not all the nodes have the same UNL. Note that the
negative UNL on the ledger lists unreliable nodes that are not necessarily the
validators for everyone.

    In this case, the liveness is affected negatively, because the minimum
quorum could be larger but the number of usable validators is not increased.

1. A validator is removed from the UNL, and it is in the negative UNL.

    In this case, the liveness is affected positively, because the quorum could
be smaller but the number of usable validators is not reduced.

1. A validator is added to the UNL, and it is not in the negative UNL.
1. A validator is removed from the UNL, and it is not in the negative UNL.

Cases 3 and 4 are not affected by the negative UNL protocol.

### Question: Can we simply lower the quorum to 60% without the negative UNL?

Answer: No, because the negative UNL approach is safer.

First let’s compare the two approaches intuitively: (1) the *negative UNL*
approach, and (2) *lower quorum*, i.e. simply lowering the quorum from 80% to
60% without the negative UNL. The negative UNL approach uses consensus to come
up with a list of unreliable validators, which are then removed from the
effective UNL temporarily. With this approach, the list of unreliable validators
is agreed to by a quorum of validators and will be used by every node in the
network to adjust its UNL. The quorum is always 80% of the effective UNL. The
lower quorum approach is a tradeoff between safety and liveness and goes against
our principle of preferring safety over liveness. Note that with the lower
quorum approach, different validators don't have to agree on which validation
sources they are ignoring.

Next we compare the two approaches quantitatively with examples, and apply
Theorem 8 of the [Analysis of the XRP Ledger Consensus
Protocol](https://arxiv.org/abs/1802.07242) paper:

*XRP LCP guarantees fork safety if **O<sub>i,j</sub> > n<sub>j</sub> / 2 +
n<sub>i</sub> − q<sub>i</sub> + t<sub>i,j</sub>** for every pair of nodes
P<sub>i</sub>, P<sub>j</sub>,*

where *O<sub>i,j</sub>* is the overlapping requirement, n<sub>j</sub> and
n<sub>i</sub> are UNL sizes, q<sub>i</sub> is the quorum size of P<sub>i</sub>,
*t<sub>i,j</sub> = min(t<sub>i</sub>, t<sub>j</sub>, O<sub>i,j</sub>)*, and
t<sub>i</sub> and t<sub>j</sub> are the numbers of faults that can be tolerated
by P<sub>i</sub> and P<sub>j</sub>.

We denote *UNL<sub>i</sub>* as *P<sub>i</sub>'s UNL*, and *|UNL<sub>i</sub>|* as
the size of *P<sub>i</sub>'s UNL*.

Assuming *|UNL<sub>i</sub>| = |UNL<sub>j</sub>|*, let's consider the following
three cases:

1. With 80% quorum and 20% faults, *O<sub>i,j</sub> > 100% / 2 + 100% - 80% +
20% = 90%*. I.e. fork safety requires > 90% UNL overlap. This is one of the
results in the analysis paper.

1. If the quorum is 60%, the relationship between the overlapping requirement
and the faults that can be tolerated is *O<sub>i,j</sub> > 90% +
t<sub>i,j</sub>*. Under the same overlapping condition (i.e. 90%), to guarantee
fork safety, the network cannot tolerate any faults. So under the same
overlapping condition, if the quorum is simply lowered, the network can tolerate
fewer faults.

1. With the negative UNL approach, we want to argue that the inequality
*O<sub>i,j</sub> > n<sub>j</sub> / 2 + n<sub>i</sub> − q<sub>i</sub> +
t<sub>i,j</sub>* always holds to guarantee fork safety while the negative UNL
protocol runs, i.e. the effective quorum is lowered without weakening the
network's fault tolerance. To make the discussion easier, we rewrite the
inequality as *O<sub>i,j</sub> > n<sub>j</sub> / 2 + (n<sub>i</sub> −
q<sub>i</sub>) + min(t<sub>i</sub>, t<sub>j</sub>)*, where O<sub>i,j</sub> is
dropped from the definition of t<sub>i,j</sub> because *O<sub>i,j</sub> >
min(t<sub>i</sub>, t<sub>j</sub>)* always holds under the parameters we will
use. Assuming a validator V is added to the negative UNL, now let's consider the
4 cases:

    1. V is not on UNL<sub>i</sub> nor UNL<sub>j</sub>

        The inequality holds because none of the variables change.

    1. V is on UNL<sub>i</sub> but not on UNL<sub>j</sub>

        The value of *(n<sub>i</sub> − q<sub>i</sub>)* is smaller. The value of
        *min(t<sub>i</sub>, t<sub>j</sub>)* could be smaller too. Other
        variables do not change. Overall, the left side of the inequality does
        not change, but the right side is smaller. So the inequality holds.

    1. V is not on UNL<sub>i</sub> but on UNL<sub>j</sub>

        The value of *n<sub>j</sub> / 2* is smaller. The value of
        *min(t<sub>i</sub>, t<sub>j</sub>)* could be smaller too. Other
        variables do not change. Overall, the left side of the inequality does
        not change, but the right side is smaller. So the inequality holds.

    1. V is on both UNL<sub>i</sub> and UNL<sub>j</sub>

        The value of *O<sub>i,j</sub>* is reduced by 1. The values of
        *n<sub>j</sub> / 2*, *(n<sub>i</sub> − q<sub>i</sub>)*, and
        *min(t<sub>i</sub>, t<sub>j</sub>)* are reduced by 0.5, 0.2, and 1
        respectively. The right side is reduced by 1.7. Overall, the left side
        of the inequality is reduced by 1, and the right side is reduced by 1.7.
        So the inequality holds.

The inequality holds in all four cases. So with the negative UNL approach, the
network's fork safety is preserved while the quorum is lowered, which increases
the network's liveness.
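
The case-4 arithmetic can be spot-checked with a few lines of code. The sketch
below plugs in the example parameters (|UNL| = 100, 80% quorum, 20 tolerated
faults) and recomputes the right side of the rewritten inequality before and
after a shared validator V joins the negative UNL:

```cpp
#include <algorithm>
#include <iostream>

// Right side of the rewritten fork-safety condition:
// O > n_j / 2 + (n_i - q_i) + min(t_i, t_j)
double
bound(double ni, double nj, double qi, double ti, double tj)
{
    return nj / 2 + (ni - qi) + std::min(ti, tj);
}

int main()
{
    double const n = 100, t = 20;
    // Before: quorum is 80% of the UNL. Prints 90.
    std::cout << bound(n, n, 0.8 * n, t, t) << "\n";

    // After V (on both UNLs) joins the negative UNL: both UNL sizes and
    // both fault counts drop by 1; the quorum is 80% of the effective UNL.
    // Prints 88.3: the bound falls by 1.7 while the overlap O falls by at
    // most 1, so the inequality keeps holding.
    std::cout << bound(n - 1, n - 1, 0.8 * (n - 1), t - 1, t - 1) << "\n";
}
```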

<h3> Question: We have observed that occasionally a validator wanders off on its
own chain. How is this case handled by the negative UNL algorithm? </h3>

Answer: The case where a validator wanders off on its own chain can be detected
by the validation agreement measurement, because the validations issued by this
validator must differ from other validators' validations of the same sequence
numbers. When there are enough disagreeing validations, other validators will
vote this validator onto the negative UNL.

In general, by measuring the agreement of validations, we also measure
"sanity". If two validators have too many disagreements, one of them could be
insane. When enough validators think a validator is insane, that validator is
put on the negative UNL.

<h3> Question: Why would there be at most one disable UNLModify and one
re-enable UNLModify transaction per flag ledger? </h3>

Answer: It is a design choice, so that the effective UNL does not change too
quickly. A typical targeted scenario is several validators going offline slowly
during a long weekend. The current design can handle this kind of case well
without changing the effective UNL too quickly.

## Appendix

### Confidence Test

We will use two test networks: a single-machine test network with multiple IP
addresses, and the QE test network with multiple machines. The single-machine
network will be used to run all the test cases and to debug. The QE network will
be used after that; we want to see that the test cases still pass with real
network delay. A test case specifies:

1. a UNL with a different number of validators for different test cases,
1. a network with zero or more non-validator nodes,
1. a sequence of validator reliability change events (by killing/restarting
nodes, or by running a modified rippled that does not send all validation
messages), and
1. the correct outcomes.

For all the test cases, the correct outcomes are verified by examining logs. We
will grep the logs to see if the correct negative UNLs are generated, and
whether or not the network is making progress when it should be. The ripdtop
tool will be helpful for monitoring validators' states and ledger progress. Some
of the timing parameters of rippled will be changed to allow a faster ledger
time. Most if not all test cases do not need client transactions.

For example, the test cases for the prototype:

1. A 10-validator UNL.
1. The network does not have other nodes.
1. The validators will be started from the genesis ledger. Once they start to
   produce ledgers, we kill five validators, one every flag ledger interval.
   Then we restart them one by one.
1. A sequence of events (or the lack of events), such as a killed validator
   being added to the negative UNL.

#### Roads Not Taken: Test with Extended CSF

We considered testing with the current unit test framework, specifically the
[Consensus Simulation
Framework](https://github.com/ripple/rippled/blob/develop/src/test/csf/README.md)
(CSF). However, the CSF currently can only test the generic consensus algorithm
as described in the paper [Analysis of the XRP Ledger Consensus
Protocol](https://arxiv.org/abs/1802.07242).

79 docs/0001-negative-unl/negativeUNLSqDiagram.puml Normal file
@@ -0,0 +1,79 @@
@startuml negativeUNL_highLevel_sequence

skinparam sequenceArrowThickness 2
skinparam roundcorner 20
skinparam maxmessagesize 160

actor "Rippled Start" as RS
participant "Timer" as T
participant "NetworkOPs" as NOP
participant "ValidatorList" as VL #lightgreen
participant "Consensus" as GC
participant "ConsensusAdaptor" as CA #lightgreen
participant "Validations" as RM #lightgreen

RS -> NOP: begin consensus
activate NOP
NOP -[#green]> VL: <font color=green>update negative UNL
hnote over VL#lightgreen: store a copy of\nnegative UNL
VL -> NOP
NOP -> VL: update trusted validators
activate VL
VL -> VL: re-calculate quorum
hnote over VL#lightgreen: ignore negative-listed validators\nwhen calculating quorum
VL -> NOP
deactivate VL
NOP -> GC: start round
activate GC
GC -> GC: phase = OPEN
GC -> NOP
deactivate GC
deactivate NOP

loop at regular frequency
    T -> GC: timerEntry
    activate GC
end

alt phase == OPEN
    alt should close ledger
        GC -> GC: phase = ESTABLISH
        GC -> CA: onClose
        activate CA
        alt sqn%256==0
            CA -[#green]> RM: <font color=green>getValidations
            CA -[#green]> CA: <font color=green>create UNLModify Tx
            hnote over CA#lightgreen: use validations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
        end
        CA -> GC
        GC -> CA: propose
        deactivate CA
    end
else phase == ESTABLISH
    hnote over GC: receive peer positions
    GC -> GC : update our position
    GC -> CA : propose \n(if position changed)
    GC -> GC : check if have consensus
    alt consensus reached
        GC -> GC: phase = ACCEPT
        GC -> CA : onAccept
        activate CA
        CA -> CA : build LCL
        hnote over CA #lightgreen: copy negative UNL from parent ledger
        alt sqn%256==0
            CA -[#green]> CA: <font color=green>Adjust negative UNL
            CA -[#green]> CA: <font color=green>apply UNLModify Tx
        end
        CA -> CA : validate and send validation message
        activate NOP
        CA -> NOP : end consensus and\n<b>begin next consensus round
        deactivate NOP
        deactivate CA
        hnote over RM: receive validations
    end
else phase == ACCEPTED
    hnote over GC: timerEntry has nothing to do at this phase
end
deactivate GC

@enduml
BIN docs/0001-negative-unl/negativeUNL_highLevel_sequence.png Normal file (binary file not shown; 138 KiB)
88 docs/0010-ledger-replay/README.md Normal file
@@ -0,0 +1,88 @@
# Ledger Replay

`LedgerReplayer` is a new `Stoppable` for replaying ledgers.
Patterned after two other `Stoppable`s under `JobQueue`---`InboundLedgers`
and `InboundTransactions`---it acts like a factory for creating
state-machine workers, and a network message demultiplexer for those workers.
Think of these workers like asynchronous functions.
Like functions, they each take a set of parameters.
The `Stoppable` memoizes these functions. It maintains a table for each
worker type, mapping sets of arguments to the worker currently working
on that argument set.
Whenever the `Stoppable` is asked to construct a worker, it first searches its
table to see if there is an existing worker with the same or an overlapping
argument set.
If one exists, then it is used. If not, then a new one is created,
initialized, and added to the table.
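
A minimal sketch of that find-or-create pattern, with simplified stand-in types
(the real keys are 256-bit ledger hashes and the real classes take more
arguments):

```cpp
#include <memory>
#include <mutex>
#include <unordered_map>

// Stand-in for a worker like SkipListAcquire; the real class also sends
// the network request and starts its timeout in init().
struct Worker
{
    explicit Worker(int ledgerId) : ledgerId(ledgerId) {}
    void init() {}
    int ledgerId;
};

class Replayer
{
public:
    // Return the worker already working on this argument set, or create,
    // initialize, and remember a new one.
    std::shared_ptr<Worker>
    findOrCreate(int ledgerId)
    {
        std::lock_guard lock(mtx_);
        if (auto it = workers_.find(ledgerId); it != workers_.end())
            if (auto existing = it->second.lock())
                return existing;
        auto created = std::make_shared<Worker>(ledgerId);
        workers_[ledgerId] = created;
        created->init();
        return created;
    }

private:
    std::mutex mtx_;
    // weak_ptr: the table alone does not keep finished workers alive.
    std::unordered_map<int, std::weak_ptr<Worker>> workers_;
};
```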

For `LedgerReplayer`, there are three worker types: `LedgerReplayTask`,
`SkipListAcquire`, and `LedgerDeltaAcquire`.
Each is derived from `TimeoutCounter` to give it a timeout.
For `LedgerReplayTask`, the parameter set
is {reason, finish ledger ID, number of ledgers}. For `SkipListAcquire` and
`LedgerDeltaAcquire`, there is just one parameter: a ledger ID.

Each `Stoppable` has an entry point. For `LedgerReplayer`, it is `replay`.
`replay` creates two workers: a `LedgerReplayTask` and a `SkipListAcquire`.
`LedgerDeltaAcquire`s are created in the callback for when the skip list
returns.

For `SkipListAcquire` and `LedgerDeltaAcquire`, initialization fires off the
underlying asynchronous network request and starts the timeout. The argument
set identifying the worker is included in the network request, and copied to
the network response. `SkipListAcquire` sends a request for a proof path for
the skip list of the desired ledger. `LedgerDeltaAcquire` sends a request for
the transaction set of the desired ledger.

`LedgerReplayer` is also a network message demultiplexer.
When a response arrives for a request that was sent by a `SkipListAcquire` or
`LedgerDeltaAcquire` worker, the `Peer` object knows to send it to the
`LedgerReplayer`, which looks up the worker waiting for that response based on
the identifying argument set included in the response.

`LedgerReplayTask` may ask `InboundLedgers` to send requests to acquire
the start ledger, but there is no way to attach a callback or be notified when
the `InboundLedger` worker completes. All the responses for its messages will
be directed to `InboundLedgers`, not `LedgerReplayer`. Instead,
`LedgerReplayTask` checks whether the start ledger has arrived every time its
timeout expires.

Like a promise, each worker keeps track of whether it is pending (`!isDone()`)
or whether it has resolved successfully (`complete_ == true`) or unsuccessfully
(`failed_ == true`). It will never exist in both resolved states at once, nor
will it return to a pending state after reaching a resolved state.

Like promises, some workers can accept continuations to be called when they
reach a resolved state, or immediately if they are already resolved.
`SkipListAcquire` and `LedgerDeltaAcquire` both accept continuations of a type
specific to their payload, both via a method named `addDataCallback()`.
Continuations cannot be removed explicitly, but they are held by
`std::weak_ptr` so they can be removed implicitly.
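
A sketch of that continuation mechanism under the same simplifying assumptions
(the payload type and the names are illustrative, not the actual declarations):

```cpp
#include <functional>
#include <memory>
#include <vector>

// Illustrative worker that accepts continuations, runs them on resolution
// (or immediately if already resolved), and holds them weakly.
class Acquire
{
public:
    using Callback = std::function<void(bool failed)>;

    void addDataCallback(std::shared_ptr<Callback> const& cb)
    {
        if (done_)
        {
            (*cb)(failed_);  // already resolved: call immediately
            return;
        }
        callbacks_.push_back(cb);  // stored as weak_ptr
    }

    void notify(bool failed)
    {
        done_ = true;
        failed_ = failed;
        for (auto const& weak : callbacks_)
            if (auto cb = weak.lock())  // expired continuations drop out
                (*cb)(failed);
        callbacks_.clear();
    }

private:
    bool done_ = false;
    bool failed_ = false;
    std::vector<std::weak_ptr<Callback>> callbacks_;
};
```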

`LedgerReplayTask` is simultaneously:

1. an asynchronous function,
1. a continuation to one `SkipListAcquire` asynchronous function,
1. a continuation to zero or more `LedgerDeltaAcquire` asynchronous functions, and
1. a continuation to its own timeout.

Each of these roles corresponds to a different entry point:

1. `init()`
1. the callback added to `SkipListAcquire`, which calls `updateSkipList(...)` or `cancel()`
1. the callback added to `LedgerDeltaAcquire`, which calls `deltaReady(...)` or `cancel()`
1. `onTimer()`

Each of these entry points does something unique. They either (a) transition
`LedgerReplayTask` to a terminal failed resolved state (`cancel()` and
`onTimer()`) or (b) try to make progress toward the successful resolved state.
`init()` and `updateSkipList(...)` call `trigger()`, while `deltaReady(...)`
calls `tryAdvance()`. There is a similarity between this pattern and the way
coroutines are implemented, where every yield saves the spot in the code where
it left off and every resume jumps back to that spot.

### Sequence Diagram

![sequence diagram](./ledger_replay_sequence.png?raw=true "A sequence diagram for ledger replay.")

### Class Diagram

![class diagram](./ledger_replay_classes.png?raw=true "A class diagram for ledger replay.")
BIN docs/0010-ledger-replay/ledger_replay_classes.png Normal file (binary file not shown; 90 KiB)
98 docs/0010-ledger-replay/ledger_replay_classes.puml Normal file
@@ -0,0 +1,98 @@
@startuml

class TimeoutCounter {
    #app_ : Application&
}

TimeoutCounter o-- "1" Application
': app_

Stoppable <.. Application

class Application {
    -m_ledgerReplayer : uptr<LedgerReplayer>
    -m_inboundLedgers : uptr<InboundLedgers>
}

Application *-- "1" LedgerReplayer
': m_ledgerReplayer
Application *-- "1" InboundLedgers
': m_inboundLedgers

Stoppable <.. InboundLedgers
Application "1" --o InboundLedgers
': app_

class InboundLedgers {
    -app_ : Application&
}

Stoppable <.. LedgerReplayer
InboundLedgers "1" --o LedgerReplayer
': inboundLedgers_
Application "1" --o LedgerReplayer
': app_

class LedgerReplayer {
    +createDeltas(LedgerReplayTask)
    -app_ : Application&
    -inboundLedgers_ : InboundLedgers&
    -tasks_ : vector<sptr<LedgerReplayTask>>
    -deltas_ : hash_map<u256, wptr<LedgerDeltaAcquire>>
    -skipLists_ : hash_map<u256, wptr<SkipListAcquire>>
}

LedgerReplayer *-- LedgerReplayTask
': tasks_
LedgerReplayer o-- LedgerDeltaAcquire
': deltas_
LedgerReplayer o-- SkipListAcquire
': skipLists_

TimeoutCounter <.. LedgerReplayTask
InboundLedgers "1" --o LedgerReplayTask
': inboundLedgers_
LedgerReplayer "1" --o LedgerReplayTask
': replayer_

class LedgerReplayTask {
    -inboundLedgers_ : InboundLedgers&
    -replayer_ : LedgerReplayer&
    -skipListAcquirer_ : sptr<SkipListAcquire>
    -deltas_ : vector<sptr<LedgerDeltaAcquire>>
    +addDelta(sptr<LedgerDeltaAcquire>)
}

LedgerReplayTask *-- "1" SkipListAcquire
': skipListAcquirer_
LedgerReplayTask *-- LedgerDeltaAcquire
': deltas_

TimeoutCounter <.. SkipListAcquire
InboundLedgers "1" --o SkipListAcquire
': inboundLedgers_
LedgerReplayer "1" --o SkipListAcquire
': replayer_
LedgerReplayTask --o SkipListAcquire : implicit via callback

class SkipListAcquire {
    +addDataCallback(callback)
    -inboundLedgers_ : InboundLedgers&
    -replayer_ : LedgerReplayer&
    -dataReadyCallbacks_ : vector<callback>
}

TimeoutCounter <.. LedgerDeltaAcquire
InboundLedgers "1" --o LedgerDeltaAcquire
': inboundLedgers_
LedgerReplayer "1" --o LedgerDeltaAcquire
': replayer_
LedgerReplayTask --o LedgerDeltaAcquire : implicit via callback

class LedgerDeltaAcquire {
    +addDataCallback(callback)
    -inboundLedgers_ : InboundLedgers&
    -replayer_ : LedgerReplayer&
    -dataReadyCallbacks_ : vector<callback>
}

@enduml
BIN docs/0010-ledger-replay/ledger_replay_sequence.png Normal file (binary file not shown; 121 KiB)
85 docs/0010-ledger-replay/ledger_replay_sequence.puml Normal file
@@ -0,0 +1,85 @@
@startuml

autoactivate on

' participant app as "Application"
participant peer as "Peer"
participant lr as "LedgerReplayer"
participant lrt as "LedgerReplayTask"
participant sla as "SkipListAcquire"
participant lda as "LedgerDeltaAcquire"

[-> lr : replay(finishId, numLedgers)
lr -> sla : make_shared(finishHash)
return skipList
lr -> lrt : make_shared(skipList)
return task
lr -> sla : init(numPeers=1)
sla -> sla : trigger(numPeers=1)
sla -> peer : sendRequest(ProofPathRequest)
return
return
return
lr -> lrt : init()
lrt -> sla : addDataCallback(callback)
return
return
deactivate lr

[-> peer : onMessage(ProofPathResponse)
peer -> lr : gotSkipList(ledgerHeader, item)
lr -> sla : processData(ledgerSeq, item)
sla -> sla : onSkipListAcquired(skipList, ledgerSeq)
sla -> sla : notify()
note over sla: call the callbacks added by\naddDataCallback(callback).
sla -> lrt : callback(ledgerId)
lrt -> lrt : updateSkipList(ledgerId, ledgerSeq, skipList)
lrt -> lr : createDeltas(this)
loop
    lr -> lda : make_shared(ledgerId, ledgerSeq)
    return delta
    lr -> lrt : addDelta(delta)
    lrt -> lda : addDataCallback(callback)
    return
    return
    lr -> lda : init(numPeers=1)
    lda -> lda : trigger(numPeers=1)
    lda -> peer : sendRequest(ReplayDeltaRequest)
    return
    return
    return
end
return
return
return
return
return
return
deactivate peer

[-> peer : onMessage(ReplayDeltaResponse)
peer -> lr : gotReplayDelta(ledgerHeader)
lr -> lda : processData(ledgerHeader, txns)
lda -> lda : notify()
note over lda: call the callbacks added by\naddDataCallback(callback).
lda -> lrt : callback(ledgerId)
lrt -> lrt : deltaReady(ledgerId)
lrt -> lrt : tryAdvance()
loop as long as child can be built
    lrt -> lda : tryBuild(parent)
    lda -> lda : onLedgerBuilt()
    note over lda
        Schedule a job to store the built ledger.
    end note
    return
    return child
end
return
return
return
return
return
deactivate peer


@enduml
@@ -1,16 +1,5 @@

# Rippled Docker Image
# `rippled` Docker Image

Rippled has a continuous deployment pipeline that turns every git commit into a
docker image for quick testing and deployment.

To run the tip of the latest release via docker:

```$ docker run -P -v /srv/rippled/ ripple/rippled:latest```

To run the tip of active development:

```$ docker run -P -v /srv/rippled/ ripple/rippled:develop```

Where ```/srv/rippled``` points to a directory containing a rippled.cfg and
database files. By default, port 5005/tcp maps to the RPC port and 51235/udp to
the peer port.
- Some info relating to Docker containers can be found here: [../Builds/containers](../Builds/containers)
- Images for building and testing rippled can be found here: [thejohnfreeman/rippled-docker](https://github.com/thejohnfreeman/rippled-docker/)
  - These images do not have rippled. They have all the tools necessary to build rippled.

@@ -1,31 +0,0 @@
FROM ubuntu:16.04

RUN apt -y update
RUN apt -y upgrade
RUN apt -y install build-essential g++ git libbz2-dev wget python-dev
RUN apt -y install cmake flex bison graphviz graphviz-dev libicu-dev
RUN apt -y install jarwrapper java-common

RUN cd /tmp
ENV CM_INSTALLER=cmake-3.10.0-rc3-Linux-x86_64.sh
ENV CM_VER_DIR=/opt/local/cmake-3.10.0
RUN cd /tmp && wget https://cmake.org/files/v3.10/$CM_INSTALLER && chmod a+x $CM_INSTALLER
RUN mkdir -p $CM_VER_DIR
RUN ln -s $CM_VER_DIR /opt/local/cmake
RUN /tmp/$CM_INSTALLER --prefix=$CM_VER_DIR --exclude-subdir
RUN rm -f /tmp/$CM_INSTALLER

RUN cd /tmp && wget https://ftp.stack.nl/pub/users/dimitri/doxygen-1.8.14.src.tar.gz
RUN cd /tmp && tar xvf doxygen-1.8.14.src.tar.gz
RUN mkdir -p /tmp/doxygen-1.8.14/build
RUN cd /tmp/doxygen-1.8.14/build && /opt/local/cmake/bin/cmake -G "Unix Makefiles" ..
RUN cd /tmp/doxygen-1.8.14/build && make -j2
RUN cd /tmp/doxygen-1.8.14/build && make install
RUN rm -f /tmp/doxygen-1.8.14.src.tar.gz
RUN rm -rf /tmp/doxygen-1.8.14

RUN mkdir -p /opt/plantuml
RUN wget -O /opt/plantuml/plantuml.jar http://sourceforge.net/projects/plantuml/files/plantuml.jar/download
ENV PLANTUML_JAR=/opt/plantuml/plantuml.jar

CMD cd /opt/rippled/docs && doxygen source.dox
343 docs/Doxyfile Normal file
@@ -0,0 +1,343 @@
#---------------------------------------------------------------------------
# Project related configuration options
#---------------------------------------------------------------------------
DOXYFILE_ENCODING = UTF-8
PROJECT_NAME = "rippled"
PROJECT_NUMBER =
PROJECT_BRIEF =
PROJECT_LOGO =
OUTPUT_DIRECTORY = $(DOXYGEN_OUTPUT_DIRECTORY)
CREATE_SUBDIRS = NO
ALLOW_UNICODE_NAMES = NO
OUTPUT_LANGUAGE = English
BRIEF_MEMBER_DESC = YES
REPEAT_BRIEF = YES
ABBREVIATE_BRIEF =
ALWAYS_DETAILED_SEC = NO
INLINE_INHERITED_MEMB = YES
FULL_PATH_NAMES = NO
STRIP_FROM_PATH = src/
STRIP_FROM_INC_PATH =
SHORT_NAMES = NO
JAVADOC_AUTOBRIEF = YES
QT_AUTOBRIEF = NO
MULTILINE_CPP_IS_BRIEF = NO
INHERIT_DOCS = YES
SEPARATE_MEMBER_PAGES = NO
TAB_SIZE = 4
ALIASES =
OPTIMIZE_OUTPUT_FOR_C = NO
OPTIMIZE_OUTPUT_JAVA = NO
OPTIMIZE_FOR_FORTRAN = NO
OPTIMIZE_OUTPUT_VHDL = NO
EXTENSION_MAPPING =
MARKDOWN_SUPPORT = YES
AUTOLINK_SUPPORT = YES
BUILTIN_STL_SUPPORT = YES
CPP_CLI_SUPPORT = NO
SIP_SUPPORT = NO
IDL_PROPERTY_SUPPORT = YES
DISTRIBUTE_GROUP_DOC = NO
GROUP_NESTED_COMPOUNDS = NO
SUBGROUPING = YES
INLINE_GROUPED_CLASSES = NO
INLINE_SIMPLE_STRUCTS = NO
TYPEDEF_HIDES_STRUCT = NO
LOOKUP_CACHE_SIZE = 0

#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
EXTRACT_ALL = YES
EXTRACT_PRIVATE = YES
EXTRACT_PACKAGE = NO
EXTRACT_STATIC = YES
EXTRACT_LOCAL_CLASSES = YES
EXTRACT_LOCAL_METHODS = YES
EXTRACT_ANON_NSPACES = NO
HIDE_UNDOC_MEMBERS = NO
HIDE_UNDOC_CLASSES = NO
HIDE_FRIEND_COMPOUNDS = NO
HIDE_IN_BODY_DOCS = NO
INTERNAL_DOCS = NO
CASE_SENSE_NAMES = YES
HIDE_SCOPE_NAMES = NO
HIDE_COMPOUND_REFERENCE= NO
SHOW_INCLUDE_FILES = NO
SHOW_GROUPED_MEMB_INC = NO
FORCE_LOCAL_INCLUDES = NO
INLINE_INFO = NO
SORT_MEMBER_DOCS = NO
SORT_BRIEF_DOCS = NO
SORT_MEMBERS_CTORS_1ST = YES
SORT_GROUP_NAMES = NO
SORT_BY_SCOPE_NAME = NO
STRICT_PROTO_MATCHING = NO
GENERATE_TODOLIST = NO
GENERATE_TESTLIST = NO
GENERATE_BUGLIST = NO
GENERATE_DEPRECATEDLIST= NO
ENABLED_SECTIONS =
MAX_INITIALIZER_LINES = 30
SHOW_USED_FILES = NO
SHOW_FILES = NO
SHOW_NAMESPACES = YES
FILE_VERSION_FILTER =
LAYOUT_FILE =
CITE_BIB_FILES =

#---------------------------------------------------------------------------
# Configuration options related to warning and progress messages
#---------------------------------------------------------------------------
QUIET = NO
WARNINGS = YES
WARN_IF_UNDOCUMENTED = YES
WARN_IF_DOC_ERROR = YES
WARN_NO_PARAMDOC = NO
WARN_AS_ERROR = NO
WARN_FORMAT = "$file:$line: $text"
WARN_LOGFILE =

#---------------------------------------------------------------------------
# Configuration options related to the input files
#---------------------------------------------------------------------------
INPUT = \
    . \

INPUT_ENCODING = UTF-8
FILE_PATTERNS = *.h *.cpp *.md
RECURSIVE = YES
EXCLUDE = \
    .github \
    external/ed25519-donna \
    external/secp256k1 \

EXCLUDE_SYMLINKS = NO
EXCLUDE_PATTERNS =
EXCLUDE_SYMBOLS =
EXAMPLE_PATH =
EXAMPLE_PATTERNS =
EXAMPLE_RECURSIVE = NO
IMAGE_PATH = \
    docs/images/ \
    docs/images/consensus/ \
    src/test/csf/ \

INPUT_FILTER =
FILTER_PATTERNS =
FILTER_SOURCE_FILES = NO
FILTER_SOURCE_PATTERNS =
USE_MDFILE_AS_MAINPAGE = ./README.md

#---------------------------------------------------------------------------
# Configuration options related to source browsing
#---------------------------------------------------------------------------
SOURCE_BROWSER = YES
INLINE_SOURCES = NO
STRIP_CODE_COMMENTS = YES
REFERENCED_BY_RELATION = NO
REFERENCES_RELATION = NO
REFERENCES_LINK_SOURCE = YES
SOURCE_TOOLTIPS = YES
USE_HTAGS = NO
VERBATIM_HEADERS = YES
CLANG_ASSISTED_PARSING = NO
CLANG_OPTIONS =

#---------------------------------------------------------------------------
# Configuration options related to the alphabetical class index
#---------------------------------------------------------------------------
ALPHABETICAL_INDEX = YES
COLS_IN_ALPHA_INDEX = 5
IGNORE_PREFIX =

#---------------------------------------------------------------------------
# Configuration options related to the HTML output
#---------------------------------------------------------------------------
GENERATE_HTML = YES
HTML_OUTPUT = html
HTML_FILE_EXTENSION = .html
HTML_HEADER =
HTML_FOOTER =
HTML_STYLESHEET =
HTML_EXTRA_STYLESHEET =
HTML_EXTRA_FILES =
HTML_COLORSTYLE_HUE = 220
HTML_COLORSTYLE_SAT = 100
HTML_COLORSTYLE_GAMMA = 80
HTML_TIMESTAMP = NO
HTML_DYNAMIC_SECTIONS = NO
HTML_INDEX_NUM_ENTRIES = 100
GENERATE_DOCSET = NO
DOCSET_FEEDNAME = "Doxygen generated docs"
DOCSET_BUNDLE_ID = org.doxygen.Project
DOCSET_PUBLISHER_ID = org.doxygen.Publisher
DOCSET_PUBLISHER_NAME = Publisher
GENERATE_HTMLHELP = NO
CHM_FILE =
HHC_LOCATION =
GENERATE_CHI = NO
CHM_INDEX_ENCODING =
BINARY_TOC = NO
TOC_EXPAND = NO
GENERATE_QHP = NO
QCH_FILE =
QHP_NAMESPACE = org.doxygen.Project
QHP_VIRTUAL_FOLDER = doc
QHP_CUST_FILTER_NAME =
QHP_CUST_FILTER_ATTRS =
QHP_SECT_FILTER_ATTRS =
QHG_LOCATION =
GENERATE_ECLIPSEHELP = NO
ECLIPSE_DOC_ID = org.doxygen.Project
DISABLE_INDEX = NO
GENERATE_TREEVIEW = NO
ENUM_VALUES_PER_LINE = 4
TREEVIEW_WIDTH = 250
EXT_LINKS_IN_WINDOW = NO
FORMULA_FONTSIZE = 10
FORMULA_TRANSPARENT = YES
USE_MATHJAX = NO
MATHJAX_FORMAT = HTML-CSS
MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
MATHJAX_EXTENSIONS =
MATHJAX_CODEFILE =
SEARCHENGINE = YES
SERVER_BASED_SEARCH = NO
EXTERNAL_SEARCH = NO
SEARCHENGINE_URL =
SEARCHDATA_FILE = searchdata.xml
EXTERNAL_SEARCH_ID =
EXTRA_SEARCH_MAPPINGS =

#---------------------------------------------------------------------------
# Configuration options related to the LaTeX output
#---------------------------------------------------------------------------
GENERATE_LATEX = NO
LATEX_OUTPUT = latex
LATEX_CMD_NAME =
MAKEINDEX_CMD_NAME = makeindex
COMPACT_LATEX = NO
PAPER_TYPE = a4
EXTRA_PACKAGES =
LATEX_HEADER =
LATEX_FOOTER =
LATEX_EXTRA_STYLESHEET =
LATEX_EXTRA_FILES =
PDF_HYPERLINKS = YES
USE_PDFLATEX = YES
LATEX_BATCHMODE = NO
LATEX_HIDE_INDICES = NO
LATEX_SOURCE_CODE = NO
LATEX_BIB_STYLE = plain
LATEX_TIMESTAMP = NO

#---------------------------------------------------------------------------
# Configuration options related to the RTF output
#---------------------------------------------------------------------------
GENERATE_RTF = NO
RTF_OUTPUT = rtf
COMPACT_RTF = NO
RTF_HYPERLINKS = NO
RTF_STYLESHEET_FILE =
RTF_EXTENSIONS_FILE =
RTF_SOURCE_CODE = NO

#---------------------------------------------------------------------------
# Configuration options related to the man page output
#---------------------------------------------------------------------------
GENERATE_MAN = NO
MAN_OUTPUT = man
MAN_EXTENSION = .3
MAN_SUBDIR =
MAN_LINKS = NO

#---------------------------------------------------------------------------
# Configuration options related to the XML output
#---------------------------------------------------------------------------
GENERATE_XML = NO
XML_OUTPUT = xml
XML_PROGRAMLISTING = YES

#---------------------------------------------------------------------------
# Configuration options related to the DOCBOOK output
#---------------------------------------------------------------------------
GENERATE_DOCBOOK = NO
DOCBOOK_OUTPUT = docbook
DOCBOOK_PROGRAMLISTING = NO

#---------------------------------------------------------------------------
# Configuration options for the AutoGen Definitions output
#---------------------------------------------------------------------------
GENERATE_AUTOGEN_DEF = NO
GENERATE_PERLMOD = NO
PERLMOD_LATEX = NO
PERLMOD_PRETTY = YES
PERLMOD_MAKEVAR_PREFIX =

#---------------------------------------------------------------------------
# Configuration options related to the preprocessor
#---------------------------------------------------------------------------
ENABLE_PREPROCESSING = YES
MACRO_EXPANSION = YES
EXPAND_ONLY_PREDEF = YES
SEARCH_INCLUDES = YES
INCLUDE_PATH = $(DOXYGEN_INCLUDE_PATH)
INCLUDE_FILE_PATTERNS =
PREDEFINED = DOXYGEN \
    GENERATING_DOCS \
    _MSC_VER \
    NUDB_POSIX_FILE=1

EXPAND_AS_DEFINED =
SKIP_FUNCTION_MACROS = YES

#---------------------------------------------------------------------------
# Configuration options related to external references
#---------------------------------------------------------------------------
TAGFILES = $(DOXYGEN_TAGFILES)
GENERATE_TAGFILE =
ALLEXTERNALS = NO
EXTERNAL_GROUPS = YES
EXTERNAL_PAGES = YES

#---------------------------------------------------------------------------
# Configuration options related to the dot tool
#---------------------------------------------------------------------------
CLASS_DIAGRAMS = NO
DIA_PATH =
HIDE_UNDOC_RELATIONS = YES
HAVE_DOT = YES
# DOT_NUM_THREADS = 0 means 1 for every processor.
DOT_NUM_THREADS = 0
DOT_FONTNAME = Helvetica
DOT_FONTSIZE = 10
DOT_FONTPATH =
CLASS_GRAPH = YES
COLLABORATION_GRAPH = YES
GROUP_GRAPHS = YES
UML_LOOK = NO
UML_LIMIT_NUM_FIELDS = 10
TEMPLATE_RELATIONS = NO
INCLUDE_GRAPH = YES
INCLUDED_BY_GRAPH = YES
CALL_GRAPH = NO
CALLER_GRAPH = NO
GRAPHICAL_HIERARCHY = YES
DIRECTORY_GRAPH = YES
DOT_IMAGE_FORMAT = png
INTERACTIVE_SVG = NO
DOT_PATH = $(DOXYGEN_DOT_PATH)
DOTFILE_DIRS =
MSCFILE_DIRS =
DIAFILE_DIRS =
PLANTUML_JAR_PATH = $(DOXYGEN_PLANTUML_JAR_PATH)
PLANTUML_INCLUDE_PATH =
DOT_GRAPH_MAX_NODES = 50
MAX_DOT_GRAPH_DEPTH = 0
DOT_TRANSPARENT = NO
DOT_MULTI_TARGETS = NO
GENERATE_LEGEND = YES
DOT_CLEANUP = YES
Some files were not shown because too many files have changed in this diff.