mirror of
https://github.com/XRPLF/rippled.git
synced 2025-11-04 19:25:51 +00:00
Compare commits
256 Commits
a1q123456/
...
bthomee/re
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
94c0ad795d | ||
|
|
1862ea9e9f | ||
|
|
57030f085d | ||
|
|
f3a2ec1fb2 | ||
|
|
1d42c4f6de | ||
|
|
ada83564d8 | ||
|
|
b18dece145 | ||
|
|
63a08560ca | ||
|
|
8ac8a47c99 | ||
|
|
12c4b5a632 | ||
|
|
25c5e3b17f | ||
|
|
8eb233c2ea | ||
|
|
50fc93f742 | ||
|
|
ab45a8a737 | ||
|
|
dfafb141cc | ||
|
|
4e32d2ed98 | ||
|
|
fa69918124 | ||
|
|
cbbb2b1be0 | ||
|
|
cf2d763fa1 | ||
|
|
2dd1d682ac | ||
|
|
4cb1084c02 | ||
|
|
8d1b3b3994 | ||
|
|
b39d7a6519 | ||
|
|
b0910e359e | ||
|
|
44e027e516 | ||
|
|
a10f42a3aa | ||
|
|
efd4c1b95d | ||
|
|
f8b4f692f1 | ||
|
|
80a3ae6386 | ||
|
|
48d38c1e2c | ||
|
|
553fb5be3b | ||
|
|
efa917d9f3 | ||
|
|
bd3bc917f8 | ||
|
|
ed5d6f3e22 | ||
|
|
a8e4da0b11 | ||
|
|
1dd60242de | ||
|
|
76611c3f46 | ||
|
|
5efaf0c328 | ||
|
|
0aa23933ea | ||
|
|
21f3c12d85 | ||
|
|
7d5ed0cd8d | ||
|
|
d9960d5ba0 | ||
|
|
91fa6b2295 | ||
|
|
76f774e22d | ||
|
|
f4f7618173 | ||
|
|
66f16469f9 | ||
|
|
1845b1c656 | ||
|
|
e192ffe964 | ||
|
|
2bf77cc8f6 | ||
|
|
5e33ca56fd | ||
|
|
7c39c810eb | ||
|
|
a7792ebcae | ||
|
|
83ee3788e1 | ||
|
|
ae719b86d3 | ||
|
|
dd722f8b3f | ||
|
|
30190a5feb | ||
|
|
afb6e0e41b | ||
|
|
5523557226 | ||
|
|
b64707f53b | ||
|
|
0b113f371f | ||
|
|
b4c894c1ba | ||
|
|
92281a4ede | ||
|
|
e80642fc12 | ||
|
|
640ce4988f | ||
|
|
a422855ea7 | ||
|
|
108f90586c | ||
|
|
519d1dbc34 | ||
|
|
3d44758e5a | ||
|
|
97bc94a7f6 | ||
|
|
34619f2504 | ||
|
|
3509de9c5f | ||
|
|
459d0da010 | ||
|
|
8637d606a4 | ||
|
|
8456b8275e | ||
|
|
3c88786bb0 | ||
|
|
46ba8a28fe | ||
|
|
5ecde3cf39 | ||
|
|
620fb26823 | ||
|
|
6b6b213cf5 | ||
|
|
f61086b43c | ||
|
|
176fd2b6e4 | ||
|
|
2df730438d | ||
|
|
5d79bfc531 | ||
|
|
51ef35ab55 | ||
|
|
330a3215bc | ||
|
|
85c2ceacde | ||
|
|
70d5c624e8 | ||
|
|
8e4fda160d | ||
|
|
072b1c442c | ||
|
|
294e03ecf5 | ||
|
|
550f90a75e | ||
|
|
d67dcfe3c4 | ||
|
|
0fd2f715bb | ||
|
|
807462b191 | ||
|
|
19c4226d3d | ||
|
|
d02c306f1e | ||
|
|
cfd26f444c | ||
|
|
2c3024716b | ||
|
|
a12f5de68d | ||
|
|
51c5f2bfc9 | ||
|
|
73ff54143d | ||
|
|
08b136528e | ||
|
|
6b8a589447 | ||
|
|
ffeabc9642 | ||
|
|
3cbdf818a7 | ||
|
|
c46888f8f7 | ||
|
|
2ae65d2fdb | ||
|
|
bd834c87e0 | ||
|
|
dc8b37a524 | ||
|
|
617a895af5 | ||
|
|
1af1048c58 | ||
|
|
f07ba87e51 | ||
|
|
e66558a883 | ||
|
|
510314d344 | ||
|
|
37b951859c | ||
|
|
9494fc9668 | ||
|
|
8d01f35eb9 | ||
|
|
1020a32d76 | ||
|
|
17a2606591 | ||
|
|
ccb9f1e42d | ||
|
|
3e4e9a2ddc | ||
|
|
4caebfbd0e | ||
|
|
37c377a1b6 | ||
|
|
bd182c0a3e | ||
|
|
406c26cc72 | ||
|
|
9bd1ce436a | ||
|
|
f69ad4eff6 | ||
|
|
6fe0599cc2 | ||
|
|
e6f8bc720f | ||
|
|
fbd60fc000 | ||
|
|
61d628d654 | ||
|
|
3d92375d12 | ||
|
|
cdbe70b2a7 | ||
|
|
f6426ca183 | ||
|
|
e5f7a8442d | ||
|
|
e67e0395df | ||
|
|
148f669a25 | ||
|
|
f1eaa6a264 | ||
|
|
da4c8c9550 | ||
|
|
bcde2790a4 | ||
|
|
9ebeb413e4 | ||
|
|
6d40b882a4 | ||
|
|
9fe0a154f1 | ||
|
|
cb52c9af00 | ||
|
|
6bf8338038 | ||
|
|
b0f4174e47 | ||
|
|
3865dde0b8 | ||
|
|
811c980821 | ||
|
|
cf5f65b68e | ||
|
|
c38f2a3f2e | ||
|
|
16c2ff97cc | ||
|
|
32043463a8 | ||
|
|
724e9b1313 | ||
|
|
2e6f00aef2 | ||
|
|
e0b9812fc5 | ||
|
|
e4fdf33158 | ||
|
|
6e814d7ebd | ||
|
|
1e37d00d6c | ||
|
|
87ea3ba65d | ||
|
|
dedf3d3983 | ||
|
|
2df7dcfdeb | ||
|
|
1506e65558 | ||
|
|
808c86663c | ||
|
|
92431a4238 | ||
|
|
285120684c | ||
|
|
77fef8732b | ||
|
|
7775c725f3 | ||
|
|
c61096239c | ||
|
|
c5fe970646 | ||
|
|
c57cd8b23e | ||
|
|
c14ce956ad | ||
|
|
095dc4d9cc | ||
|
|
2e255812ae | ||
|
|
896b8c3b54 | ||
|
|
58dd07bbdf | ||
|
|
b13370ac0d | ||
|
|
f847e3287c | ||
|
|
56c1e078f2 | ||
|
|
afc05659ed | ||
|
|
b04d239926 | ||
|
|
dc1caa41b2 | ||
|
|
ceb0ce5634 | ||
|
|
fb89213d4d | ||
|
|
d8628d481d | ||
|
|
a14551b151 | ||
|
|
de33a6a241 | ||
|
|
28eec6ce1b | ||
|
|
c9a723128a | ||
|
|
da82e52613 | ||
|
|
c9d73b6135 | ||
|
|
b7ed99426b | ||
|
|
97f0747e10 | ||
|
|
abf12db788 | ||
|
|
bdfc376951 | ||
|
|
b40a3684ae | ||
|
|
86ef16dbeb | ||
|
|
39b5031ab5 | ||
|
|
94decc753b | ||
|
|
991891625a | ||
|
|
69314e6832 | ||
|
|
dbeb841b5a | ||
|
|
4eae037fee | ||
|
|
b5a63b39d3 | ||
|
|
6419f9a253 | ||
|
|
31c99caa65 | ||
|
|
d835e97490 | ||
|
|
baf4b8381f | ||
|
|
9b45b6888b | ||
|
|
7179ce9c58 | ||
|
|
921aef9934 | ||
|
|
e7a7bb83c1 | ||
|
|
5c2a3a2779 | ||
|
|
b2960b9e7f | ||
|
|
5713f9782a | ||
|
|
60e340d356 | ||
|
|
80d82c5b2b | ||
|
|
433eeabfa5 | ||
|
|
faa781b71f | ||
|
|
c233df720a | ||
|
|
7ff4f79d30 | ||
|
|
60909655d3 | ||
|
|
03e46cd026 | ||
|
|
e95683a0fb | ||
|
|
13353ae36d | ||
|
|
1a40f18bdd | ||
|
|
90e6380383 | ||
|
|
8bfaa7fe0a | ||
|
|
c9135a63cd | ||
|
|
452263eaa5 | ||
|
|
8aa94ea09a | ||
|
|
258ba71363 | ||
|
|
b8626ea3c6 | ||
|
|
6534757d85 | ||
|
|
8e94ea3154 | ||
|
|
b113190563 | ||
|
|
358b7f50a7 | ||
|
|
f47e2f4e82 | ||
|
|
a7eea9546f | ||
|
|
9874d47d7f | ||
|
|
c2f3e2e263 | ||
|
|
e18f27f5f7 | ||
|
|
df6daf0d8f | ||
|
|
e9d46f0bfc | ||
|
|
42fd74b77b | ||
|
|
c55ea56c5e | ||
|
|
1e01cd34f7 | ||
|
|
e2fa5c1b7c | ||
|
|
fc0984d286 | ||
|
|
8b3dcd41f7 | ||
|
|
8f2f5310e2 | ||
|
|
edb4f0342c | ||
|
|
ea17abb92a | ||
|
|
35a40a8e62 | ||
|
|
d494bf45b2 | ||
|
|
8bf4a5cbff | ||
|
|
58c2c82a30 |
@@ -1,5 +1,21 @@
|
|||||||
---
|
---
|
||||||
Language: Cpp
|
BreakBeforeBraces: Custom
|
||||||
|
BraceWrapping:
|
||||||
|
AfterClass: true
|
||||||
|
AfterControlStatement: true
|
||||||
|
AfterEnum: false
|
||||||
|
AfterFunction: true
|
||||||
|
AfterNamespace: false
|
||||||
|
AfterObjCDeclaration: true
|
||||||
|
AfterStruct: true
|
||||||
|
AfterUnion: true
|
||||||
|
BeforeCatch: true
|
||||||
|
BeforeElse: true
|
||||||
|
IndentBraces: false
|
||||||
|
KeepEmptyLinesAtTheStartOfBlocks: false
|
||||||
|
MaxEmptyLinesToKeep: 1
|
||||||
|
---
|
||||||
|
Language: Cpp
|
||||||
AccessModifierOffset: -4
|
AccessModifierOffset: -4
|
||||||
AlignAfterOpenBracket: AlwaysBreak
|
AlignAfterOpenBracket: AlwaysBreak
|
||||||
AlignConsecutiveAssignments: false
|
AlignConsecutiveAssignments: false
|
||||||
@@ -18,56 +34,41 @@ AlwaysBreakBeforeMultilineStrings: true
|
|||||||
AlwaysBreakTemplateDeclarations: true
|
AlwaysBreakTemplateDeclarations: true
|
||||||
BinPackArguments: false
|
BinPackArguments: false
|
||||||
BinPackParameters: false
|
BinPackParameters: false
|
||||||
BraceWrapping:
|
|
||||||
AfterClass: true
|
|
||||||
AfterControlStatement: true
|
|
||||||
AfterEnum: false
|
|
||||||
AfterFunction: true
|
|
||||||
AfterNamespace: false
|
|
||||||
AfterObjCDeclaration: true
|
|
||||||
AfterStruct: true
|
|
||||||
AfterUnion: true
|
|
||||||
BeforeCatch: true
|
|
||||||
BeforeElse: true
|
|
||||||
IndentBraces: false
|
|
||||||
BreakBeforeBinaryOperators: false
|
BreakBeforeBinaryOperators: false
|
||||||
BreakBeforeBraces: Custom
|
|
||||||
BreakBeforeTernaryOperators: true
|
BreakBeforeTernaryOperators: true
|
||||||
BreakConstructorInitializersBeforeComma: true
|
BreakConstructorInitializersBeforeComma: true
|
||||||
ColumnLimit: 80
|
ColumnLimit: 80
|
||||||
CommentPragmas: '^ IWYU pragma:'
|
CommentPragmas: "^ IWYU pragma:"
|
||||||
ConstructorInitializerAllOnOneLineOrOnePerLine: true
|
ConstructorInitializerAllOnOneLineOrOnePerLine: true
|
||||||
ConstructorInitializerIndentWidth: 4
|
ConstructorInitializerIndentWidth: 4
|
||||||
ContinuationIndentWidth: 4
|
ContinuationIndentWidth: 4
|
||||||
Cpp11BracedListStyle: true
|
Cpp11BracedListStyle: true
|
||||||
DerivePointerAlignment: false
|
DerivePointerAlignment: false
|
||||||
DisableFormat: false
|
DisableFormat: false
|
||||||
ExperimentalAutoDetectBinPacking: false
|
ExperimentalAutoDetectBinPacking: false
|
||||||
ForEachMacros: [ Q_FOREACH, BOOST_FOREACH ]
|
ForEachMacros: [Q_FOREACH, BOOST_FOREACH]
|
||||||
IncludeBlocks: Regroup
|
IncludeBlocks: Regroup
|
||||||
IncludeCategories:
|
IncludeCategories:
|
||||||
- Regex: '^<(test)/'
|
- Regex: "^<(test)/"
|
||||||
Priority: 0
|
Priority: 0
|
||||||
- Regex: '^<(xrpld)/'
|
- Regex: "^<(xrpld)/"
|
||||||
Priority: 1
|
Priority: 1
|
||||||
- Regex: '^<(xrpl)/'
|
- Regex: "^<(xrpl)/"
|
||||||
Priority: 2
|
Priority: 2
|
||||||
- Regex: '^<(boost)/'
|
- Regex: "^<(boost)/"
|
||||||
Priority: 3
|
Priority: 3
|
||||||
- Regex: '^.*/'
|
- Regex: "^.*/"
|
||||||
Priority: 4
|
Priority: 4
|
||||||
- Regex: '^.*\.h'
|
- Regex: '^.*\.h'
|
||||||
Priority: 5
|
Priority: 5
|
||||||
- Regex: '.*'
|
- Regex: ".*"
|
||||||
Priority: 6
|
Priority: 6
|
||||||
IncludeIsMainRegex: '$'
|
IncludeIsMainRegex: "$"
|
||||||
IndentCaseLabels: true
|
IndentCaseLabels: true
|
||||||
IndentFunctionDeclarationAfterType: false
|
IndentFunctionDeclarationAfterType: false
|
||||||
IndentRequiresClause: true
|
IndentRequiresClause: true
|
||||||
IndentWidth: 4
|
IndentWidth: 4
|
||||||
IndentWrappedFunctionNames: false
|
IndentWrappedFunctionNames: false
|
||||||
KeepEmptyLinesAtTheStartOfBlocks: false
|
|
||||||
MaxEmptyLinesToKeep: 1
|
|
||||||
NamespaceIndentation: None
|
NamespaceIndentation: None
|
||||||
ObjCSpaceAfterProperty: false
|
ObjCSpaceAfterProperty: false
|
||||||
ObjCSpaceBeforeProtocolList: false
|
ObjCSpaceBeforeProtocolList: false
|
||||||
@@ -78,20 +79,25 @@ PenaltyBreakString: 1000
|
|||||||
PenaltyExcessCharacter: 1000000
|
PenaltyExcessCharacter: 1000000
|
||||||
PenaltyReturnTypeOnItsOwnLine: 200
|
PenaltyReturnTypeOnItsOwnLine: 200
|
||||||
PointerAlignment: Left
|
PointerAlignment: Left
|
||||||
ReflowComments: true
|
ReflowComments: true
|
||||||
RequiresClausePosition: OwnLine
|
RequiresClausePosition: OwnLine
|
||||||
SortIncludes: true
|
SortIncludes: true
|
||||||
SpaceAfterCStyleCast: false
|
SpaceAfterCStyleCast: false
|
||||||
SpaceBeforeAssignmentOperators: true
|
SpaceBeforeAssignmentOperators: true
|
||||||
SpaceBeforeParens: ControlStatements
|
SpaceBeforeParens: ControlStatements
|
||||||
SpaceInEmptyParentheses: false
|
SpaceInEmptyParentheses: false
|
||||||
SpacesBeforeTrailingComments: 2
|
SpacesBeforeTrailingComments: 2
|
||||||
SpacesInAngles: false
|
SpacesInAngles: false
|
||||||
SpacesInContainerLiterals: true
|
SpacesInContainerLiterals: true
|
||||||
SpacesInCStyleCastParentheses: false
|
SpacesInCStyleCastParentheses: false
|
||||||
SpacesInParentheses: false
|
SpacesInParentheses: false
|
||||||
SpacesInSquareBrackets: false
|
SpacesInSquareBrackets: false
|
||||||
Standard: Cpp11
|
Standard: Cpp11
|
||||||
TabWidth: 8
|
TabWidth: 8
|
||||||
UseTab: Never
|
UseTab: Never
|
||||||
QualifierAlignment: Right
|
QualifierAlignment: Right
|
||||||
|
---
|
||||||
|
Language: Proto
|
||||||
|
BasedOnStyle: Google
|
||||||
|
ColumnLimit: 0
|
||||||
|
IndentWidth: 2
|
||||||
|
|||||||
@@ -27,11 +27,12 @@ github_checks:
|
|||||||
parsers:
|
parsers:
|
||||||
cobertura:
|
cobertura:
|
||||||
partials_as_hits: true
|
partials_as_hits: true
|
||||||
handle_missing_conditions : true
|
handle_missing_conditions: true
|
||||||
|
|
||||||
slack_app: false
|
slack_app: false
|
||||||
|
|
||||||
ignore:
|
ignore:
|
||||||
- "src/test/"
|
- "src/test/"
|
||||||
|
- "src/tests/"
|
||||||
- "include/xrpl/beast/test/"
|
- "include/xrpl/beast/test/"
|
||||||
- "include/xrpl/beast/unit_test/"
|
- "include/xrpl/beast/unit_test/"
|
||||||
|
|||||||
@@ -11,3 +11,6 @@ b9d007813378ad0ff45660dc07285b823c7e9855
|
|||||||
fe9a5365b8a52d4acc42eb27369247e6f238a4f9
|
fe9a5365b8a52d4acc42eb27369247e6f238a4f9
|
||||||
9a93577314e6a8d4b4a8368cc9d2b15a5d8303e8
|
9a93577314e6a8d4b4a8368cc9d2b15a5d8303e8
|
||||||
552377c76f55b403a1c876df873a23d780fcc81c
|
552377c76f55b403a1c876df873a23d780fcc81c
|
||||||
|
97f0747e103f13e26e45b731731059b32f7679ac
|
||||||
|
b13370ac0d207217354f1fc1c29aef87769fb8a1
|
||||||
|
896b8c3b54a22b0497cb0d1ce95e1095f9a227ce
|
||||||
|
|||||||
13
.github/ISSUE_TEMPLATE/bug_report.md
vendored
13
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -2,30 +2,35 @@
|
|||||||
name: Bug Report
|
name: Bug Report
|
||||||
about: Create a report to help us improve rippled
|
about: Create a report to help us improve rippled
|
||||||
title: "[Title with short description] (Version: [rippled version])"
|
title: "[Title with short description] (Version: [rippled version])"
|
||||||
labels: ''
|
labels: ""
|
||||||
assignees: ''
|
assignees: ""
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
<!-- Please search existing issues to avoid creating duplicates.-->
|
<!-- Please search existing issues to avoid creating duplicates.-->
|
||||||
|
|
||||||
## Issue Description
|
## Issue Description
|
||||||
|
|
||||||
<!--Provide a summary for your issue/bug.-->
|
<!--Provide a summary for your issue/bug.-->
|
||||||
|
|
||||||
## Steps to Reproduce
|
## Steps to Reproduce
|
||||||
|
|
||||||
<!--List in detail the exact steps to reproduce the unexpected behavior of the software.-->
|
<!--List in detail the exact steps to reproduce the unexpected behavior of the software.-->
|
||||||
|
|
||||||
## Expected Result
|
## Expected Result
|
||||||
|
|
||||||
<!--Explain in detail what behavior you expected to happen.-->
|
<!--Explain in detail what behavior you expected to happen.-->
|
||||||
|
|
||||||
## Actual Result
|
## Actual Result
|
||||||
|
|
||||||
<!--Explain in detail what behavior actually happened.-->
|
<!--Explain in detail what behavior actually happened.-->
|
||||||
|
|
||||||
## Environment
|
## Environment
|
||||||
|
|
||||||
<!--Please describe your environment setup (such as Ubuntu 18.04 with Boost 1.70).-->
|
<!--Please describe your environment setup (such as Ubuntu 18.04 with Boost 1.70).-->
|
||||||
<!-- If you are using a formal release, please use the version returned by './rippled --version' as the version number-->
|
<!-- If you are using a formal release, please use the version returned by './rippled --version' as the version number-->
|
||||||
<!-- If you are working off of develop, please add the git hash via 'git rev-parse HEAD'-->
|
<!-- If you are working off of develop, please add the git hash via 'git rev-parse HEAD'-->
|
||||||
|
|
||||||
## Supporting Files
|
## Supporting Files
|
||||||
|
|
||||||
<!--If you have supporting files such as a log, feel free to post a link here using Github Gist.-->
|
<!--If you have supporting files such as a log, feel free to post a link here using Github Gist.-->
|
||||||
<!--Consider adding configuration files with private information removed via Github Gist. -->
|
<!--Consider adding configuration files with private information removed via Github Gist. -->
|
||||||
|
|
||||||
|
|||||||
8
.github/ISSUE_TEMPLATE/feature_request.md
vendored
8
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -3,19 +3,23 @@ name: Feature Request
|
|||||||
about: Suggest a new feature for the rippled project
|
about: Suggest a new feature for the rippled project
|
||||||
title: "[Title with short description] (Version: [rippled version])"
|
title: "[Title with short description] (Version: [rippled version])"
|
||||||
labels: Feature Request
|
labels: Feature Request
|
||||||
assignees: ''
|
assignees: ""
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
<!-- Please search existing issues to avoid creating duplicates.-->
|
<!-- Please search existing issues to avoid creating duplicates.-->
|
||||||
|
|
||||||
## Summary
|
## Summary
|
||||||
|
|
||||||
<!-- Provide a summary to the feature request-->
|
<!-- Provide a summary to the feature request-->
|
||||||
|
|
||||||
## Motivation
|
## Motivation
|
||||||
|
|
||||||
<!-- Why do we need this feature?-->
|
<!-- Why do we need this feature?-->
|
||||||
|
|
||||||
## Solution
|
## Solution
|
||||||
|
|
||||||
<!-- What is the solution?-->
|
<!-- What is the solution?-->
|
||||||
|
|
||||||
## Paths Not Taken
|
## Paths Not Taken
|
||||||
|
|
||||||
<!-- What other alternatives have been considered?-->
|
<!-- What other alternatives have been considered?-->
|
||||||
|
|||||||
49
.github/actions/build-deps/action.yml
vendored
Normal file
49
.github/actions/build-deps/action.yml
vendored
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
name: Build Conan dependencies
|
||||||
|
description: "Install Conan dependencies, optionally forcing a rebuild of all dependencies."
|
||||||
|
|
||||||
|
# Note that actions do not support 'type' and all inputs are strings, see
|
||||||
|
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
|
||||||
|
inputs:
|
||||||
|
build_dir:
|
||||||
|
description: "The directory where to build."
|
||||||
|
required: true
|
||||||
|
build_type:
|
||||||
|
description: 'The build type to use ("Debug", "Release").'
|
||||||
|
required: true
|
||||||
|
build_nproc:
|
||||||
|
description: "The number of processors to use for building."
|
||||||
|
required: true
|
||||||
|
force_build:
|
||||||
|
description: 'Force building of all dependencies ("true", "false").'
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
log_verbosity:
|
||||||
|
description: "The logging verbosity."
|
||||||
|
required: false
|
||||||
|
default: "verbose"
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: composite
|
||||||
|
steps:
|
||||||
|
- name: Install Conan dependencies
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
BUILD_DIR: ${{ inputs.build_dir }}
|
||||||
|
BUILD_NPROC: ${{ inputs.build_nproc }}
|
||||||
|
BUILD_OPTION: ${{ inputs.force_build == 'true' && '*' || 'missing' }}
|
||||||
|
BUILD_TYPE: ${{ inputs.build_type }}
|
||||||
|
LOG_VERBOSITY: ${{ inputs.log_verbosity }}
|
||||||
|
run: |
|
||||||
|
echo 'Installing dependencies.'
|
||||||
|
mkdir -p "${BUILD_DIR}"
|
||||||
|
cd "${BUILD_DIR}"
|
||||||
|
conan install \
|
||||||
|
--output-folder . \
|
||||||
|
--build="${BUILD_OPTION}" \
|
||||||
|
--options:host='&:tests=True' \
|
||||||
|
--options:host='&:xrpld=True' \
|
||||||
|
--settings:all build_type="${BUILD_TYPE}" \
|
||||||
|
--conf:all tools.build:jobs=${BUILD_NPROC} \
|
||||||
|
--conf:all tools.build:verbosity="${LOG_VERBOSITY}" \
|
||||||
|
--conf:all tools.compilation:verbosity="${LOG_VERBOSITY}" \
|
||||||
|
..
|
||||||
34
.github/actions/build/action.yml
vendored
34
.github/actions/build/action.yml
vendored
@@ -1,34 +0,0 @@
|
|||||||
name: build
|
|
||||||
inputs:
|
|
||||||
generator:
|
|
||||||
default: null
|
|
||||||
configuration:
|
|
||||||
required: true
|
|
||||||
cmake-args:
|
|
||||||
default: null
|
|
||||||
cmake-target:
|
|
||||||
default: all
|
|
||||||
# An implicit input is the environment variable `build_dir`.
|
|
||||||
runs:
|
|
||||||
using: composite
|
|
||||||
steps:
|
|
||||||
- name: configure
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
cd ${build_dir}
|
|
||||||
cmake \
|
|
||||||
${{ inputs.generator && format('-G "{0}"', inputs.generator) || '' }} \
|
|
||||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
|
|
||||||
-DCMAKE_BUILD_TYPE=${{ inputs.configuration }} \
|
|
||||||
-Dtests=TRUE \
|
|
||||||
-Dxrpld=TRUE \
|
|
||||||
${{ inputs.cmake-args }} \
|
|
||||||
..
|
|
||||||
- name: build
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
cmake \
|
|
||||||
--build ${build_dir} \
|
|
||||||
--config ${{ inputs.configuration }} \
|
|
||||||
--parallel ${NUM_PROCESSORS:-$(nproc)} \
|
|
||||||
--target ${{ inputs.cmake-target }}
|
|
||||||
57
.github/actions/dependencies/action.yml
vendored
57
.github/actions/dependencies/action.yml
vendored
@@ -1,57 +0,0 @@
|
|||||||
name: dependencies
|
|
||||||
inputs:
|
|
||||||
configuration:
|
|
||||||
required: true
|
|
||||||
# An implicit input is the environment variable `build_dir`.
|
|
||||||
runs:
|
|
||||||
using: composite
|
|
||||||
steps:
|
|
||||||
- name: unlock Conan
|
|
||||||
shell: bash
|
|
||||||
run: conan remove --locks
|
|
||||||
- name: export custom recipes
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
conan config set general.revisions_enabled=1
|
|
||||||
conan export external/snappy snappy/1.1.10@
|
|
||||||
conan export external/rocksdb rocksdb/9.7.3@
|
|
||||||
conan export external/soci soci/4.0.3@
|
|
||||||
conan export external/nudb nudb/2.0.8@
|
|
||||||
- name: add Ripple Conan remote
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
conan remote list
|
|
||||||
conan remote remove ripple || true
|
|
||||||
# Do not quote the URL. An empty string will be accepted (with
|
|
||||||
# a non-fatal warning), but a missing argument will not.
|
|
||||||
conan remote add ripple ${{ env.CONAN_URL }} --insert 0
|
|
||||||
- name: try to authenticate to Ripple Conan remote
|
|
||||||
id: remote
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
# `conan user` implicitly uses the environment variables
|
|
||||||
# CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
|
|
||||||
# https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
|
|
||||||
# https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
|
|
||||||
# https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
|
|
||||||
echo outcome=$(conan user --remote ripple --password >&2 \
|
|
||||||
&& echo success || echo failure) | tee ${GITHUB_OUTPUT}
|
|
||||||
- name: list missing binaries
|
|
||||||
id: binaries
|
|
||||||
shell: bash
|
|
||||||
# Print the list of dependencies that would need to be built locally.
|
|
||||||
# A non-empty list means we have "failed" to cache binaries remotely.
|
|
||||||
run: |
|
|
||||||
echo missing=$(conan info . --build missing --settings build_type=${{ inputs.configuration }} --json 2>/dev/null | grep '^\[') | tee ${GITHUB_OUTPUT}
|
|
||||||
- name: install dependencies
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir ${build_dir}
|
|
||||||
cd ${build_dir}
|
|
||||||
conan install \
|
|
||||||
--output-folder . \
|
|
||||||
--build missing \
|
|
||||||
--options tests=True \
|
|
||||||
--options xrpld=True \
|
|
||||||
--settings build_type=${{ inputs.configuration }} \
|
|
||||||
..
|
|
||||||
43
.github/actions/print-env/action.yml
vendored
Normal file
43
.github/actions/print-env/action.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
name: Print build environment
|
||||||
|
description: "Print environment and some tooling versions"
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: composite
|
||||||
|
steps:
|
||||||
|
- name: Check configuration (Windows)
|
||||||
|
if: ${{ runner.os == 'Windows' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo 'Checking environment variables.'
|
||||||
|
set
|
||||||
|
|
||||||
|
echo 'Checking CMake version.'
|
||||||
|
cmake --version
|
||||||
|
|
||||||
|
echo 'Checking Conan version.'
|
||||||
|
conan --version
|
||||||
|
|
||||||
|
- name: Check configuration (Linux and macOS)
|
||||||
|
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo 'Checking path.'
|
||||||
|
echo ${PATH} | tr ':' '\n'
|
||||||
|
|
||||||
|
echo 'Checking environment variables.'
|
||||||
|
env | sort
|
||||||
|
|
||||||
|
echo 'Checking CMake version.'
|
||||||
|
cmake --version
|
||||||
|
|
||||||
|
echo 'Checking compiler version.'
|
||||||
|
${{ runner.os == 'Linux' && '${CC}' || 'clang' }} --version
|
||||||
|
|
||||||
|
echo 'Checking Conan version.'
|
||||||
|
conan --version
|
||||||
|
|
||||||
|
echo 'Checking Ninja version.'
|
||||||
|
ninja --version
|
||||||
|
|
||||||
|
echo 'Checking nproc version.'
|
||||||
|
nproc --version
|
||||||
46
.github/actions/setup-conan/action.yml
vendored
Normal file
46
.github/actions/setup-conan/action.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
name: Setup Conan
|
||||||
|
description: "Set up Conan configuration, profile, and remote."
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
conan_remote_name:
|
||||||
|
description: "The name of the Conan remote to use."
|
||||||
|
required: false
|
||||||
|
default: xrplf
|
||||||
|
conan_remote_url:
|
||||||
|
description: "The URL of the Conan endpoint to use."
|
||||||
|
required: false
|
||||||
|
default: https://conan.ripplex.io
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: composite
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Set up Conan configuration
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo 'Installing configuration.'
|
||||||
|
cat conan/global.conf ${{ runner.os == 'Linux' && '>>' || '>' }} $(conan config home)/global.conf
|
||||||
|
|
||||||
|
echo 'Conan configuration:'
|
||||||
|
conan config show '*'
|
||||||
|
|
||||||
|
- name: Set up Conan profile
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo 'Installing profile.'
|
||||||
|
conan config install conan/profiles/default -tf $(conan config home)/profiles/
|
||||||
|
|
||||||
|
echo 'Conan profile:'
|
||||||
|
conan profile show
|
||||||
|
|
||||||
|
- name: Set up Conan remote
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||||
|
CONAN_REMOTE_URL: ${{ inputs.conan_remote_url }}
|
||||||
|
run: |
|
||||||
|
echo "Adding Conan remote '${CONAN_REMOTE_NAME}' at '${CONAN_REMOTE_URL}'."
|
||||||
|
conan remote add --index 0 --force "${CONAN_REMOTE_NAME}" "${CONAN_REMOTE_URL}"
|
||||||
|
|
||||||
|
echo 'Listing Conan remotes.'
|
||||||
|
conan remote list
|
||||||
@@ -25,32 +25,32 @@ more dependencies listed later.
|
|||||||
**tl;dr:** The modules listed first are more independent than the modules
|
**tl;dr:** The modules listed first are more independent than the modules
|
||||||
listed later.
|
listed later.
|
||||||
|
|
||||||
| Level / Tier | Module(s) |
|
| Level / Tier | Module(s) |
|
||||||
|--------------|-----------------------------------------------|
|
| ------------ | -------------------------------------------------------------------------------------------------------- |
|
||||||
| 01 | ripple/beast ripple/unity
|
| 01 | ripple/beast ripple/unity |
|
||||||
| 02 | ripple/basics
|
| 02 | ripple/basics |
|
||||||
| 03 | ripple/json ripple/crypto
|
| 03 | ripple/json ripple/crypto |
|
||||||
| 04 | ripple/protocol
|
| 04 | ripple/protocol |
|
||||||
| 05 | ripple/core ripple/conditions ripple/consensus ripple/resource ripple/server
|
| 05 | ripple/core ripple/conditions ripple/consensus ripple/resource ripple/server |
|
||||||
| 06 | ripple/peerfinder ripple/ledger ripple/nodestore ripple/net
|
| 06 | ripple/peerfinder ripple/ledger ripple/nodestore ripple/net |
|
||||||
| 07 | ripple/shamap ripple/overlay
|
| 07 | ripple/shamap ripple/overlay |
|
||||||
| 08 | ripple/app
|
| 08 | ripple/app |
|
||||||
| 09 | ripple/rpc
|
| 09 | ripple/rpc |
|
||||||
| 10 | ripple/perflog
|
| 10 | ripple/perflog |
|
||||||
| 11 | test/jtx test/beast test/csf
|
| 11 | test/jtx test/beast test/csf |
|
||||||
| 12 | test/unit_test
|
| 12 | test/unit_test |
|
||||||
| 13 | test/crypto test/conditions test/json test/resource test/shamap test/peerfinder test/basics test/overlay
|
| 13 | test/crypto test/conditions test/json test/resource test/shamap test/peerfinder test/basics test/overlay |
|
||||||
| 14 | test
|
| 14 | test |
|
||||||
| 15 | test/net test/protocol test/ledger test/consensus test/core test/server test/nodestore
|
| 15 | test/net test/protocol test/ledger test/consensus test/core test/server test/nodestore |
|
||||||
| 16 | test/rpc test/app
|
| 16 | test/rpc test/app |
|
||||||
|
|
||||||
(Note that `test` levelization is *much* less important and *much* less
|
(Note that `test` levelization is _much_ less important and _much_ less
|
||||||
strictly enforced than `ripple` levelization, other than the requirement
|
strictly enforced than `ripple` levelization, other than the requirement
|
||||||
that `test` code should *never* be included in `ripple` code.)
|
that `test` code should _never_ be included in `ripple` code.)
|
||||||
|
|
||||||
## Validation
|
## Validation
|
||||||
|
|
||||||
The [levelization.sh](levelization.sh) script takes no parameters,
|
The [levelization](generate.sh) script takes no parameters,
|
||||||
reads no environment variables, and can be run from any directory,
|
reads no environment variables, and can be run from any directory,
|
||||||
as long as it is in the expected location in the rippled repo.
|
as long as it is in the expected location in the rippled repo.
|
||||||
It can be run at any time from within a checked out repo, and will
|
It can be run at any time from within a checked out repo, and will
|
||||||
@@ -59,48 +59,48 @@ the rippled source. The only caveat is that it runs much slower
|
|||||||
under Windows than in Linux. It hasn't yet been tested under MacOS.
|
under Windows than in Linux. It hasn't yet been tested under MacOS.
|
||||||
It generates many files of [results](results):
|
It generates many files of [results](results):
|
||||||
|
|
||||||
* `rawincludes.txt`: The raw dump of the `#includes`
|
- `rawincludes.txt`: The raw dump of the `#includes`
|
||||||
* `paths.txt`: A second dump grouping the source module
|
- `paths.txt`: A second dump grouping the source module
|
||||||
to the destination module, deduped, and with frequency counts.
|
to the destination module, deduped, and with frequency counts.
|
||||||
* `includes/`: A directory where each file represents a module and
|
- `includes/`: A directory where each file represents a module and
|
||||||
contains a list of modules and counts that the module _includes_.
|
contains a list of modules and counts that the module _includes_.
|
||||||
* `includedby/`: Similar to `includes/`, but the other way around. Each
|
- `includedby/`: Similar to `includes/`, but the other way around. Each
|
||||||
file represents a module and contains a list of modules and counts
|
file represents a module and contains a list of modules and counts
|
||||||
that _include_ the module.
|
that _include_ the module.
|
||||||
* [`loops.txt`](results/loops.txt): A list of direct loops detected
|
- [`loops.txt`](results/loops.txt): A list of direct loops detected
|
||||||
between modules as they actually exist, as opposed to how they are
|
between modules as they actually exist, as opposed to how they are
|
||||||
desired as described above. In a perfect repo, this file will be
|
desired as described above. In a perfect repo, this file will be
|
||||||
empty.
|
empty.
|
||||||
This file is committed to the repo, and is used by the [levelization
|
This file is committed to the repo, and is used by the [levelization
|
||||||
Github workflow](../../.github/workflows/levelization.yml) to validate
|
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
|
||||||
that nothing changed.
|
that nothing changed.
|
||||||
* [`ordering.txt`](results/ordering.txt): A list showing relationships
|
- [`ordering.txt`](results/ordering.txt): A list showing relationships
|
||||||
between modules where there are no loops as they actually exist, as
|
between modules where there are no loops as they actually exist, as
|
||||||
opposed to how they are desired as described above.
|
opposed to how they are desired as described above.
|
||||||
This file is committed to the repo, and is used by the [levelization
|
This file is committed to the repo, and is used by the [levelization
|
||||||
Github workflow](../../.github/workflows/levelization.yml) to validate
|
Github workflow](../../workflows/reusable-check-levelization.yml) to validate
|
||||||
that nothing changed.
|
that nothing changed.
|
||||||
* [`levelization.yml`](../../.github/workflows/levelization.yml)
|
- [`levelization.yml`](../../workflows/reusable-check-levelization.yml)
|
||||||
Github Actions workflow to test that levelization loops haven't
|
Github Actions workflow to test that levelization loops haven't
|
||||||
changed. Unfortunately, if changes are detected, it can't tell if
|
changed. Unfortunately, if changes are detected, it can't tell if
|
||||||
they are improvements or not, so if you have resolved any issues or
|
they are improvements or not, so if you have resolved any issues or
|
||||||
done anything else to improve levelization, run `levelization.sh`,
|
done anything else to improve levelization, run `levelization.sh`,
|
||||||
and commit the updated results.
|
and commit the updated results.
|
||||||
|
|
||||||
The `loops.txt` and `ordering.txt` files relate the modules
|
The `loops.txt` and `ordering.txt` files relate the modules
|
||||||
using comparison signs, which indicate the number of times each
|
using comparison signs, which indicate the number of times each
|
||||||
module is included in the other.
|
module is included in the other.
|
||||||
|
|
||||||
* `A > B` means that A should probably be at a higher level than B,
|
- `A > B` means that A should probably be at a higher level than B,
|
||||||
because B is included in A significantly more than A is included in B.
|
because B is included in A significantly more than A is included in B.
|
||||||
These results can be included in both `loops.txt` and `ordering.txt`.
|
These results can be included in both `loops.txt` and `ordering.txt`.
|
||||||
Because `ordering.txt`only includes relationships where B is not
|
Because `ordering.txt`only includes relationships where B is not
|
||||||
included in A at all, it will only include these types of results.
|
included in A at all, it will only include these types of results.
|
||||||
* `A ~= B` means that A and B are included in each other a different
|
- `A ~= B` means that A and B are included in each other a different
|
||||||
number of times, but the values are so close that the script can't
|
number of times, but the values are so close that the script can't
|
||||||
definitively say that one should be above the other. These results
|
definitively say that one should be above the other. These results
|
||||||
will only be included in `loops.txt`.
|
will only be included in `loops.txt`.
|
||||||
* `A == B` means that A and B include each other the same number of
|
- `A == B` means that A and B include each other the same number of
|
||||||
times, so the script has no clue which should be higher. These results
|
times, so the script has no clue which should be higher. These results
|
||||||
will only be included in `loops.txt`.
|
will only be included in `loops.txt`.
|
||||||
|
|
||||||
@@ -110,5 +110,5 @@ get those details locally.
|
|||||||
|
|
||||||
1. Run `levelization.sh`
|
1. Run `levelization.sh`
|
||||||
2. Grep the modules in `paths.txt`.
|
2. Grep the modules in `paths.txt`.
|
||||||
* For example, if a cycle is found `A ~= B`, simply `grep -w
|
- For example, if a cycle is found `A ~= B`, simply `grep -w
|
||||||
A Builds/levelization/results/paths.txt | grep -w B`
|
A .github/scripts/levelization/results/paths.txt | grep -w B`
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
# Usage: levelization.sh
|
# Usage: generate.sh
|
||||||
# This script takes no parameters, reads no environment variables,
|
# This script takes no parameters, reads no environment variables,
|
||||||
# and can be run from any directory, as long as it is in the expected
|
# and can be run from any directory, as long as it is in the expected
|
||||||
# location in the repo.
|
# location in the repo.
|
||||||
@@ -19,7 +19,7 @@ export LANG=C
|
|||||||
rm -rfv results
|
rm -rfv results
|
||||||
mkdir results
|
mkdir results
|
||||||
includes="$( pwd )/results/rawincludes.txt"
|
includes="$( pwd )/results/rawincludes.txt"
|
||||||
pushd ../..
|
pushd ../../..
|
||||||
echo Raw includes:
|
echo Raw includes:
|
||||||
grep -r '^[ ]*#include.*/.*\.h' include src | \
|
grep -r '^[ ]*#include.*/.*\.h' include src | \
|
||||||
grep -v boost | tee ${includes}
|
grep -v boost | tee ${includes}
|
||||||
@@ -7,12 +7,6 @@ Loop: test.jtx test.unit_test
|
|||||||
Loop: xrpld.app xrpld.core
|
Loop: xrpld.app xrpld.core
|
||||||
xrpld.app > xrpld.core
|
xrpld.app > xrpld.core
|
||||||
|
|
||||||
Loop: xrpld.app xrpld.ledger
|
|
||||||
xrpld.app > xrpld.ledger
|
|
||||||
|
|
||||||
Loop: xrpld.app xrpld.net
|
|
||||||
xrpld.app > xrpld.net
|
|
||||||
|
|
||||||
Loop: xrpld.app xrpld.overlay
|
Loop: xrpld.app xrpld.overlay
|
||||||
xrpld.overlay > xrpld.app
|
xrpld.overlay > xrpld.app
|
||||||
|
|
||||||
@@ -23,17 +17,11 @@ Loop: xrpld.app xrpld.rpc
|
|||||||
xrpld.rpc > xrpld.app
|
xrpld.rpc > xrpld.app
|
||||||
|
|
||||||
Loop: xrpld.app xrpld.shamap
|
Loop: xrpld.app xrpld.shamap
|
||||||
xrpld.app > xrpld.shamap
|
xrpld.shamap ~= xrpld.app
|
||||||
|
|
||||||
Loop: xrpld.core xrpld.net
|
|
||||||
xrpld.net > xrpld.core
|
|
||||||
|
|
||||||
Loop: xrpld.core xrpld.perflog
|
Loop: xrpld.core xrpld.perflog
|
||||||
xrpld.perflog == xrpld.core
|
xrpld.perflog == xrpld.core
|
||||||
|
|
||||||
Loop: xrpld.net xrpld.rpc
|
|
||||||
xrpld.rpc ~= xrpld.net
|
|
||||||
|
|
||||||
Loop: xrpld.overlay xrpld.rpc
|
Loop: xrpld.overlay xrpld.rpc
|
||||||
xrpld.rpc ~= xrpld.overlay
|
xrpld.rpc ~= xrpld.overlay
|
||||||
|
|
||||||
@@ -2,6 +2,16 @@ libxrpl.basics > xrpl.basics
|
|||||||
libxrpl.crypto > xrpl.basics
|
libxrpl.crypto > xrpl.basics
|
||||||
libxrpl.json > xrpl.basics
|
libxrpl.json > xrpl.basics
|
||||||
libxrpl.json > xrpl.json
|
libxrpl.json > xrpl.json
|
||||||
|
libxrpl.ledger > xrpl.basics
|
||||||
|
libxrpl.ledger > xrpl.json
|
||||||
|
libxrpl.ledger > xrpl.ledger
|
||||||
|
libxrpl.ledger > xrpl.protocol
|
||||||
|
libxrpl.net > xrpl.basics
|
||||||
|
libxrpl.net > xrpl.net
|
||||||
|
libxrpl.nodestore > xrpl.basics
|
||||||
|
libxrpl.nodestore > xrpl.json
|
||||||
|
libxrpl.nodestore > xrpl.nodestore
|
||||||
|
libxrpl.nodestore > xrpl.protocol
|
||||||
libxrpl.protocol > xrpl.basics
|
libxrpl.protocol > xrpl.basics
|
||||||
libxrpl.protocol > xrpl.json
|
libxrpl.protocol > xrpl.json
|
||||||
libxrpl.protocol > xrpl.protocol
|
libxrpl.protocol > xrpl.protocol
|
||||||
@@ -12,6 +22,9 @@ libxrpl.server > xrpl.basics
|
|||||||
libxrpl.server > xrpl.json
|
libxrpl.server > xrpl.json
|
||||||
libxrpl.server > xrpl.protocol
|
libxrpl.server > xrpl.protocol
|
||||||
libxrpl.server > xrpl.server
|
libxrpl.server > xrpl.server
|
||||||
|
libxrpl.shamap > xrpl.basics
|
||||||
|
libxrpl.shamap > xrpl.protocol
|
||||||
|
libxrpl.shamap > xrpl.shamap
|
||||||
test.app > test.jtx
|
test.app > test.jtx
|
||||||
test.app > test.rpc
|
test.app > test.rpc
|
||||||
test.app > test.toplevel
|
test.app > test.toplevel
|
||||||
@@ -19,11 +32,11 @@ test.app > test.unit_test
|
|||||||
test.app > xrpl.basics
|
test.app > xrpl.basics
|
||||||
test.app > xrpld.app
|
test.app > xrpld.app
|
||||||
test.app > xrpld.core
|
test.app > xrpld.core
|
||||||
test.app > xrpld.ledger
|
|
||||||
test.app > xrpld.nodestore
|
|
||||||
test.app > xrpld.overlay
|
test.app > xrpld.overlay
|
||||||
test.app > xrpld.rpc
|
test.app > xrpld.rpc
|
||||||
test.app > xrpl.json
|
test.app > xrpl.json
|
||||||
|
test.app > xrpl.ledger
|
||||||
|
test.app > xrpl.nodestore
|
||||||
test.app > xrpl.protocol
|
test.app > xrpl.protocol
|
||||||
test.app > xrpl.resource
|
test.app > xrpl.resource
|
||||||
test.basics > test.jtx
|
test.basics > test.jtx
|
||||||
@@ -42,8 +55,8 @@ test.consensus > test.unit_test
|
|||||||
test.consensus > xrpl.basics
|
test.consensus > xrpl.basics
|
||||||
test.consensus > xrpld.app
|
test.consensus > xrpld.app
|
||||||
test.consensus > xrpld.consensus
|
test.consensus > xrpld.consensus
|
||||||
test.consensus > xrpld.ledger
|
|
||||||
test.consensus > xrpl.json
|
test.consensus > xrpl.json
|
||||||
|
test.consensus > xrpl.ledger
|
||||||
test.core > test.jtx
|
test.core > test.jtx
|
||||||
test.core > test.toplevel
|
test.core > test.toplevel
|
||||||
test.core > test.unit_test
|
test.core > test.unit_test
|
||||||
@@ -61,10 +74,10 @@ test.json > xrpl.json
|
|||||||
test.jtx > xrpl.basics
|
test.jtx > xrpl.basics
|
||||||
test.jtx > xrpld.app
|
test.jtx > xrpld.app
|
||||||
test.jtx > xrpld.core
|
test.jtx > xrpld.core
|
||||||
test.jtx > xrpld.ledger
|
|
||||||
test.jtx > xrpld.net
|
|
||||||
test.jtx > xrpld.rpc
|
test.jtx > xrpld.rpc
|
||||||
test.jtx > xrpl.json
|
test.jtx > xrpl.json
|
||||||
|
test.jtx > xrpl.ledger
|
||||||
|
test.jtx > xrpl.net
|
||||||
test.jtx > xrpl.protocol
|
test.jtx > xrpl.protocol
|
||||||
test.jtx > xrpl.resource
|
test.jtx > xrpl.resource
|
||||||
test.jtx > xrpl.server
|
test.jtx > xrpl.server
|
||||||
@@ -73,15 +86,14 @@ test.ledger > test.toplevel
|
|||||||
test.ledger > xrpl.basics
|
test.ledger > xrpl.basics
|
||||||
test.ledger > xrpld.app
|
test.ledger > xrpld.app
|
||||||
test.ledger > xrpld.core
|
test.ledger > xrpld.core
|
||||||
test.ledger > xrpld.ledger
|
test.ledger > xrpl.ledger
|
||||||
test.ledger > xrpl.protocol
|
test.ledger > xrpl.protocol
|
||||||
test.nodestore > test.jtx
|
test.nodestore > test.jtx
|
||||||
test.nodestore > test.toplevel
|
test.nodestore > test.toplevel
|
||||||
test.nodestore > test.unit_test
|
test.nodestore > test.unit_test
|
||||||
test.nodestore > xrpl.basics
|
test.nodestore > xrpl.basics
|
||||||
test.nodestore > xrpld.core
|
test.nodestore > xrpld.core
|
||||||
test.nodestore > xrpld.nodestore
|
test.nodestore > xrpl.nodestore
|
||||||
test.nodestore > xrpld.unity
|
|
||||||
test.overlay > test.jtx
|
test.overlay > test.jtx
|
||||||
test.overlay > test.toplevel
|
test.overlay > test.toplevel
|
||||||
test.overlay > test.unit_test
|
test.overlay > test.unit_test
|
||||||
@@ -89,8 +101,8 @@ test.overlay > xrpl.basics
|
|||||||
test.overlay > xrpld.app
|
test.overlay > xrpld.app
|
||||||
test.overlay > xrpld.overlay
|
test.overlay > xrpld.overlay
|
||||||
test.overlay > xrpld.peerfinder
|
test.overlay > xrpld.peerfinder
|
||||||
test.overlay > xrpld.shamap
|
|
||||||
test.overlay > xrpl.protocol
|
test.overlay > xrpl.protocol
|
||||||
|
test.overlay > xrpl.shamap
|
||||||
test.peerfinder > test.beast
|
test.peerfinder > test.beast
|
||||||
test.peerfinder > test.unit_test
|
test.peerfinder > test.unit_test
|
||||||
test.peerfinder > xrpl.basics
|
test.peerfinder > xrpl.basics
|
||||||
@@ -109,7 +121,6 @@ test.rpc > test.toplevel
|
|||||||
test.rpc > xrpl.basics
|
test.rpc > xrpl.basics
|
||||||
test.rpc > xrpld.app
|
test.rpc > xrpld.app
|
||||||
test.rpc > xrpld.core
|
test.rpc > xrpld.core
|
||||||
test.rpc > xrpld.net
|
|
||||||
test.rpc > xrpld.overlay
|
test.rpc > xrpld.overlay
|
||||||
test.rpc > xrpld.rpc
|
test.rpc > xrpld.rpc
|
||||||
test.rpc > xrpl.json
|
test.rpc > xrpl.json
|
||||||
@@ -126,13 +137,21 @@ test.server > xrpl.json
|
|||||||
test.server > xrpl.server
|
test.server > xrpl.server
|
||||||
test.shamap > test.unit_test
|
test.shamap > test.unit_test
|
||||||
test.shamap > xrpl.basics
|
test.shamap > xrpl.basics
|
||||||
test.shamap > xrpld.nodestore
|
test.shamap > xrpl.nodestore
|
||||||
test.shamap > xrpld.shamap
|
|
||||||
test.shamap > xrpl.protocol
|
test.shamap > xrpl.protocol
|
||||||
|
test.shamap > xrpl.shamap
|
||||||
test.toplevel > test.csf
|
test.toplevel > test.csf
|
||||||
test.toplevel > xrpl.json
|
test.toplevel > xrpl.json
|
||||||
test.unit_test > xrpl.basics
|
test.unit_test > xrpl.basics
|
||||||
|
tests.libxrpl > xrpl.basics
|
||||||
|
tests.libxrpl > xrpl.json
|
||||||
|
tests.libxrpl > xrpl.net
|
||||||
xrpl.json > xrpl.basics
|
xrpl.json > xrpl.basics
|
||||||
|
xrpl.ledger > xrpl.basics
|
||||||
|
xrpl.ledger > xrpl.protocol
|
||||||
|
xrpl.net > xrpl.basics
|
||||||
|
xrpl.nodestore > xrpl.basics
|
||||||
|
xrpl.nodestore > xrpl.protocol
|
||||||
xrpl.protocol > xrpl.basics
|
xrpl.protocol > xrpl.basics
|
||||||
xrpl.protocol > xrpl.json
|
xrpl.protocol > xrpl.json
|
||||||
xrpl.resource > xrpl.basics
|
xrpl.resource > xrpl.basics
|
||||||
@@ -141,15 +160,21 @@ xrpl.resource > xrpl.protocol
|
|||||||
xrpl.server > xrpl.basics
|
xrpl.server > xrpl.basics
|
||||||
xrpl.server > xrpl.json
|
xrpl.server > xrpl.json
|
||||||
xrpl.server > xrpl.protocol
|
xrpl.server > xrpl.protocol
|
||||||
|
xrpl.shamap > xrpl.basics
|
||||||
|
xrpl.shamap > xrpl.nodestore
|
||||||
|
xrpl.shamap > xrpl.protocol
|
||||||
xrpld.app > test.unit_test
|
xrpld.app > test.unit_test
|
||||||
xrpld.app > xrpl.basics
|
xrpld.app > xrpl.basics
|
||||||
xrpld.app > xrpld.conditions
|
xrpld.app > xrpld.conditions
|
||||||
xrpld.app > xrpld.consensus
|
xrpld.app > xrpld.consensus
|
||||||
xrpld.app > xrpld.nodestore
|
|
||||||
xrpld.app > xrpld.perflog
|
xrpld.app > xrpld.perflog
|
||||||
xrpld.app > xrpl.json
|
xrpld.app > xrpl.json
|
||||||
|
xrpld.app > xrpl.ledger
|
||||||
|
xrpld.app > xrpl.net
|
||||||
|
xrpld.app > xrpl.nodestore
|
||||||
xrpld.app > xrpl.protocol
|
xrpld.app > xrpl.protocol
|
||||||
xrpld.app > xrpl.resource
|
xrpld.app > xrpl.resource
|
||||||
|
xrpld.app > xrpl.shamap
|
||||||
xrpld.conditions > xrpl.basics
|
xrpld.conditions > xrpl.basics
|
||||||
xrpld.conditions > xrpl.protocol
|
xrpld.conditions > xrpl.protocol
|
||||||
xrpld.consensus > xrpl.basics
|
xrpld.consensus > xrpl.basics
|
||||||
@@ -157,19 +182,8 @@ xrpld.consensus > xrpl.json
|
|||||||
xrpld.consensus > xrpl.protocol
|
xrpld.consensus > xrpl.protocol
|
||||||
xrpld.core > xrpl.basics
|
xrpld.core > xrpl.basics
|
||||||
xrpld.core > xrpl.json
|
xrpld.core > xrpl.json
|
||||||
|
xrpld.core > xrpl.net
|
||||||
xrpld.core > xrpl.protocol
|
xrpld.core > xrpl.protocol
|
||||||
xrpld.ledger > xrpl.basics
|
|
||||||
xrpld.ledger > xrpl.json
|
|
||||||
xrpld.ledger > xrpl.protocol
|
|
||||||
xrpld.net > xrpl.basics
|
|
||||||
xrpld.net > xrpl.json
|
|
||||||
xrpld.net > xrpl.protocol
|
|
||||||
xrpld.net > xrpl.resource
|
|
||||||
xrpld.nodestore > xrpl.basics
|
|
||||||
xrpld.nodestore > xrpld.core
|
|
||||||
xrpld.nodestore > xrpld.unity
|
|
||||||
xrpld.nodestore > xrpl.json
|
|
||||||
xrpld.nodestore > xrpl.protocol
|
|
||||||
xrpld.overlay > xrpl.basics
|
xrpld.overlay > xrpl.basics
|
||||||
xrpld.overlay > xrpld.core
|
xrpld.overlay > xrpld.core
|
||||||
xrpld.overlay > xrpld.peerfinder
|
xrpld.overlay > xrpld.peerfinder
|
||||||
@@ -185,12 +199,11 @@ xrpld.perflog > xrpl.basics
|
|||||||
xrpld.perflog > xrpl.json
|
xrpld.perflog > xrpl.json
|
||||||
xrpld.rpc > xrpl.basics
|
xrpld.rpc > xrpl.basics
|
||||||
xrpld.rpc > xrpld.core
|
xrpld.rpc > xrpld.core
|
||||||
xrpld.rpc > xrpld.ledger
|
|
||||||
xrpld.rpc > xrpld.nodestore
|
|
||||||
xrpld.rpc > xrpl.json
|
xrpld.rpc > xrpl.json
|
||||||
|
xrpld.rpc > xrpl.ledger
|
||||||
|
xrpld.rpc > xrpl.net
|
||||||
|
xrpld.rpc > xrpl.nodestore
|
||||||
xrpld.rpc > xrpl.protocol
|
xrpld.rpc > xrpl.protocol
|
||||||
xrpld.rpc > xrpl.resource
|
xrpld.rpc > xrpl.resource
|
||||||
xrpld.rpc > xrpl.server
|
xrpld.rpc > xrpl.server
|
||||||
xrpld.shamap > xrpl.basics
|
xrpld.shamap > xrpl.shamap
|
||||||
xrpld.shamap > xrpld.nodestore
|
|
||||||
xrpld.shamap > xrpl.protocol
|
|
||||||
40
.github/scripts/rename/README.md
vendored
Normal file
40
.github/scripts/rename/README.md
vendored
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
## Renaming ripple(d) to xrpl(d)
|
||||||
|
|
||||||
|
In the initial phases of development of the XRPL, the open source codebase was
|
||||||
|
called "rippled" and it remains with that name even today. Today, over 1000
|
||||||
|
nodes run the application, and code contributions have been submitted by
|
||||||
|
developers located around the world. The XRPL community is larger than ever.
|
||||||
|
In light of the decentralized and diversified nature of XRPL, we will rename any
|
||||||
|
references to `ripple` and `rippled` to `xrpl` and `xrpld`, when appropriate.
|
||||||
|
|
||||||
|
See [here](https://xls.xrpl.org/xls/XLS-0095-rename-rippled-to-xrpld.html) for
|
||||||
|
more information.
|
||||||
|
|
||||||
|
### Scripts
|
||||||
|
|
||||||
|
To facilitate this transition, there will be multiple scripts that developers
|
||||||
|
can run on their own PRs and forks to minimize conflicts. Each script should be
|
||||||
|
run from the repository root.
|
||||||
|
|
||||||
|
1. `.github/scripts/rename/definitions.sh`: This script will rename all
|
||||||
|
definitions, such as include guards, from `RIPPLE_XXX` and `RIPPLED_XXX` to
|
||||||
|
`XRPL_XXX`.
|
||||||
|
2. `.github/scripts/rename/copyright.sh`: This script will remove superflous
|
||||||
|
copyright notices.
|
||||||
|
3. `.github/scripts/rename/cmake.sh`: This script will rename all CMake files
|
||||||
|
from `RippleXXX.cmake` or `RippledXXX.cmake` to `XrplXXX.cmake`, and any
|
||||||
|
references to `ripple` and `rippled` (with or without capital letters) to
|
||||||
|
`xrpl` and `xrpld`, respectively. The name of the binary will remain as-is,
|
||||||
|
and will only be renamed to `xrpld` by a later script.
|
||||||
|
4. `.github/scripts/rename/binary.sh`: This script will rename the binary from
|
||||||
|
`rippled` to `xrpld`, and create a symlink named `rippled` that points to the
|
||||||
|
`xrpld` binary.
|
||||||
|
|
||||||
|
You can run all these scripts from the repository root as follows:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
./.github/scripts/rename/definitions.sh .
|
||||||
|
./.github/scripts/rename/copyright.sh .
|
||||||
|
./.github/scripts/rename/cmake.sh .
|
||||||
|
./.github/scripts/rename/binary.sh .
|
||||||
|
```
|
||||||
41
.github/scripts/rename/binary.sh
vendored
Executable file
41
.github/scripts/rename/binary.sh
vendored
Executable file
@@ -0,0 +1,41 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Exit the script as soon as an error occurs.
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# On MacOS, ensure that GNU sed is installed and available as `gsed`.
|
||||||
|
SED_COMMAND=sed
|
||||||
|
if [[ "${OSTYPE}" == 'darwin'* ]]; then
|
||||||
|
if ! command -v gsed &> /dev/null; then
|
||||||
|
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
SED_COMMAND=gsed
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This script changes the binary name from `rippled` to `xrpld`, and creates a
|
||||||
|
# symlink named `rippled` that points to the `xrpld` binary.
|
||||||
|
# Usage: .github/scripts/rename/binary.sh <repository directory>
|
||||||
|
|
||||||
|
if [ "$#" -ne 1 ]; then
|
||||||
|
echo "Usage: $0 <repository directory>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
DIRECTORY=$1
|
||||||
|
echo "Processing directory: ${DIRECTORY}"
|
||||||
|
if [ ! -d "${DIRECTORY}" ]; then
|
||||||
|
echo "Error: Directory '${DIRECTORY}' does not exist."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
FILE="${DIRECTORY}/cmake/XrplCore.cmake"
|
||||||
|
|
||||||
|
# Replace the comment and the command. Handle the situation where this or other
|
||||||
|
# scripts are rerun.
|
||||||
|
echo "Processing file: ${FILE}"
|
||||||
|
${SED_COMMAND} -i -E 's/For the time being.+/Create a symlink named "rippled" for backward compatibility./g' "${FILE}"
|
||||||
|
${SED_COMMAND} -i -E 's/add_custom_command\(TARGET xrpld.+/add_custom_command(TARGET xrpld POST_BUILD COMMAND ${CMAKE_COMMAND} -E create_symlink "xrpld" "rippled")/g' "${FILE}"
|
||||||
|
${SED_COMMAND} -i -E 's/set_target_properties\(xrpld.+/add_custom_command(TARGET xrpld POST_BUILD COMMAND ${CMAKE_COMMAND} -E create_symlink "xrpld" "rippled")/g' "${FILE}"
|
||||||
|
|
||||||
|
echo "Processing complete."
|
||||||
85
.github/scripts/rename/cmake.sh
vendored
Executable file
85
.github/scripts/rename/cmake.sh
vendored
Executable file
@@ -0,0 +1,85 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Exit the script as soon as an error occurs.
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# On MacOS, ensure that GNU sed and head are installed and available as `gsed`
|
||||||
|
# and `ghead`, respectively.
|
||||||
|
SED_COMMAND=sed
|
||||||
|
HEAD_COMMAND=head
|
||||||
|
if [[ "${OSTYPE}" == 'darwin'* ]]; then
|
||||||
|
if ! command -v gsed &> /dev/null; then
|
||||||
|
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
SED_COMMAND=gsed
|
||||||
|
if ! command -v ghead &> /dev/null; then
|
||||||
|
echo "Error: ghead is not installed. Please install it using 'brew install coreutils'."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
HEAD_COMMAND=ghead
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This script renames CMake files from `RippleXXX.cmake` or `RippledXXX.cmake`
|
||||||
|
# to `XrplXXX.cmake`, and any references to `ripple` and `rippled` (with or
|
||||||
|
# without capital letters) to `xrpl` and `xrpld`, respectively. The name of the
|
||||||
|
# binary will remain as-is, and will only be renamed to `xrpld` in a different
|
||||||
|
# script, but the proto file will be renamed.
|
||||||
|
# Usage: .github/scripts/rename/cmake.sh <repository directory>
|
||||||
|
|
||||||
|
if [ "$#" -ne 1 ]; then
|
||||||
|
echo "Usage: $0 <repository directory>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
DIRECTORY=$1
|
||||||
|
echo "Processing directory: ${DIRECTORY}"
|
||||||
|
if [ ! -d "${DIRECTORY}" ]; then
|
||||||
|
echo "Error: Directory '${DIRECTORY}' does not exist."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
pushd ${DIRECTORY}
|
||||||
|
|
||||||
|
# Rename the files.
|
||||||
|
find cmake -type f -name 'Rippled*.cmake' -exec bash -c 'mv "${1}" "${1/Rippled/Xrpl}"' - {} \;
|
||||||
|
find cmake -type f -name 'Ripple*.cmake' -exec bash -c 'mv "${1}" "${1/Ripple/Xrpl}"' - {} \;
|
||||||
|
if [ -e cmake/xrpl_add_test.cmake ]; then
|
||||||
|
mv cmake/xrpl_add_test.cmake cmake/XrplAddTest.cmake
|
||||||
|
fi
|
||||||
|
if [ -e include/xrpl/proto/ripple.proto ]; then
|
||||||
|
mv include/xrpl/proto/ripple.proto include/xrpl/proto/xrpl.proto
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Rename inside the files.
|
||||||
|
find cmake -type f -name '*.cmake' | while read -r FILE; do
|
||||||
|
echo "Processing file: ${FILE}"
|
||||||
|
${SED_COMMAND} -i 's/Rippled/Xrpld/g' "${FILE}"
|
||||||
|
${SED_COMMAND} -i 's/Ripple/Xrpl/g' "${FILE}"
|
||||||
|
${SED_COMMAND} -i 's/rippled/xrpld/g' "${FILE}"
|
||||||
|
${SED_COMMAND} -i 's/ripple/xrpl/g' "${FILE}"
|
||||||
|
done
|
||||||
|
${SED_COMMAND} -i -E 's/Rippled?/Xrpl/g' CMakeLists.txt
|
||||||
|
${SED_COMMAND} -i 's/ripple/xrpl/g' CMakeLists.txt
|
||||||
|
${SED_COMMAND} -i 's/include(xrpl_add_test)/include(XrplAddTest)/' src/tests/libxrpl/CMakeLists.txt
|
||||||
|
${SED_COMMAND} -i 's/ripple.pb.h/xrpl.pb.h/' include/xrpl/protocol/messages.h
|
||||||
|
${SED_COMMAND} -i 's/ripple.pb.h/xrpl.pb.h/' BUILD.md
|
||||||
|
${SED_COMMAND} -i 's/ripple.pb.h/xrpl.pb.h/' BUILD.md
|
||||||
|
|
||||||
|
# Restore the name of the validator keys repository.
|
||||||
|
${SED_COMMAND} -i 's@xrpl/validator-keys-tool@ripple/validator-keys-tool@' cmake/XrplValidatorKeys.cmake
|
||||||
|
|
||||||
|
# Ensure the name of the binary and config remain 'rippled' for now.
|
||||||
|
${SED_COMMAND} -i -E 's/xrpld(-example)?\.cfg/rippled\1.cfg/g' cmake/XrplInstall.cmake
|
||||||
|
if grep -q '"xrpld"' cmake/XrplCore.cmake; then
|
||||||
|
# The script has been rerun, so just restore the name of the binary.
|
||||||
|
${SED_COMMAND} -i 's/"xrpld"/"rippled"/' cmake/XrplCore.cmake
|
||||||
|
elif ! grep -q '"rippled"' cmake/XrplCore.cmake; then
|
||||||
|
ghead -n -1 cmake/XrplCore.cmake > cmake.tmp
|
||||||
|
echo ' # For the time being, we will keep the name of the binary as it was.' >> cmake.tmp
|
||||||
|
echo ' set_target_properties(xrpld PROPERTIES OUTPUT_NAME "rippled")' >> cmake.tmp
|
||||||
|
tail -1 cmake/XrplCore.cmake >> cmake.tmp
|
||||||
|
mv cmake.tmp cmake/XrplCore.cmake
|
||||||
|
fi
|
||||||
|
|
||||||
|
popd
|
||||||
|
echo "Renaming complete."
|
||||||
106
.github/scripts/rename/copyright.sh
vendored
Executable file
106
.github/scripts/rename/copyright.sh
vendored
Executable file
@@ -0,0 +1,106 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Exit the script as soon as an error occurs.
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# On MacOS, ensure that GNU sed is installed and available as `gsed`.
|
||||||
|
SED_COMMAND=sed
|
||||||
|
if [[ "${OSTYPE}" == 'darwin'* ]]; then
|
||||||
|
if ! command -v gsed &> /dev/null; then
|
||||||
|
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
SED_COMMAND=gsed
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This script removes superfluous copyright notices in source and header files
|
||||||
|
# in this project. Specifically, it removes all notices referencing Ripple,
|
||||||
|
# XRPLF, and certain individual contributors upon mutual agreement, so the one
|
||||||
|
# in the LICENSE.md file applies throughout. Copyright notices referencing
|
||||||
|
# external contributions, e.g. from Bitcoin, remain as-is.
|
||||||
|
# Usage: .github/scripts/rename/copyright.sh <repository directory>
|
||||||
|
|
||||||
|
if [ "$#" -ne 1 ]; then
|
||||||
|
echo "Usage: $0 <repository directory>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
DIRECTORY=$1
|
||||||
|
echo "Processing directory: ${DIRECTORY}"
|
||||||
|
if [ ! -d "${DIRECTORY}" ]; then
|
||||||
|
echo "Error: Directory '${DIRECTORY}' does not exist."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
pushd ${DIRECTORY}
|
||||||
|
|
||||||
|
# Prevent sed and echo from removing newlines and tabs in string literals by
|
||||||
|
# temporarily replacing them with placeholders. This only affects one file.
|
||||||
|
PLACEHOLDER_NEWLINE="__NEWLINE__"
|
||||||
|
PLACEHOLDER_TAB="__TAB__"
|
||||||
|
${SED_COMMAND} -i -E "s@\\\n@${PLACEHOLDER_NEWLINE}@g" src/test/rpc/ValidatorInfo_test.cpp
|
||||||
|
${SED_COMMAND} -i -E "s@\\\t@${PLACEHOLDER_TAB}@g" src/test/rpc/ValidatorInfo_test.cpp
|
||||||
|
|
||||||
|
# Process the include/ and src/ directories.
|
||||||
|
DIRECTORIES=("include" "src")
|
||||||
|
for DIRECTORY in "${DIRECTORIES[@]}"; do
|
||||||
|
echo "Processing directory: ${DIRECTORY}"
|
||||||
|
|
||||||
|
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" -o -name "*.macro" \) | while read -r FILE; do
|
||||||
|
echo "Processing file: ${FILE}"
|
||||||
|
# Handle the cases where the copyright notice is enclosed in /* ... */
|
||||||
|
# and usually surrounded by //---- and //======.
|
||||||
|
${SED_COMMAND} -z -i -E 's@^//-------+\n+@@' "${FILE}"
|
||||||
|
${SED_COMMAND} -z -i -E 's@^.*Copyright.+(Ripple|Bougalis|Falco|Hinnant|Null|Ritchford|XRPLF).+PERFORMANCE OF THIS SOFTWARE\.\n\*/\n+@@' "${FILE}"
|
||||||
|
${SED_COMMAND} -z -i -E 's@^//=======+\n+@@' "${FILE}"
|
||||||
|
|
||||||
|
# Handle the cases where the copyright notice is commented out with //.
|
||||||
|
${SED_COMMAND} -z -i -E 's@^//\n// Copyright.+Falco \(vinnie dot falco at gmail dot com\)\n//\n+@@' "${FILE}"
|
||||||
|
done
|
||||||
|
done
|
||||||
|
|
||||||
|
# Restore copyright notices that were removed from specific files, without
|
||||||
|
# restoring the verbiage that is already present in LICENSE.md. Ensure that if
|
||||||
|
# the script is run multiple times, duplicate notices are not added.
|
||||||
|
if ! grep -q 'Raw Material Software' include/xrpl/beast/core/CurrentThreadName.h; then
|
||||||
|
echo -e "// Portions of this file are from JUCE (http://www.juce.com).\n// Copyright (c) 2013 - Raw Material Software Ltd.\n// Please visit http://www.juce.com\n\n$(cat include/xrpl/beast/core/CurrentThreadName.h)" > include/xrpl/beast/core/CurrentThreadName.h
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Dev Null' src/test/app/NetworkID_test.cpp; then
|
||||||
|
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/NetworkID_test.cpp)" > src/test/app/NetworkID_test.cpp
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Dev Null' src/test/app/tx/apply_test.cpp; then
|
||||||
|
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/tx/apply_test.cpp)" > src/test/app/tx/apply_test.cpp
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Dev Null' src/test/app/NetworkOPs_test.cpp; then
|
||||||
|
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/app/NetworkOPs_test.cpp)" > src/test/app/NetworkOPs_test.cpp
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Dev Null' src/test/rpc/ManifestRPC_test.cpp; then
|
||||||
|
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/rpc/ManifestRPC_test.cpp)" > src/test/rpc/ManifestRPC_test.cpp
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Dev Null' src/test/rpc/ValidatorInfo_test.cpp; then
|
||||||
|
echo -e "// Copyright (c) 2020 Dev Null Productions\n\n$(cat src/test/rpc/ValidatorInfo_test.cpp)" > src/test/rpc/ValidatorInfo_test.cpp
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/DoManifest.cpp; then
|
||||||
|
echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/DoManifest.cpp)" > src/xrpld/rpc/handlers/DoManifest.cpp
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Dev Null' src/xrpld/rpc/handlers/ValidatorInfo.cpp; then
|
||||||
|
echo -e "// Copyright (c) 2019 Dev Null Productions\n\n$(cat src/xrpld/rpc/handlers/ValidatorInfo.cpp)" > src/xrpld/rpc/handlers/ValidatorInfo.cpp
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Bougalis' include/xrpl/basics/SlabAllocator.h; then
|
||||||
|
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/SlabAllocator.h)" > include/xrpl/basics/SlabAllocator.h
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Bougalis' include/xrpl/basics/spinlock.h; then
|
||||||
|
echo -e "// Copyright (c) 2022, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/spinlock.h)" > include/xrpl/basics/spinlock.h
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Bougalis' include/xrpl/basics/tagged_integer.h; then
|
||||||
|
echo -e "// Copyright (c) 2014, Nikolaos D. Bougalis <nikb@bougalis.net>\n\n$(cat include/xrpl/basics/tagged_integer.h)" > include/xrpl/basics/tagged_integer.h
|
||||||
|
fi
|
||||||
|
if ! grep -q 'Ritchford' include/xrpl/beast/utility/Zero.h; then
|
||||||
|
echo -e "// Copyright (c) 2014, Tom Ritchford <tom@swirly.com>\n\n$(cat include/xrpl/beast/utility/Zero.h)" > include/xrpl/beast/utility/Zero.h
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Restore newlines and tabs in string literals in the affected file.
|
||||||
|
${SED_COMMAND} -i -E "s@${PLACEHOLDER_NEWLINE}@\\\n@g" src/test/rpc/ValidatorInfo_test.cpp
|
||||||
|
${SED_COMMAND} -i -E "s@${PLACEHOLDER_TAB}@\\\t@g" src/test/rpc/ValidatorInfo_test.cpp
|
||||||
|
|
||||||
|
popd
|
||||||
|
echo "Removal complete."
|
||||||
42
.github/scripts/rename/definitions.sh
vendored
Executable file
42
.github/scripts/rename/definitions.sh
vendored
Executable file
@@ -0,0 +1,42 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Exit the script as soon as an error occurs.
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# On MacOS, ensure that GNU sed is installed and available as `gsed`.
|
||||||
|
SED_COMMAND=sed
|
||||||
|
if [[ "${OSTYPE}" == 'darwin'* ]]; then
|
||||||
|
if ! command -v gsed &> /dev/null; then
|
||||||
|
echo "Error: gsed is not installed. Please install it using 'brew install gnu-sed'."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
SED_COMMAND=gsed
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This script renames definitions, such as include guards, in this project.
|
||||||
|
# Specifically, it renames "RIPPLED_XXX" and "RIPPLE_XXX" to "XRPL_XXX" by
|
||||||
|
# scanning all cmake, header, and source files in the specified directory and
|
||||||
|
# its subdirectories.
|
||||||
|
# Usage: .github/scripts/rename/definitions.sh <repository directory>
|
||||||
|
|
||||||
|
if [ "$#" -ne 1 ]; then
|
||||||
|
echo "Usage: $0 <repository directory>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
DIRECTORY=$1
|
||||||
|
echo "Processing directory: ${DIRECTORY}"
|
||||||
|
if [ ! -d "${DIRECTORY}" ]; then
|
||||||
|
echo "Error: Directory '${DIRECTORY}' does not exist."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
find "${DIRECTORY}" -type f \( -name "*.h" -o -name "*.hpp" -o -name "*.ipp" -o -name "*.cpp" \) | while read -r FILE; do
|
||||||
|
echo "Processing file: ${FILE}"
|
||||||
|
${SED_COMMAND} -i -E 's@#(define|endif|if|ifdef|ifndef)(.*)(RIPPLED_|RIPPLE_)([A-Z0-9_]+)@#\1\2XRPL_\4@g' "${FILE}"
|
||||||
|
done
|
||||||
|
find "${DIRECTORY}" -type f \( -name "*.cmake" -o -name "*.txt" \) | while read -r FILE; do
|
||||||
|
echo "Processing file: ${FILE}"
|
||||||
|
${SED_COMMAND} -i -E 's@(RIPPLED_|RIPPLE_)([A-Z0-9_]+)@XRPL_\2@g' "${FILE}"
|
||||||
|
done
|
||||||
|
echo "Renaming complete."
|
||||||
197
.github/scripts/strategy-matrix/generate.py
vendored
Executable file
197
.github/scripts/strategy-matrix/generate.py
vendored
Executable file
@@ -0,0 +1,197 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import argparse
|
||||||
|
import itertools
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
THIS_DIR = Path(__file__).parent.resolve()
|
||||||
|
|
||||||
|
@dataclass
class Config:
    """Strategy-matrix axes loaded from a JSON configuration file.

    The Cartesian product of the four lists yields the candidate build
    configurations (see generate_strategy_matrix, which filters and
    augments them).
    """

    # Platform/runner descriptors, e.g.
    # {"platform": "linux/amd64", "runner": [...]}.
    architecture: list[dict]
    # Distro/compiler descriptors with keys distro_name, distro_version,
    # compiler_name, compiler_version, and image_sha.
    os: list[dict]
    # Build types, e.g. "Debug" and "Release".
    build_type: list[str]
    # Base CMake argument strings, e.g. "-Dunity=ON".
    cmake_args: list[str]
|
||||||
|
|
||||||
|
def generate_strategy_matrix(all: bool, config: 'Config') -> list:
    '''
    Generate a strategy matrix for GitHub Actions CI.

    On each PR commit we will build a selection of Debian, RHEL, Ubuntu,
    MacOS, and Windows configurations, while upon merge into the develop,
    release, or master branches, we will build all configurations, and test
    most of them.

    We will further set additional CMake arguments as follows:
    - All builds will have the `tests`, `werr`, and `xrpld` options.
    - All builds will have the `wextra` option except for GCC 12 and Clang 16.
    - All release builds will have the `assert` option.
    - Certain Debian Bookworm configurations will change the reference fee,
      enable codecov, and enable voidstar in PRs.

    :param all: When True, generate every configuration (used when merging);
        when False, generate only the PR subset. (Name shadows the builtin,
        kept for caller compatibility.)
    :param config: The strategy-matrix axes whose product is enumerated.
    :return: A list of configuration dicts for a GitHub Actions `include`
        matrix.
    '''
    configurations = []
    for architecture, os, build_type, cmake_args in itertools.product(
            config.architecture, config.os, config.build_type, config.cmake_args):
        # Identify the compiler and platform once per combination, e.g.
        # 'gcc-13' and 'linux/amd64'. MacOS and Windows have empty compiler
        # fields, which yields the (intentionally unmatched) string '-'.
        compiler = f"{os['compiler_name']}-{os['compiler_version']}"
        platform = architecture['platform']

        # The default CMake target is 'all' for Linux and MacOS and 'install'
        # for Windows, but it can get overridden for certain configurations.
        cmake_target = 'install' if os['distro_name'] == 'windows' else 'all'

        # We build and test all configurations by default, except for Windows
        # in Debug, because it is too slow, as well as when code coverage is
        # enabled as that mode already runs the tests.
        build_only = False
        if os['distro_name'] == 'windows' and build_type == 'Debug':
            build_only = True

        # Only generate a subset of configurations in PRs.
        if not all:
            # Debian:
            # - Bookworm using GCC 13: Release and Unity on linux/amd64, set
            #   the reference fee to 500.
            # - Bookworm using GCC 15: Debug and no Unity on linux/amd64,
            #   enable code coverage (which will be done below).
            # - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
            #   enable voidstar.
            # - Bookworm using Clang 17: Release on linux/amd64, set the
            #   reference fee to 1000.
            #   NOTE(review): the original comment said 'no Unity' here, but
            #   the condition checks '-Dunity=ON' -- confirm which is meant.
            # - Bookworm using Clang 20: Debug and Unity on linux/amd64.
            if os['distro_name'] == 'debian':
                skip = True
                if os['distro_version'] == 'bookworm':
                    if compiler == 'gcc-13' and build_type == 'Release' and '-Dunity=ON' in cmake_args and platform == 'linux/amd64':
                        cmake_args = f'-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}'
                        skip = False
                    if compiler == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and platform == 'linux/amd64':
                        skip = False
                    if compiler == 'clang-16' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and platform == 'linux/arm64':
                        cmake_args = f'-Dvoidstar=ON {cmake_args}'
                        skip = False
                    if compiler == 'clang-17' and build_type == 'Release' and '-Dunity=ON' in cmake_args and platform == 'linux/amd64':
                        cmake_args = f'-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}'
                        skip = False
                    if compiler == 'clang-20' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and platform == 'linux/amd64':
                        skip = False
                if skip:
                    continue

            # RHEL:
            # - 9 using GCC 12: Debug and Unity on linux/amd64.
            # - 10 using Clang: Release and no Unity on linux/amd64.
            if os['distro_name'] == 'rhel':
                skip = True
                if os['distro_version'] == '9':
                    if compiler == 'gcc-12' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and platform == 'linux/amd64':
                        skip = False
                elif os['distro_version'] == '10':
                    if compiler == 'clang-any' and build_type == 'Release' and '-Dunity=OFF' in cmake_args and platform == 'linux/amd64':
                        skip = False
                if skip:
                    continue

            # Ubuntu:
            # - Jammy using GCC 12: Debug and no Unity on linux/arm64.
            # - Noble using GCC 14: Release and Unity on linux/amd64.
            # - Noble using Clang 18: Debug and no Unity on linux/amd64.
            # - Noble using Clang 19: Release and Unity on linux/arm64.
            if os['distro_name'] == 'ubuntu':
                skip = True
                if os['distro_version'] == 'jammy':
                    if compiler == 'gcc-12' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and platform == 'linux/arm64':
                        skip = False
                elif os['distro_version'] == 'noble':
                    if compiler == 'gcc-14' and build_type == 'Release' and '-Dunity=ON' in cmake_args and platform == 'linux/amd64':
                        skip = False
                    if compiler == 'clang-18' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and platform == 'linux/amd64':
                        skip = False
                    if compiler == 'clang-19' and build_type == 'Release' and '-Dunity=ON' in cmake_args and platform == 'linux/arm64':
                        skip = False
                if skip:
                    continue

            # MacOS:
            # - Debug and no Unity on macos/arm64.
            if os['distro_name'] == 'macos' and not (build_type == 'Debug' and '-Dunity=OFF' in cmake_args and platform == 'macos/arm64'):
                continue

            # Windows:
            # - Release and Unity on windows/amd64.
            if os['distro_name'] == 'windows' and not (build_type == 'Release' and '-Dunity=ON' in cmake_args and platform == 'windows/amd64'):
                continue

        # Additional CMake arguments.
        cmake_args = f'{cmake_args} -Dtests=ON -Dwerr=ON -Dxrpld=ON'
        if compiler not in ['gcc-12', 'clang-16']:
            cmake_args = f'{cmake_args} -Dwextra=ON'
        if build_type == 'Release':
            cmake_args = f'{cmake_args} -Dassert=ON'

        # We skip all RHEL on arm64 due to a build failure that needs further
        # investigation.
        if os['distro_name'] == 'rhel' and platform == 'linux/arm64':
            continue

        # We skip all clang-20 on arm64 due to boost 1.86 build error.
        if compiler == 'clang-20' and platform == 'linux/arm64':
            continue

        # Enable code coverage for Debian Bookworm using GCC 15 in Debug and
        # no Unity on linux/amd64. Coverage runs the tests itself, hence
        # build_only.
        if compiler == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and platform == 'linux/amd64':
            cmake_args = f'-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}'
            cmake_target = 'coverage'
            build_only = True

        # Generate a unique name for the configuration, e.g. macos-arm64-debug
        # or debian-bookworm-gcc-12-amd64-release-unity. Empty fields (MacOS,
        # Windows) are omitted.
        config_name = os['distro_name']
        if (n := os['distro_version']) != '':
            config_name += f'-{n}'
        if (n := os['compiler_name']) != '':
            config_name += f'-{n}'
        if (n := os['compiler_version']) != '':
            config_name += f'-{n}'
        config_name += f"-{platform[platform.find('/') + 1:]}"
        config_name += f'-{build_type.lower()}'
        if '-Dunity=ON' in cmake_args:
            config_name += '-unity'

        # Add the configuration to the list, with the most unique fields first,
        # so that they are easier to identify in the GitHub Actions UI, as long
        # names get truncated.
        configurations.append({
            'config_name': config_name,
            'cmake_args': cmake_args,
            'cmake_target': cmake_target,
            'build_only': build_only,
            'build_type': build_type,
            'os': os,
            'architecture': architecture,
        })

    return configurations
|
||||||
|
|
||||||
|
|
||||||
|
def read_config(file: Path) -> Config:
    '''
    Read and validate a strategy-matrix configuration from a JSON file.

    :param file: Path to a JSON file providing the 'architecture', 'os',
        'build_type', and 'cmake_args' axes.
    :return: A Config built from the parsed axes.
    :raises Exception: If any required axis is missing or null.
    '''
    config = json.loads(file.read_text())

    # Use .get() so that an absent key is reported as an invalid
    # configuration rather than raising an opaque KeyError.
    required = ('architecture', 'os', 'build_type', 'cmake_args')
    if any(config.get(key) is None for key in required):
        raise Exception('Invalid configuration file.')

    return Config(**config)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-a', '--all', help='Set to generate all configurations (generally used when merging a PR) or leave unset to generate a subset of configurations (generally used when committing to a PR).', action="store_true")
    parser.add_argument('-c', '--config', help='Path to the JSON file containing the strategy matrix configurations.', required=False, type=Path)
    args = parser.parse_args()

    # Generate the strategy matrix. With type=Path, args.config is either
    # None or a Path, so no string comparison is needed here.
    matrix = []
    if args.config is None:
        # No configuration file given: combine the matrices for all
        # platform families bundled next to this script.
        for platform_file in ('linux.json', 'macos.json', 'windows.json'):
            matrix += generate_strategy_matrix(args.all, read_config(THIS_DIR / platform_file))
    else:
        matrix += generate_strategy_matrix(args.all, read_config(args.config))

    # Emit the matrix in the `key=value` form GitHub Actions expects when
    # appending to $GITHUB_OUTPUT, as a JSON object with an 'include' list.
    print(f'matrix={json.dumps({"include": matrix})}')
|
||||||
184
.github/scripts/strategy-matrix/linux.json
vendored
Normal file
184
.github/scripts/strategy-matrix/linux.json
vendored
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
{
|
||||||
|
"architecture": [
|
||||||
|
{
|
||||||
|
"platform": "linux/amd64",
|
||||||
|
"runner": ["self-hosted", "Linux", "X64", "heavy"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"platform": "linux/arm64",
|
||||||
|
"runner": ["self-hosted", "Linux", "ARM64", "heavy-arm64"]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"os": [
|
||||||
|
{
|
||||||
|
"distro_name": "debian",
|
||||||
|
"distro_version": "bookworm",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "12",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "debian",
|
||||||
|
"distro_version": "bookworm",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "13",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "debian",
|
||||||
|
"distro_version": "bookworm",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "14",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "debian",
|
||||||
|
"distro_version": "bookworm",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "15",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "debian",
|
||||||
|
"distro_version": "bookworm",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "16",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "debian",
|
||||||
|
"distro_version": "bookworm",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "17",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "debian",
|
||||||
|
"distro_version": "bookworm",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "18",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "debian",
|
||||||
|
"distro_version": "bookworm",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "19",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "debian",
|
||||||
|
"distro_version": "bookworm",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "20",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "rhel",
|
||||||
|
"distro_version": "8",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "14",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "rhel",
|
||||||
|
"distro_version": "8",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "any",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "rhel",
|
||||||
|
"distro_version": "9",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "12",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "rhel",
|
||||||
|
"distro_version": "9",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "13",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "rhel",
|
||||||
|
"distro_version": "9",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "14",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "rhel",
|
||||||
|
"distro_version": "9",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "any",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "rhel",
|
||||||
|
"distro_version": "10",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "14",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "rhel",
|
||||||
|
"distro_version": "10",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "any",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "ubuntu",
|
||||||
|
"distro_version": "jammy",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "12",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "ubuntu",
|
||||||
|
"distro_version": "noble",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "13",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "ubuntu",
|
||||||
|
"distro_version": "noble",
|
||||||
|
"compiler_name": "gcc",
|
||||||
|
"compiler_version": "14",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "ubuntu",
|
||||||
|
"distro_version": "noble",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "16",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "ubuntu",
|
||||||
|
"distro_version": "noble",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "17",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "ubuntu",
|
||||||
|
"distro_version": "noble",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "18",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"distro_name": "ubuntu",
|
||||||
|
"distro_version": "noble",
|
||||||
|
"compiler_name": "clang",
|
||||||
|
"compiler_version": "19",
|
||||||
|
"image_sha": "97ba375"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"build_type": ["Debug", "Release"],
|
||||||
|
"cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
|
||||||
|
}
|
||||||
22
.github/scripts/strategy-matrix/macos.json
vendored
Normal file
22
.github/scripts/strategy-matrix/macos.json
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
{
|
||||||
|
"architecture": [
|
||||||
|
{
|
||||||
|
"platform": "macos/arm64",
|
||||||
|
"runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"os": [
|
||||||
|
{
|
||||||
|
"distro_name": "macos",
|
||||||
|
"distro_version": "",
|
||||||
|
"compiler_name": "",
|
||||||
|
"compiler_version": "",
|
||||||
|
"image_sha": ""
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"build_type": ["Debug", "Release"],
|
||||||
|
"cmake_args": [
|
||||||
|
"-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
|
||||||
|
"-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
|
||||||
|
]
|
||||||
|
}
|
||||||
19
.github/scripts/strategy-matrix/windows.json
vendored
Normal file
19
.github/scripts/strategy-matrix/windows.json
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
{
|
||||||
|
"architecture": [
|
||||||
|
{
|
||||||
|
"platform": "windows/amd64",
|
||||||
|
"runner": ["self-hosted", "Windows", "devbox"]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"os": [
|
||||||
|
{
|
||||||
|
"distro_name": "windows",
|
||||||
|
"distro_version": "",
|
||||||
|
"compiler_name": "",
|
||||||
|
"compiler_version": "",
|
||||||
|
"image_sha": ""
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"build_type": ["Debug", "Release"],
|
||||||
|
"cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
|
||||||
|
}
|
||||||
63
.github/workflows/clang-format.yml
vendored
63
.github/workflows/clang-format.yml
vendored
@@ -1,63 +0,0 @@
|
|||||||
name: clang-format
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
pull_request:
|
|
||||||
types: [opened, reopened, synchronize, ready_for_review]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check:
|
|
||||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
env:
|
|
||||||
CLANG_VERSION: 18
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: Install clang-format
|
|
||||||
run: |
|
|
||||||
codename=$( lsb_release --codename --short )
|
|
||||||
sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
|
|
||||||
deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
|
|
||||||
deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
|
|
||||||
EOF
|
|
||||||
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install clang-format-${CLANG_VERSION}
|
|
||||||
- name: Format first-party sources
|
|
||||||
run: find include src tests -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format-${CLANG_VERSION} -i {} +
|
|
||||||
- name: Check for differences
|
|
||||||
id: assert
|
|
||||||
run: |
|
|
||||||
set -o pipefail
|
|
||||||
git diff --exit-code | tee "clang-format.patch"
|
|
||||||
- name: Upload patch
|
|
||||||
if: failure() && steps.assert.outcome == 'failure'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
continue-on-error: true
|
|
||||||
with:
|
|
||||||
name: clang-format.patch
|
|
||||||
if-no-files-found: ignore
|
|
||||||
path: clang-format.patch
|
|
||||||
- name: What happened?
|
|
||||||
if: failure() && steps.assert.outcome == 'failure'
|
|
||||||
env:
|
|
||||||
PREAMBLE: |
|
|
||||||
If you are reading this, you are looking at a failed Github Actions
|
|
||||||
job. That means you pushed one or more files that did not conform
|
|
||||||
to the formatting specified in .clang-format. That may be because
|
|
||||||
you neglected to run 'git clang-format' or 'clang-format' before
|
|
||||||
committing, or that your version of clang-format has an
|
|
||||||
incompatibility with the one on this
|
|
||||||
machine, which is:
|
|
||||||
SUGGESTION: |
|
|
||||||
|
|
||||||
To fix it, you can do one of two things:
|
|
||||||
1. Download and apply the patch generated as an artifact of this
|
|
||||||
job to your repo, commit, and push.
|
|
||||||
2. Run 'git-clang-format --extensions cpp,h,hpp,ipp develop'
|
|
||||||
in your repo, commit, and push.
|
|
||||||
run: |
|
|
||||||
echo "${PREAMBLE}"
|
|
||||||
clang-format-${CLANG_VERSION} --version
|
|
||||||
echo "${SUGGESTION}"
|
|
||||||
exit 1
|
|
||||||
37
.github/workflows/doxygen.yml
vendored
37
.github/workflows/doxygen.yml
vendored
@@ -1,37 +0,0 @@
|
|||||||
name: Build and publish Doxygen documentation
|
|
||||||
# To test this workflow, push your changes to your fork's `develop` branch.
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
- doxygen
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
documentation:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
|
|
||||||
steps:
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: check environment
|
|
||||||
run: |
|
|
||||||
echo ${PATH} | tr ':' '\n'
|
|
||||||
cmake --version
|
|
||||||
doxygen --version
|
|
||||||
env | sort
|
|
||||||
- name: build
|
|
||||||
run: |
|
|
||||||
mkdir build
|
|
||||||
cd build
|
|
||||||
cmake -Donly_docs=TRUE ..
|
|
||||||
cmake --build . --target docs --parallel $(nproc)
|
|
||||||
- name: publish
|
|
||||||
uses: peaceiris/actions-gh-pages@v3
|
|
||||||
with:
|
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
publish_dir: build/docs/html
|
|
||||||
53
.github/workflows/levelization.yml
vendored
53
.github/workflows/levelization.yml
vendored
@@ -1,53 +0,0 @@
|
|||||||
name: levelization
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
pull_request:
|
|
||||||
types: [opened, reopened, synchronize, ready_for_review]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check:
|
|
||||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
CLANG_VERSION: 10
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
- name: Check levelization
|
|
||||||
run: Builds/levelization/levelization.sh
|
|
||||||
- name: Check for differences
|
|
||||||
id: assert
|
|
||||||
run: |
|
|
||||||
set -o pipefail
|
|
||||||
git diff --exit-code | tee "levelization.patch"
|
|
||||||
- name: Upload patch
|
|
||||||
if: failure() && steps.assert.outcome == 'failure'
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
continue-on-error: true
|
|
||||||
with:
|
|
||||||
name: levelization.patch
|
|
||||||
if-no-files-found: ignore
|
|
||||||
path: levelization.patch
|
|
||||||
- name: What happened?
|
|
||||||
if: failure() && steps.assert.outcome == 'failure'
|
|
||||||
env:
|
|
||||||
MESSAGE: |
|
|
||||||
If you are reading this, you are looking at a failed Github
|
|
||||||
Actions job. That means you changed the dependency relationships
|
|
||||||
between the modules in rippled. That may be an improvement or a
|
|
||||||
regression. This check doesn't judge.
|
|
||||||
|
|
||||||
A rule of thumb, though, is that if your changes caused
|
|
||||||
something to be removed from loops.txt, that's probably an
|
|
||||||
improvement. If something was added, it's probably a regression.
|
|
||||||
|
|
||||||
To fix it, you can do one of two things:
|
|
||||||
1. Download and apply the patch generated as an artifact of this
|
|
||||||
job to your repo, commit, and push.
|
|
||||||
2. Run './Builds/levelization/levelization.sh' in your repo,
|
|
||||||
commit, and push.
|
|
||||||
|
|
||||||
See Builds/levelization/README.md for more info.
|
|
||||||
run: |
|
|
||||||
echo "${MESSAGE}"
|
|
||||||
exit 1
|
|
||||||
91
.github/workflows/libxrpl.yml
vendored
91
.github/workflows/libxrpl.yml
vendored
@@ -1,91 +0,0 @@
|
|||||||
name: Check libXRPL compatibility with Clio
|
|
||||||
env:
|
|
||||||
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
|
|
||||||
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
|
|
||||||
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- 'src/libxrpl/protocol/BuildInfo.cpp'
|
|
||||||
- '.github/workflows/libxrpl.yml'
|
|
||||||
types: [opened, reopened, synchronize, ready_for_review]
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
publish:
|
|
||||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
|
||||||
name: Publish libXRPL
|
|
||||||
outputs:
|
|
||||||
outcome: ${{ steps.upload.outputs.outcome }}
|
|
||||||
version: ${{ steps.version.outputs.version }}
|
|
||||||
channel: ${{ steps.channel.outputs.channel }}
|
|
||||||
runs-on: [self-hosted, heavy]
|
|
||||||
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
|
|
||||||
steps:
|
|
||||||
- name: Wait for essential checks to succeed
|
|
||||||
uses: lewagon/wait-on-check-action@v1.3.4
|
|
||||||
with:
|
|
||||||
ref: ${{ github.event.pull_request.head.sha || github.sha }}
|
|
||||||
running-workflow-name: wait-for-check-regexp
|
|
||||||
check-regexp: '(dependencies|test).*linux.*' # Ignore windows and mac tests but make sure linux passes
|
|
||||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
wait-interval: 10
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: Generate channel
|
|
||||||
id: channel
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo channel="clio/pr_${{ github.event.pull_request.number }}" | tee ${GITHUB_OUTPUT}
|
|
||||||
- name: Export new package
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
conan export . ${{ steps.channel.outputs.channel }}
|
|
||||||
- name: Add Ripple Conan remote
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
conan remote list
|
|
||||||
conan remote remove ripple || true
|
|
||||||
# Do not quote the URL. An empty string will be accepted (with a non-fatal warning), but a missing argument will not.
|
|
||||||
conan remote add ripple ${{ env.CONAN_URL }} --insert 0
|
|
||||||
- name: Parse new version
|
|
||||||
id: version
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo version="$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" \
|
|
||||||
| awk -F '"' '{print $2}')" | tee ${GITHUB_OUTPUT}
|
|
||||||
- name: Try to authenticate to Ripple Conan remote
|
|
||||||
id: remote
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
# `conan user` implicitly uses the environment variables CONAN_LOGIN_USERNAME_<REMOTE> and CONAN_PASSWORD_<REMOTE>.
|
|
||||||
# https://docs.conan.io/1/reference/commands/misc/user.html#using-environment-variables
|
|
||||||
# https://docs.conan.io/1/reference/env_vars.html#conan-login-username-conan-login-username-remote-name
|
|
||||||
# https://docs.conan.io/1/reference/env_vars.html#conan-password-conan-password-remote-name
|
|
||||||
echo outcome=$(conan user --remote ripple --password >&2 \
|
|
||||||
&& echo success || echo failure) | tee ${GITHUB_OUTPUT}
|
|
||||||
- name: Upload new package
|
|
||||||
id: upload
|
|
||||||
if: (steps.remote.outputs.outcome == 'success')
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "conan upload version ${{ steps.version.outputs.version }} on channel ${{ steps.channel.outputs.channel }}"
|
|
||||||
echo outcome=$(conan upload xrpl/${{ steps.version.outputs.version }}@${{ steps.channel.outputs.channel }} --remote ripple --confirm >&2 \
|
|
||||||
&& echo success || echo failure) | tee ${GITHUB_OUTPUT}
|
|
||||||
notify_clio:
|
|
||||||
name: Notify Clio
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: publish
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.CLIO_NOTIFY_TOKEN }}
|
|
||||||
steps:
|
|
||||||
- name: Notify Clio about new version
|
|
||||||
if: (needs.publish.outputs.outcome == 'success')
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
|
|
||||||
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
|
|
||||||
-F "client_payload[version]=${{ needs.publish.outputs.version }}@${{ needs.publish.outputs.channel }}" \
|
|
||||||
-F "client_payload[pr]=${{ github.event.pull_request.number }}"
|
|
||||||
99
.github/workflows/macos.yml
vendored
99
.github/workflows/macos.yml
vendored
@@ -1,99 +0,0 @@
|
|||||||
name: macos
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
types: [opened, reopened, synchronize, ready_for_review]
|
|
||||||
push:
|
|
||||||
# If the branches list is ever changed, be sure to change it on all
|
|
||||||
# build/test jobs (nix, macos, windows, instrumentation)
|
|
||||||
branches:
|
|
||||||
# Always build the package branches
|
|
||||||
- develop
|
|
||||||
- release
|
|
||||||
- master
|
|
||||||
# Branches that opt-in to running
|
|
||||||
- 'ci/**'
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
test:
|
|
||||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
platform:
|
|
||||||
- macos
|
|
||||||
generator:
|
|
||||||
- Ninja
|
|
||||||
configuration:
|
|
||||||
- Release
|
|
||||||
runs-on: [self-hosted, macOS]
|
|
||||||
env:
|
|
||||||
# The `build` action requires these variables.
|
|
||||||
build_dir: .build
|
|
||||||
NUM_PROCESSORS: 12
|
|
||||||
steps:
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: install Conan
|
|
||||||
run: |
|
|
||||||
brew install conan@1
|
|
||||||
echo '/opt/homebrew/opt/conan@1/bin' >> $GITHUB_PATH
|
|
||||||
- name: install Ninja
|
|
||||||
if: matrix.generator == 'Ninja'
|
|
||||||
run: brew install ninja
|
|
||||||
- name: install python
|
|
||||||
run: |
|
|
||||||
if which python > /dev/null 2>&1; then
|
|
||||||
echo "Python executable exists"
|
|
||||||
else
|
|
||||||
brew install python@3.13
|
|
||||||
ln -s /opt/homebrew/bin/python3 /opt/homebrew/bin/python
|
|
||||||
fi
|
|
||||||
- name: install cmake
|
|
||||||
run: |
|
|
||||||
if which cmake > /dev/null 2>&1; then
|
|
||||||
echo "cmake executable exists"
|
|
||||||
else
|
|
||||||
brew install cmake
|
|
||||||
fi
|
|
||||||
- name: install nproc
|
|
||||||
run: |
|
|
||||||
brew install coreutils
|
|
||||||
- name: check environment
|
|
||||||
run: |
|
|
||||||
env | sort
|
|
||||||
echo ${PATH} | tr ':' '\n'
|
|
||||||
python --version
|
|
||||||
conan --version
|
|
||||||
cmake --version
|
|
||||||
nproc --version
|
|
||||||
echo -n "nproc returns: "
|
|
||||||
nproc
|
|
||||||
system_profiler SPHardwareDataType
|
|
||||||
sysctl -n hw.logicalcpu
|
|
||||||
clang --version
|
|
||||||
- name: configure Conan
|
|
||||||
run : |
|
|
||||||
conan profile new default --detect || true
|
|
||||||
conan profile update settings.compiler.cppstd=20 default
|
|
||||||
- name: build dependencies
|
|
||||||
uses: ./.github/actions/dependencies
|
|
||||||
env:
|
|
||||||
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
|
|
||||||
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
|
|
||||||
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
|
|
||||||
with:
|
|
||||||
configuration: ${{ matrix.configuration }}
|
|
||||||
- name: build
|
|
||||||
uses: ./.github/actions/build
|
|
||||||
with:
|
|
||||||
generator: ${{ matrix.generator }}
|
|
||||||
configuration: ${{ matrix.configuration }}
|
|
||||||
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
|
|
||||||
- name: test
|
|
||||||
run: |
|
|
||||||
n=$(nproc)
|
|
||||||
echo "Using $n test jobs"
|
|
||||||
${build_dir}/rippled --unittest --unittest-jobs $n
|
|
||||||
60
.github/workflows/missing-commits.yml
vendored
60
.github/workflows/missing-commits.yml
vendored
@@ -1,60 +0,0 @@
|
|||||||
name: missing-commits
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
# Only check that the branches are up to date when updating the
|
|
||||||
# relevant branches.
|
|
||||||
- develop
|
|
||||||
- release
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
up_to_date:
|
|
||||||
runs-on: ubuntu-24.04
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
- name: Check for missing commits
|
|
||||||
id: commits
|
|
||||||
env:
|
|
||||||
SUGGESTION: |
|
|
||||||
|
|
||||||
If you are reading this, then the commits indicated above are
|
|
||||||
missing from "develop" and/or "release". Do a reverse-merge
|
|
||||||
as soon as possible. See CONTRIBUTING.md for instructions.
|
|
||||||
run: |
|
|
||||||
set -o pipefail
|
|
||||||
# Branches ordered by how "canonical" they are. Every commit in
|
|
||||||
# one branch should be in all the branches behind it
|
|
||||||
order=( master release develop )
|
|
||||||
branches=()
|
|
||||||
for branch in "${order[@]}"
|
|
||||||
do
|
|
||||||
# Check that the branches exist so that this job will work on
|
|
||||||
# forked repos, which don't necessarily have master and
|
|
||||||
# release branches.
|
|
||||||
if git ls-remote --exit-code --heads origin \
|
|
||||||
refs/heads/${branch} > /dev/null
|
|
||||||
then
|
|
||||||
branches+=( origin/${branch} )
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
prior=()
|
|
||||||
for branch in "${branches[@]}"
|
|
||||||
do
|
|
||||||
if [[ ${#prior[@]} -ne 0 ]]
|
|
||||||
then
|
|
||||||
echo "Checking ${prior[@]} for commits missing from ${branch}"
|
|
||||||
git log --oneline --no-merges "${prior[@]}" \
|
|
||||||
^$branch | tee -a "missing-commits.txt"
|
|
||||||
echo
|
|
||||||
fi
|
|
||||||
prior+=( "${branch}" )
|
|
||||||
done
|
|
||||||
if [[ $( cat missing-commits.txt | wc -l ) -ne 0 ]]
|
|
||||||
then
|
|
||||||
echo "${SUGGESTION}"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
443
.github/workflows/nix.yml
vendored
443
.github/workflows/nix.yml
vendored
@@ -1,443 +0,0 @@
|
|||||||
name: nix
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
types: [opened, reopened, synchronize, ready_for_review]
|
|
||||||
push:
|
|
||||||
# If the branches list is ever changed, be sure to change it on all
|
|
||||||
# build/test jobs (nix, macos, windows)
|
|
||||||
branches:
|
|
||||||
# Always build the package branches
|
|
||||||
- develop
|
|
||||||
- release
|
|
||||||
- master
|
|
||||||
# Branches that opt-in to running
|
|
||||||
- "ci/**"
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
# This workflow has multiple job matrixes.
|
|
||||||
# They can be considered phases because most of the matrices ("test",
|
|
||||||
# "coverage", "conan", ) depend on the first ("dependencies").
|
|
||||||
#
|
|
||||||
# The first phase has a job in the matrix for each combination of
|
|
||||||
# variables that affects dependency ABI:
|
|
||||||
# platform, compiler, and configuration.
|
|
||||||
# It creates a GitHub artifact holding the Conan profile,
|
|
||||||
# and builds and caches binaries for all the dependencies.
|
|
||||||
# If an Artifactory remote is configured, they are cached there.
|
|
||||||
# If not, they are added to the GitHub artifact.
|
|
||||||
# GitHub's "cache" action has a size limit (10 GB) that is too small
|
|
||||||
# to hold the binaries if they are built locally.
|
|
||||||
# We must use the "{upload,download}-artifact" actions instead.
|
|
||||||
#
|
|
||||||
# The remaining phases have a job in the matrix for each test
|
|
||||||
# configuration. They install dependency binaries from the cache,
|
|
||||||
# whichever was used, and build and test rippled.
|
|
||||||
#
|
|
||||||
# "instrumentation" is independent, but is included here because it also
|
|
||||||
# builds on linux in the same "on:" conditions.
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
dependencies:
|
|
||||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
platform:
|
|
||||||
- linux
|
|
||||||
compiler:
|
|
||||||
- gcc
|
|
||||||
- clang
|
|
||||||
configuration:
|
|
||||||
- Debug
|
|
||||||
- Release
|
|
||||||
include:
|
|
||||||
- compiler: gcc
|
|
||||||
profile:
|
|
||||||
version: 11
|
|
||||||
cc: /usr/bin/gcc
|
|
||||||
cxx: /usr/bin/g++
|
|
||||||
- compiler: clang
|
|
||||||
profile:
|
|
||||||
version: 14
|
|
||||||
cc: /usr/bin/clang-14
|
|
||||||
cxx: /usr/bin/clang++-14
|
|
||||||
runs-on: [self-hosted, heavy]
|
|
||||||
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
|
|
||||||
env:
|
|
||||||
build_dir: .build
|
|
||||||
steps:
|
|
||||||
- name: upgrade conan
|
|
||||||
run: |
|
|
||||||
pip install --upgrade "conan<2"
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: check environment
|
|
||||||
run: |
|
|
||||||
echo ${PATH} | tr ':' '\n'
|
|
||||||
lsb_release -a || true
|
|
||||||
${{ matrix.profile.cc }} --version
|
|
||||||
conan --version
|
|
||||||
cmake --version
|
|
||||||
env | sort
|
|
||||||
- name: configure Conan
|
|
||||||
run: |
|
|
||||||
conan profile new default --detect
|
|
||||||
conan profile update settings.compiler.cppstd=20 default
|
|
||||||
conan profile update settings.compiler=${{ matrix.compiler }} default
|
|
||||||
conan profile update settings.compiler.version=${{ matrix.profile.version }} default
|
|
||||||
conan profile update settings.compiler.libcxx=libstdc++11 default
|
|
||||||
conan profile update env.CC=${{ matrix.profile.cc }} default
|
|
||||||
conan profile update env.CXX=${{ matrix.profile.cxx }} default
|
|
||||||
conan profile update conf.tools.build:compiler_executables='{"c": "${{ matrix.profile.cc }}", "cpp": "${{ matrix.profile.cxx }}"}' default
|
|
||||||
- name: archive profile
|
|
||||||
# Create this archive before dependencies are added to the local cache.
|
|
||||||
run: tar -czf conan.tar -C ~/.conan .
|
|
||||||
- name: build dependencies
|
|
||||||
uses: ./.github/actions/dependencies
|
|
||||||
env:
|
|
||||||
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
|
|
||||||
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
|
|
||||||
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
|
|
||||||
with:
|
|
||||||
configuration: ${{ matrix.configuration }}
|
|
||||||
- name: upload archive
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
|
|
||||||
path: conan.tar
|
|
||||||
if-no-files-found: error
|
|
||||||
|
|
||||||
test:
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
platform:
|
|
||||||
- linux
|
|
||||||
compiler:
|
|
||||||
- gcc
|
|
||||||
- clang
|
|
||||||
configuration:
|
|
||||||
- Debug
|
|
||||||
- Release
|
|
||||||
cmake-args:
|
|
||||||
-
|
|
||||||
- "-Dunity=ON"
|
|
||||||
needs: dependencies
|
|
||||||
runs-on: [self-hosted, heavy]
|
|
||||||
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
|
|
||||||
env:
|
|
||||||
build_dir: .build
|
|
||||||
steps:
|
|
||||||
- name: upgrade conan
|
|
||||||
run: |
|
|
||||||
pip install --upgrade "conan<2"
|
|
||||||
- name: download cache
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
|
|
||||||
- name: extract cache
|
|
||||||
run: |
|
|
||||||
mkdir -p ~/.conan
|
|
||||||
tar -xzf conan.tar -C ~/.conan
|
|
||||||
- name: check environment
|
|
||||||
run: |
|
|
||||||
env | sort
|
|
||||||
echo ${PATH} | tr ':' '\n'
|
|
||||||
conan --version
|
|
||||||
cmake --version
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: dependencies
|
|
||||||
uses: ./.github/actions/dependencies
|
|
||||||
env:
|
|
||||||
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
|
|
||||||
with:
|
|
||||||
configuration: ${{ matrix.configuration }}
|
|
||||||
- name: build
|
|
||||||
uses: ./.github/actions/build
|
|
||||||
with:
|
|
||||||
generator: Ninja
|
|
||||||
configuration: ${{ matrix.configuration }}
|
|
||||||
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
|
|
||||||
- name: test
|
|
||||||
run: |
|
|
||||||
${build_dir}/rippled --unittest --unittest-jobs $(nproc)
|
|
||||||
|
|
||||||
reference-fee-test:
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
platform:
|
|
||||||
- linux
|
|
||||||
compiler:
|
|
||||||
- gcc
|
|
||||||
configuration:
|
|
||||||
- Debug
|
|
||||||
cmake-args:
|
|
||||||
- "-DUNIT_TEST_REFERENCE_FEE=200"
|
|
||||||
- "-DUNIT_TEST_REFERENCE_FEE=1000"
|
|
||||||
needs: dependencies
|
|
||||||
runs-on: [self-hosted, heavy]
|
|
||||||
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
|
|
||||||
env:
|
|
||||||
build_dir: .build
|
|
||||||
steps:
|
|
||||||
- name: upgrade conan
|
|
||||||
run: |
|
|
||||||
pip install --upgrade "conan<2"
|
|
||||||
- name: download cache
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
|
|
||||||
- name: extract cache
|
|
||||||
run: |
|
|
||||||
mkdir -p ~/.conan
|
|
||||||
tar -xzf conan.tar -C ~/.conan
|
|
||||||
- name: check environment
|
|
||||||
run: |
|
|
||||||
env | sort
|
|
||||||
echo ${PATH} | tr ':' '\n'
|
|
||||||
conan --version
|
|
||||||
cmake --version
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: dependencies
|
|
||||||
uses: ./.github/actions/dependencies
|
|
||||||
env:
|
|
||||||
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
|
|
||||||
with:
|
|
||||||
configuration: ${{ matrix.configuration }}
|
|
||||||
- name: build
|
|
||||||
uses: ./.github/actions/build
|
|
||||||
with:
|
|
||||||
generator: Ninja
|
|
||||||
configuration: ${{ matrix.configuration }}
|
|
||||||
cmake-args: "-Dassert=TRUE -Dwerr=TRUE ${{ matrix.cmake-args }}"
|
|
||||||
- name: test
|
|
||||||
run: |
|
|
||||||
${build_dir}/rippled --unittest --unittest-jobs $(nproc)
|
|
||||||
|
|
||||||
coverage:
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
platform:
|
|
||||||
- linux
|
|
||||||
compiler:
|
|
||||||
- gcc
|
|
||||||
configuration:
|
|
||||||
- Debug
|
|
||||||
needs: dependencies
|
|
||||||
runs-on: [self-hosted, heavy]
|
|
||||||
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
|
|
||||||
env:
|
|
||||||
build_dir: .build
|
|
||||||
steps:
|
|
||||||
- name: upgrade conan
|
|
||||||
run: |
|
|
||||||
pip install --upgrade "conan<2"
|
|
||||||
- name: download cache
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: ${{ matrix.platform }}-${{ matrix.compiler }}-${{ matrix.configuration }}
|
|
||||||
- name: extract cache
|
|
||||||
run: |
|
|
||||||
mkdir -p ~/.conan
|
|
||||||
tar -xzf conan.tar -C ~/.conan
|
|
||||||
- name: install gcovr
|
|
||||||
run: pip install "gcovr>=7,<9"
|
|
||||||
- name: check environment
|
|
||||||
run: |
|
|
||||||
echo ${PATH} | tr ':' '\n'
|
|
||||||
conan --version
|
|
||||||
cmake --version
|
|
||||||
gcovr --version
|
|
||||||
env | sort
|
|
||||||
ls ~/.conan
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: dependencies
|
|
||||||
uses: ./.github/actions/dependencies
|
|
||||||
env:
|
|
||||||
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
|
|
||||||
with:
|
|
||||||
configuration: ${{ matrix.configuration }}
|
|
||||||
- name: build
|
|
||||||
uses: ./.github/actions/build
|
|
||||||
with:
|
|
||||||
generator: Ninja
|
|
||||||
configuration: ${{ matrix.configuration }}
|
|
||||||
cmake-args: >-
|
|
||||||
-Dassert=TRUE
|
|
||||||
-Dwerr=TRUE
|
|
||||||
-Dcoverage=ON
|
|
||||||
-Dcoverage_format=xml
|
|
||||||
-DCODE_COVERAGE_VERBOSE=ON
|
|
||||||
-DCMAKE_CXX_FLAGS="-O0"
|
|
||||||
-DCMAKE_C_FLAGS="-O0"
|
|
||||||
cmake-target: coverage
|
|
||||||
- name: move coverage report
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mv "${build_dir}/coverage.xml" ./
|
|
||||||
- name: archive coverage report
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: coverage.xml
|
|
||||||
path: coverage.xml
|
|
||||||
retention-days: 30
|
|
||||||
- name: upload coverage report
|
|
||||||
uses: wandalen/wretry.action@v1.4.10
|
|
||||||
with:
|
|
||||||
action: codecov/codecov-action@v4.5.0
|
|
||||||
with: |
|
|
||||||
files: coverage.xml
|
|
||||||
fail_ci_if_error: true
|
|
||||||
disable_search: true
|
|
||||||
verbose: true
|
|
||||||
plugin: noop
|
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
|
||||||
attempt_limit: 5
|
|
||||||
attempt_delay: 210000 # in milliseconds
|
|
||||||
|
|
||||||
conan:
|
|
||||||
needs: dependencies
|
|
||||||
runs-on: [self-hosted, heavy]
|
|
||||||
container: ghcr.io/xrplf/rippled-build-ubuntu:aaf5e3e
|
|
||||||
env:
|
|
||||||
build_dir: .build
|
|
||||||
configuration: Release
|
|
||||||
steps:
|
|
||||||
- name: upgrade conan
|
|
||||||
run: |
|
|
||||||
pip install --upgrade "conan<2"
|
|
||||||
- name: download cache
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: linux-gcc-${{ env.configuration }}
|
|
||||||
- name: extract cache
|
|
||||||
run: |
|
|
||||||
mkdir -p ~/.conan
|
|
||||||
tar -xzf conan.tar -C ~/.conan
|
|
||||||
- name: check environment
|
|
||||||
run: |
|
|
||||||
env | sort
|
|
||||||
echo ${PATH} | tr ':' '\n'
|
|
||||||
conan --version
|
|
||||||
cmake --version
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: dependencies
|
|
||||||
uses: ./.github/actions/dependencies
|
|
||||||
env:
|
|
||||||
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
|
|
||||||
with:
|
|
||||||
configuration: ${{ env.configuration }}
|
|
||||||
- name: export
|
|
||||||
run: |
|
|
||||||
version=$(conan inspect --raw version .)
|
|
||||||
reference="xrpl/${version}@local/test"
|
|
||||||
conan remove -f ${reference} || true
|
|
||||||
conan export . local/test
|
|
||||||
echo "reference=${reference}" >> "${GITHUB_ENV}"
|
|
||||||
- name: build
|
|
||||||
run: |
|
|
||||||
cd tests/conan
|
|
||||||
mkdir ${build_dir}
|
|
||||||
cd ${build_dir}
|
|
||||||
conan install .. --output-folder . \
|
|
||||||
--require-override ${reference} --build missing
|
|
||||||
cmake .. \
|
|
||||||
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=./build/${configuration}/generators/conan_toolchain.cmake \
|
|
||||||
-DCMAKE_BUILD_TYPE=${configuration}
|
|
||||||
cmake --build .
|
|
||||||
./example | grep '^[[:digit:]]\+\.[[:digit:]]\+\.[[:digit:]]\+'
|
|
||||||
|
|
||||||
# NOTE we are not using dependencies built above because it lags with
|
|
||||||
# compiler versions. Instrumentation requires clang version 16 or
|
|
||||||
# later
|
|
||||||
|
|
||||||
instrumentation-build:
|
|
||||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
|
||||||
env:
|
|
||||||
CLANG_RELEASE: 16
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
runs-on: [self-hosted, heavy]
|
|
||||||
container: debian:bookworm
|
|
||||||
steps:
|
|
||||||
- name: install prerequisites
|
|
||||||
env:
|
|
||||||
DEBIAN_FRONTEND: noninteractive
|
|
||||||
run: |
|
|
||||||
apt-get update
|
|
||||||
apt-get install --yes --no-install-recommends \
|
|
||||||
clang-${CLANG_RELEASE} clang++-${CLANG_RELEASE} \
|
|
||||||
python3-pip python-is-python3 make cmake git wget
|
|
||||||
apt-get clean
|
|
||||||
update-alternatives --install \
|
|
||||||
/usr/bin/clang clang /usr/bin/clang-${CLANG_RELEASE} 100 \
|
|
||||||
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-${CLANG_RELEASE}
|
|
||||||
update-alternatives --auto clang
|
|
||||||
pip install --no-cache --break-system-packages "conan<2"
|
|
||||||
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: prepare environment
|
|
||||||
run: |
|
|
||||||
mkdir ${GITHUB_WORKSPACE}/.build
|
|
||||||
echo "SOURCE_DIR=$GITHUB_WORKSPACE" >> $GITHUB_ENV
|
|
||||||
echo "BUILD_DIR=$GITHUB_WORKSPACE/.build" >> $GITHUB_ENV
|
|
||||||
echo "CC=/usr/bin/clang" >> $GITHUB_ENV
|
|
||||||
echo "CXX=/usr/bin/clang++" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: configure Conan
|
|
||||||
run: |
|
|
||||||
conan profile new --detect default
|
|
||||||
conan profile update settings.compiler=clang default
|
|
||||||
conan profile update settings.compiler.version=${CLANG_RELEASE} default
|
|
||||||
conan profile update settings.compiler.libcxx=libstdc++11 default
|
|
||||||
conan profile update settings.compiler.cppstd=20 default
|
|
||||||
conan profile update options.rocksdb=False default
|
|
||||||
conan profile update \
|
|
||||||
'conf.tools.build:compiler_executables={"c": "/usr/bin/clang", "cpp": "/usr/bin/clang++"}' default
|
|
||||||
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
|
|
||||||
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
|
|
||||||
conan export external/snappy snappy/1.1.10@
|
|
||||||
conan export external/soci soci/4.0.3@
|
|
||||||
|
|
||||||
- name: build dependencies
|
|
||||||
run: |
|
|
||||||
cd ${BUILD_DIR}
|
|
||||||
conan install ${SOURCE_DIR} \
|
|
||||||
--output-folder ${BUILD_DIR} \
|
|
||||||
--install-folder ${BUILD_DIR} \
|
|
||||||
--build missing \
|
|
||||||
--settings build_type=Debug
|
|
||||||
|
|
||||||
- name: build with instrumentation
|
|
||||||
run: |
|
|
||||||
cd ${BUILD_DIR}
|
|
||||||
cmake -S ${SOURCE_DIR} -B ${BUILD_DIR} \
|
|
||||||
-Dvoidstar=ON \
|
|
||||||
-Dtests=ON \
|
|
||||||
-Dxrpld=ON \
|
|
||||||
-DCMAKE_BUILD_TYPE=Debug \
|
|
||||||
-DSECP256K1_BUILD_BENCHMARK=OFF \
|
|
||||||
-DSECP256K1_BUILD_TESTS=OFF \
|
|
||||||
-DSECP256K1_BUILD_EXHAUSTIVE_TESTS=OFF \
|
|
||||||
-DCMAKE_TOOLCHAIN_FILE=${BUILD_DIR}/build/generators/conan_toolchain.cmake
|
|
||||||
cmake --build . --parallel $(nproc)
|
|
||||||
|
|
||||||
- name: verify instrumentation enabled
|
|
||||||
run: |
|
|
||||||
cd ${BUILD_DIR}
|
|
||||||
./rippled --version | grep libvoidstar
|
|
||||||
|
|
||||||
- name: run unit tests
|
|
||||||
run: |
|
|
||||||
cd ${BUILD_DIR}
|
|
||||||
./rippled -u --unittest-jobs $(( $(nproc)/4 ))
|
|
||||||
140
.github/workflows/on-pr.yml
vendored
Normal file
140
.github/workflows/on-pr.yml
vendored
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
# This workflow runs all workflows to check, build and test the project on
|
||||||
|
# various Linux flavors, as well as on MacOS and Windows, on every push to a
|
||||||
|
# user branch. However, it will not run if the pull request is a draft unless it
|
||||||
|
# has the 'DraftRunCI' label.
|
||||||
|
name: PR
|
||||||
|
|
||||||
|
on:
|
||||||
|
merge_group:
|
||||||
|
types:
|
||||||
|
- checks_requested
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- reopened
|
||||||
|
- synchronize
|
||||||
|
- ready_for_review
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# This job determines whether the rest of the workflow should run. It runs
|
||||||
|
# when the PR is not a draft (which should also cover merge-group) or
|
||||||
|
# has the 'DraftRunCI' label.
|
||||||
|
should-run:
|
||||||
|
if: ${{ !github.event.pull_request.draft || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||||
|
- name: Determine changed files
|
||||||
|
# This step checks whether any files have changed that should
|
||||||
|
# cause the next jobs to run. We do it this way rather than
|
||||||
|
# using `paths` in the `on:` section, because all required
|
||||||
|
# checks must pass, even for changes that do not modify anything
|
||||||
|
# that affects those checks. We would therefore like to make the
|
||||||
|
# checks required only if the job runs, but GitHub does not
|
||||||
|
# support that directly. By always executing the workflow on new
|
||||||
|
# commits and by using the changed-files action below, we ensure
|
||||||
|
# that Github considers any skipped jobs to have passed, and in
|
||||||
|
# turn the required checks as well.
|
||||||
|
id: changes
|
||||||
|
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||||
|
with:
|
||||||
|
files: |
|
||||||
|
# These paths are unique to `on-pr.yml`.
|
||||||
|
.github/scripts/levelization/**
|
||||||
|
.github/scripts/rename/**
|
||||||
|
.github/workflows/reusable-check-levelization.yml
|
||||||
|
.github/workflows/reusable-check-rename.yml
|
||||||
|
.github/workflows/reusable-notify-clio.yml
|
||||||
|
.github/workflows/on-pr.yml
|
||||||
|
|
||||||
|
# Keep the paths below in sync with those in `on-trigger.yml`.
|
||||||
|
.github/actions/build-deps/**
|
||||||
|
.github/actions/build-test/**
|
||||||
|
.github/actions/setup-conan/**
|
||||||
|
.github/scripts/strategy-matrix/**
|
||||||
|
.github/workflows/reusable-build.yml
|
||||||
|
.github/workflows/reusable-build-test-config.yml
|
||||||
|
.github/workflows/reusable-build-test.yml
|
||||||
|
.github/workflows/reusable-strategy-matrix.yml
|
||||||
|
.github/workflows/reusable-test.yml
|
||||||
|
.codecov.yml
|
||||||
|
cmake/**
|
||||||
|
conan/**
|
||||||
|
external/**
|
||||||
|
include/**
|
||||||
|
src/**
|
||||||
|
tests/**
|
||||||
|
CMakeLists.txt
|
||||||
|
conanfile.py
|
||||||
|
conan.lock
|
||||||
|
- name: Check whether to run
|
||||||
|
# This step determines whether the rest of the workflow should
|
||||||
|
# run. The rest of the workflow will run if this job runs AND at
|
||||||
|
# least one of:
|
||||||
|
# * Any of the files checked in the `changes` step were modified
|
||||||
|
# * The PR is NOT a draft and is labeled "Ready to merge"
|
||||||
|
# * The workflow is running from the merge queue
|
||||||
|
id: go
|
||||||
|
env:
|
||||||
|
FILES: ${{ steps.changes.outputs.any_changed }}
|
||||||
|
DRAFT: ${{ github.event.pull_request.draft }}
|
||||||
|
READY: ${{ contains(github.event.pull_request.labels.*.name, 'Ready to merge') }}
|
||||||
|
MERGE: ${{ github.event_name == 'merge_group' }}
|
||||||
|
run: |
|
||||||
|
echo "go=${{ (env.DRAFT != 'true' && env.READY == 'true') || env.FILES == 'true' || env.MERGE == 'true' }}" >> "${GITHUB_OUTPUT}"
|
||||||
|
cat "${GITHUB_OUTPUT}"
|
||||||
|
outputs:
|
||||||
|
go: ${{ steps.go.outputs.go == 'true' }}
|
||||||
|
|
||||||
|
check-levelization:
|
||||||
|
needs: should-run
|
||||||
|
if: ${{ needs.should-run.outputs.go == 'true' }}
|
||||||
|
uses: ./.github/workflows/reusable-check-levelization.yml
|
||||||
|
|
||||||
|
check-rename:
|
||||||
|
needs: should-run
|
||||||
|
if: ${{ needs.should-run.outputs.go == 'true' }}
|
||||||
|
uses: ./.github/workflows/reusable-check-rename.yml
|
||||||
|
|
||||||
|
build-test:
|
||||||
|
needs: should-run
|
||||||
|
if: ${{ needs.should-run.outputs.go == 'true' }}
|
||||||
|
uses: ./.github/workflows/reusable-build-test.yml
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
os: [linux, macos, windows]
|
||||||
|
with:
|
||||||
|
os: ${{ matrix.os }}
|
||||||
|
secrets:
|
||||||
|
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
|
||||||
|
notify-clio:
|
||||||
|
needs:
|
||||||
|
- should-run
|
||||||
|
- build-test
|
||||||
|
if: ${{ needs.should-run.outputs.go == 'true' && contains(fromJSON('["release", "master"]'), github.ref_name) }}
|
||||||
|
uses: ./.github/workflows/reusable-notify-clio.yml
|
||||||
|
secrets:
|
||||||
|
clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
|
||||||
|
conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
|
||||||
|
conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
|
||||||
|
|
||||||
|
passed:
|
||||||
|
if: failure() || cancelled()
|
||||||
|
needs:
|
||||||
|
- build-test
|
||||||
|
- check-levelization
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Fail
|
||||||
|
run: false
|
||||||
80
.github/workflows/on-trigger.yml
vendored
Normal file
80
.github/workflows/on-trigger.yml
vendored
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
# This workflow runs all workflows to build the dependencies required for the
|
||||||
|
# project on various Linux flavors, as well as on MacOS and Windows, on a
|
||||||
|
# scheduled basis, on merge into the 'develop', 'release', or 'master' branches,
|
||||||
|
# or manually. The missing commits check is only run when the code is merged
|
||||||
|
# into the 'develop' or 'release' branches, and the documentation is built when
|
||||||
|
# the code is merged into the 'develop' branch.
|
||||||
|
name: Trigger
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- "develop"
|
||||||
|
- "release*"
|
||||||
|
- "master"
|
||||||
|
paths:
|
||||||
|
# These paths are unique to `on-trigger.yml`.
|
||||||
|
- ".github/workflows/reusable-check-missing-commits.yml"
|
||||||
|
- ".github/workflows/on-trigger.yml"
|
||||||
|
- ".github/workflows/publish-docs.yml"
|
||||||
|
|
||||||
|
# Keep the paths below in sync with those in `on-pr.yml`.
|
||||||
|
- ".github/actions/build-deps/**"
|
||||||
|
- ".github/actions/build-test/**"
|
||||||
|
- ".github/actions/setup-conan/**"
|
||||||
|
- ".github/scripts/strategy-matrix/**"
|
||||||
|
- ".github/workflows/reusable-build.yml"
|
||||||
|
- ".github/workflows/reusable-build-test-config.yml"
|
||||||
|
- ".github/workflows/reusable-build-test.yml"
|
||||||
|
- ".github/workflows/reusable-strategy-matrix.yml"
|
||||||
|
- ".github/workflows/reusable-test.yml"
|
||||||
|
- ".codecov.yml"
|
||||||
|
- "cmake/**"
|
||||||
|
- "conan/**"
|
||||||
|
- "external/**"
|
||||||
|
- "include/**"
|
||||||
|
- "src/**"
|
||||||
|
- "tests/**"
|
||||||
|
- "CMakeLists.txt"
|
||||||
|
- "conanfile.py"
|
||||||
|
- "conan.lock"
|
||||||
|
|
||||||
|
# Run at 06:32 UTC on every day of the week from Monday through Friday. This
|
||||||
|
# will force all dependencies to be rebuilt, which is useful to verify that
|
||||||
|
# all dependencies can be built successfully. Only the dependencies that
|
||||||
|
# are actually missing from the remote will be uploaded.
|
||||||
|
schedule:
|
||||||
|
- cron: "32 6 * * 1-5"
|
||||||
|
|
||||||
|
# Run when manually triggered via the GitHub UI or API.
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
# When a PR is merged into the develop branch it will be assigned a unique
|
||||||
|
# group identifier, so execution will continue even if another PR is merged
|
||||||
|
# while it is still running. In all other cases the group identifier is shared
|
||||||
|
# per branch, so that any in-progress runs are cancelled when a new commit is
|
||||||
|
# pushed.
|
||||||
|
group: ${{ github.workflow }}-${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' && github.sha || github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check-missing-commits:
|
||||||
|
if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
|
||||||
|
uses: ./.github/workflows/reusable-check-missing-commits.yml
|
||||||
|
|
||||||
|
build-test:
|
||||||
|
uses: ./.github/workflows/reusable-build-test.yml
|
||||||
|
strategy:
|
||||||
|
fail-fast: ${{ github.event_name == 'merge_group' }}
|
||||||
|
matrix:
|
||||||
|
os: [linux, macos, windows]
|
||||||
|
with:
|
||||||
|
os: ${{ matrix.os }}
|
||||||
|
strategy_matrix: ${{ github.event_name == 'schedule' && 'all' || 'minimal' }}
|
||||||
|
secrets:
|
||||||
|
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||||
15
.github/workflows/pre-commit.yml
vendored
Normal file
15
.github/workflows/pre-commit.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
name: Run pre-commit hooks
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
push:
|
||||||
|
branches: [develop, release, master]
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Call the workflow in the XRPLF/actions repo that runs the pre-commit hooks.
|
||||||
|
run-hooks:
|
||||||
|
uses: XRPLF/actions/.github/workflows/pre-commit.yml@34790936fae4c6c751f62ec8c06696f9c1a5753a
|
||||||
|
with:
|
||||||
|
runs_on: ubuntu-latest
|
||||||
|
container: '{ "image": "ghcr.io/xrplf/ci/tools-rippled-pre-commit:sha-a8c7be1" }'
|
||||||
72
.github/workflows/publish-docs.yml
vendored
Normal file
72
.github/workflows/publish-docs.yml
vendored
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
# This workflow builds the documentation for the repository, and publishes it to
|
||||||
|
# GitHub Pages when changes are merged into the default branch.
|
||||||
|
name: Build and publish documentation
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- ".github/workflows/publish-docs.yml"
|
||||||
|
- "*.md"
|
||||||
|
- "**/*.md"
|
||||||
|
- "docs/**"
|
||||||
|
- "include/**"
|
||||||
|
- "src/libxrpl/**"
|
||||||
|
- "src/xrpld/**"
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
env:
|
||||||
|
BUILD_DIR: .build
|
||||||
|
NPROC_SUBTRACT: 2
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
publish:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container: ghcr.io/xrplf/ci/tools-rippled-documentation:sha-a8c7be1
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||||
|
|
||||||
|
- name: Get number of processors
|
||||||
|
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
|
||||||
|
id: nproc
|
||||||
|
with:
|
||||||
|
subtract: ${{ env.NPROC_SUBTRACT }}
|
||||||
|
|
||||||
|
- name: Check configuration
|
||||||
|
run: |
|
||||||
|
echo 'Checking path.'
|
||||||
|
echo ${PATH} | tr ':' '\n'
|
||||||
|
|
||||||
|
echo 'Checking environment variables.'
|
||||||
|
env | sort
|
||||||
|
|
||||||
|
echo 'Checking CMake version.'
|
||||||
|
cmake --version
|
||||||
|
|
||||||
|
echo 'Checking Doxygen version.'
|
||||||
|
doxygen --version
|
||||||
|
|
||||||
|
- name: Build documentation
|
||||||
|
env:
|
||||||
|
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||||
|
run: |
|
||||||
|
mkdir -p "${BUILD_DIR}"
|
||||||
|
cd "${BUILD_DIR}"
|
||||||
|
cmake -Donly_docs=ON ..
|
||||||
|
cmake --build . --target docs --parallel ${BUILD_NPROC}
|
||||||
|
|
||||||
|
- name: Publish documentation
|
||||||
|
if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
|
||||||
|
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
publish_dir: ${{ env.BUILD_DIR }}/docs/html
|
||||||
77
.github/workflows/reusable-build-test-config.yml
vendored
Normal file
77
.github/workflows/reusable-build-test-config.yml
vendored
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
name: Build and test configuration
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
build_dir:
|
||||||
|
description: "The directory where to build."
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
build_only:
|
||||||
|
description: 'Whether to only build or to build and test the code ("true", "false").'
|
||||||
|
required: true
|
||||||
|
type: boolean
|
||||||
|
build_type:
|
||||||
|
description: 'The build type to use ("Debug", "Release").'
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
cmake_args:
|
||||||
|
description: "Additional arguments to pass to CMake."
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ""
|
||||||
|
cmake_target:
|
||||||
|
description: "The CMake target to build."
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
|
||||||
|
runs_on:
|
||||||
|
description: Runner to run the job on as a JSON string
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
image:
|
||||||
|
description: "The image to run in (leave empty to run natively)"
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
config_name:
|
||||||
|
description: "The configuration string (used for naming artifacts and such)."
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
nproc_subtract:
|
||||||
|
description: "The number of processors to subtract when calculating parallelism."
|
||||||
|
required: false
|
||||||
|
type: number
|
||||||
|
default: 2
|
||||||
|
|
||||||
|
secrets:
|
||||||
|
CODECOV_TOKEN:
|
||||||
|
description: "The Codecov token to use for uploading coverage reports."
|
||||||
|
required: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
uses: ./.github/workflows/reusable-build.yml
|
||||||
|
with:
|
||||||
|
build_dir: ${{ inputs.build_dir }}
|
||||||
|
build_type: ${{ inputs.build_type }}
|
||||||
|
cmake_args: ${{ inputs.cmake_args }}
|
||||||
|
cmake_target: ${{ inputs.cmake_target }}
|
||||||
|
runs_on: ${{ inputs.runs_on }}
|
||||||
|
image: ${{ inputs.image }}
|
||||||
|
config_name: ${{ inputs.config_name }}
|
||||||
|
nproc_subtract: ${{ inputs.nproc_subtract }}
|
||||||
|
secrets:
|
||||||
|
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
|
||||||
|
test:
|
||||||
|
needs: build
|
||||||
|
uses: ./.github/workflows/reusable-test.yml
|
||||||
|
with:
|
||||||
|
run_tests: ${{ !inputs.build_only }}
|
||||||
|
verify_voidstar: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
|
||||||
|
runs_on: ${{ inputs.runs_on }}
|
||||||
|
image: ${{ inputs.image }}
|
||||||
|
config_name: ${{ inputs.config_name }}
|
||||||
|
nproc_subtract: ${{ inputs.nproc_subtract }}
|
||||||
58
.github/workflows/reusable-build-test.yml
vendored
Normal file
58
.github/workflows/reusable-build-test.yml
vendored
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
# This workflow builds and tests the binary for various configurations.
|
||||||
|
name: Build and test
|
||||||
|
|
||||||
|
# This workflow can only be triggered by other workflows. Note that the
|
||||||
|
# workflow_call event does not support the 'choice' input type, see
|
||||||
|
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
|
||||||
|
# so we use 'string' instead.
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
build_dir:
|
||||||
|
description: "The directory where to build."
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ".build"
|
||||||
|
os:
|
||||||
|
description: 'The operating system to use for the build ("linux", "macos", "windows").'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
strategy_matrix:
|
||||||
|
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
|
||||||
|
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: "minimal"
|
||||||
|
secrets:
|
||||||
|
CODECOV_TOKEN:
|
||||||
|
description: "The Codecov token to use for uploading coverage reports."
|
||||||
|
required: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Generate the strategy matrix to be used by the following job.
|
||||||
|
generate-matrix:
|
||||||
|
uses: ./.github/workflows/reusable-strategy-matrix.yml
|
||||||
|
with:
|
||||||
|
os: ${{ inputs.os }}
|
||||||
|
strategy_matrix: ${{ inputs.strategy_matrix }}
|
||||||
|
|
||||||
|
# Build and test the binary for each configuration.
|
||||||
|
build-test-config:
|
||||||
|
needs:
|
||||||
|
- generate-matrix
|
||||||
|
uses: ./.github/workflows/reusable-build-test-config.yml
|
||||||
|
strategy:
|
||||||
|
fail-fast: ${{ github.event_name == 'merge_group' }}
|
||||||
|
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
|
||||||
|
max-parallel: 10
|
||||||
|
with:
|
||||||
|
build_dir: ${{ inputs.build_dir }}
|
||||||
|
build_only: ${{ matrix.build_only }}
|
||||||
|
build_type: ${{ matrix.build_type }}
|
||||||
|
cmake_args: ${{ matrix.cmake_args }}
|
||||||
|
cmake_target: ${{ matrix.cmake_target }}
|
||||||
|
runs_on: ${{ toJSON(matrix.architecture.runner) }}
|
||||||
|
image: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || '' }}
|
||||||
|
config_name: ${{ matrix.config_name }}
|
||||||
|
secrets:
|
||||||
|
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||||
154
.github/workflows/reusable-build.yml
vendored
Normal file
154
.github/workflows/reusable-build.yml
vendored
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
name: Build rippled
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
build_dir:
|
||||||
|
description: "The directory where to build."
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
build_type:
|
||||||
|
description: 'The build type to use ("Debug", "Release").'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
cmake_args:
|
||||||
|
description: "Additional arguments to pass to CMake."
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
cmake_target:
|
||||||
|
description: "The CMake target to build."
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
runs_on:
|
||||||
|
description: Runner to run the job on as a JSON string
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
image:
|
||||||
|
description: "The image to run in (leave empty to run natively)"
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
config_name:
|
||||||
|
description: "The name of the configuration."
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
nproc_subtract:
|
||||||
|
description: "The number of processors to subtract when calculating parallelism."
|
||||||
|
required: true
|
||||||
|
type: number
|
||||||
|
|
||||||
|
secrets:
|
||||||
|
CODECOV_TOKEN:
|
||||||
|
description: "The Codecov token to use for uploading coverage reports."
|
||||||
|
required: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
name: Build ${{ inputs.config_name }}
|
||||||
|
runs-on: ${{ fromJSON(inputs.runs_on) }}
|
||||||
|
container: ${{ inputs.image != '' && inputs.image || null }}
|
||||||
|
timeout-minutes: 60
|
||||||
|
steps:
|
||||||
|
- name: Cleanup workspace
|
||||||
|
if: ${{ runner.os == 'macOS' }}
|
||||||
|
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
|
||||||
|
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||||
|
|
||||||
|
- name: Prepare runner
|
||||||
|
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
|
||||||
|
with:
|
||||||
|
disable_ccache: false
|
||||||
|
|
||||||
|
- name: Print build environment
|
||||||
|
uses: ./.github/actions/print-env
|
||||||
|
|
||||||
|
- name: Get number of processors
|
||||||
|
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
|
||||||
|
id: nproc
|
||||||
|
with:
|
||||||
|
subtract: ${{ inputs.nproc_subtract }}
|
||||||
|
|
||||||
|
- name: Setup Conan
|
||||||
|
uses: ./.github/actions/setup-conan
|
||||||
|
|
||||||
|
- name: Build dependencies
|
||||||
|
uses: ./.github/actions/build-deps
|
||||||
|
with:
|
||||||
|
build_dir: ${{ inputs.build_dir }}
|
||||||
|
build_nproc: ${{ steps.nproc.outputs.nproc }}
|
||||||
|
build_type: ${{ inputs.build_type }}
|
||||||
|
# Set the verbosity to "quiet" for Windows to avoid an excessive
|
||||||
|
# amount of logs. For other OSes, the "verbose" logs are more useful.
|
||||||
|
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
|
||||||
|
|
||||||
|
- name: Configure CMake
|
||||||
|
shell: bash
|
||||||
|
working-directory: ${{ inputs.build_dir }}
|
||||||
|
env:
|
||||||
|
BUILD_TYPE: ${{ inputs.build_type }}
|
||||||
|
CMAKE_ARGS: ${{ inputs.cmake_args }}
|
||||||
|
run: |
|
||||||
|
cmake \
|
||||||
|
-G '${{ runner.os == 'Windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
|
||||||
|
-DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
|
||||||
|
-DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
|
||||||
|
${CMAKE_ARGS} \
|
||||||
|
..
|
||||||
|
|
||||||
|
- name: Build the binary
|
||||||
|
shell: bash
|
||||||
|
working-directory: ${{ inputs.build_dir }}
|
||||||
|
env:
|
||||||
|
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||||
|
BUILD_TYPE: ${{ inputs.build_type }}
|
||||||
|
CMAKE_TARGET: ${{ inputs.cmake_target }}
|
||||||
|
run: |
|
||||||
|
cmake \
|
||||||
|
--build . \
|
||||||
|
--config "${BUILD_TYPE}" \
|
||||||
|
--parallel ${BUILD_NPROC} \
|
||||||
|
--target "${CMAKE_TARGET}"
|
||||||
|
|
||||||
|
- name: Put built binaries in one location
|
||||||
|
shell: bash
|
||||||
|
working-directory: ${{ inputs.build_dir }}
|
||||||
|
env:
|
||||||
|
BUILD_TYPE_DIR: ${{ runner.os == 'Windows' && inputs.build_type || '' }}
|
||||||
|
CMAKE_TARGET: ${{ inputs.cmake_target }}
|
||||||
|
run: |
|
||||||
|
mkdir -p ./binaries/doctest/
|
||||||
|
|
||||||
|
cp ./${BUILD_TYPE_DIR}/rippled* ./binaries/
|
||||||
|
if [ "${CMAKE_TARGET}" != 'coverage' ]; then
|
||||||
|
cp ./src/tests/libxrpl/${BUILD_TYPE_DIR}/xrpl.test.* ./binaries/doctest/
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload rippled artifact
|
||||||
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
|
env:
|
||||||
|
BUILD_DIR: ${{ inputs.build_dir }}
|
||||||
|
with:
|
||||||
|
name: rippled-${{ inputs.config_name }}
|
||||||
|
path: ${{ env.BUILD_DIR }}/binaries/
|
||||||
|
retention-days: 3
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
|
- name: Upload coverage report
|
||||||
|
if: ${{ github.repository_owner == 'XRPLF' && inputs.cmake_target == 'coverage' }}
|
||||||
|
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||||
|
with:
|
||||||
|
disable_search: true
|
||||||
|
disable_telem: true
|
||||||
|
fail_ci_if_error: true
|
||||||
|
files: ${{ inputs.build_dir }}/coverage.xml
|
||||||
|
plugins: noop
|
||||||
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
verbose: true
|
||||||
46
.github/workflows/reusable-check-levelization.yml
vendored
Normal file
46
.github/workflows/reusable-check-levelization.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
# This workflow checks if the dependencies between the modules are correctly
|
||||||
|
# indexed.
|
||||||
|
name: Check levelization
|
||||||
|
|
||||||
|
# This workflow can only be triggered by other workflows.
|
||||||
|
on: workflow_call
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}-levelization
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
levelization:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||||
|
- name: Check levelization
|
||||||
|
run: .github/scripts/levelization/generate.sh
|
||||||
|
- name: Check for differences
|
||||||
|
env:
|
||||||
|
MESSAGE: |
|
||||||
|
|
||||||
|
The dependency relationships between the modules in rippled have
|
||||||
|
changed, which may be an improvement or a regression.
|
||||||
|
|
||||||
|
A rule of thumb is that if your changes caused something to be
|
||||||
|
removed from loops.txt, it's probably an improvement, while if
|
||||||
|
something was added, it's probably a regression.
|
||||||
|
|
||||||
|
Run '.github/scripts/levelization/generate.sh' in your repo, commit
|
||||||
|
and push the changes. See .github/scripts/levelization/README.md for
|
||||||
|
more info.
|
||||||
|
run: |
|
||||||
|
DIFF=$(git status --porcelain)
|
||||||
|
if [ -n "${DIFF}" ]; then
|
||||||
|
# Print the differences to give the contributor a hint about what to
|
||||||
|
# expect when running levelization on their own machine.
|
||||||
|
git diff
|
||||||
|
echo "${MESSAGE}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
62
.github/workflows/reusable-check-missing-commits.yml
vendored
Normal file
62
.github/workflows/reusable-check-missing-commits.yml
vendored
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
# This workflow checks that all commits in the "master" branch are also in the
|
||||||
|
# "release" and "develop" branches, and that all commits in the "release" branch
|
||||||
|
# are also in the "develop" branch.
|
||||||
|
name: Check for missing commits
|
||||||
|
|
||||||
|
# This workflow can only be triggered by other workflows.
|
||||||
|
on: workflow_call
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}-missing-commits
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Check for missing commits
|
||||||
|
env:
|
||||||
|
MESSAGE: |
|
||||||
|
|
||||||
|
If you are reading this, then the commits indicated above are missing
|
||||||
|
from the "develop" and/or "release" branch. Do a reverse-merge as soon
|
||||||
|
as possible. See CONTRIBUTING.md for instructions.
|
||||||
|
run: |
|
||||||
|
set -o pipefail
|
||||||
|
# Branches are ordered by how "canonical" they are. Every commit in one
|
||||||
|
# branch should be in all the branches behind it.
|
||||||
|
order=(master release develop)
|
||||||
|
branches=()
|
||||||
|
for branch in "${order[@]}"; do
|
||||||
|
# Check that the branches exist so that this job will work on forked
|
||||||
|
# repos, which don't necessarily have master and release branches.
|
||||||
|
echo "Checking if ${branch} exists."
|
||||||
|
if git ls-remote --exit-code --heads origin \
|
||||||
|
refs/heads/${branch} > /dev/null; then
|
||||||
|
branches+=(origin/${branch})
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
prior=()
|
||||||
|
for branch in "${branches[@]}"; do
|
||||||
|
if [[ ${#prior[@]} -ne 0 ]]; then
|
||||||
|
echo "Checking ${prior[@]} for commits missing from ${branch}."
|
||||||
|
git log --oneline --no-merges "${prior[@]}" \
|
||||||
|
^$branch | tee -a "missing-commits.txt"
|
||||||
|
echo
|
||||||
|
fi
|
||||||
|
prior+=("${branch}")
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
|
||||||
|
echo "${MESSAGE}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
48
.github/workflows/reusable-check-rename.yml
vendored
Normal file
48
.github/workflows/reusable-check-rename.yml
vendored
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
# This workflow checks if the codebase is properly renamed, see more info in
|
||||||
|
# .github/scripts/rename/README.md.
|
||||||
|
name: Check rename
|
||||||
|
|
||||||
|
# This workflow can only be triggered by other workflows.
|
||||||
|
on: workflow_call
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}-rename
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
rename:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||||
|
- name: Check definitions
|
||||||
|
run: .github/scripts/rename/definitions.sh .
|
||||||
|
- name: Check copyright notices
|
||||||
|
run: .github/scripts/rename/copyright.sh .
|
||||||
|
- name: Check CMake configs
|
||||||
|
run: .github/scripts/rename/cmake.sh .
|
||||||
|
- name: Check binary name
|
||||||
|
run: .github/scripts/rename/binary.sh .
|
||||||
|
- name: Check for differences
|
||||||
|
env:
|
||||||
|
MESSAGE: |
|
||||||
|
|
||||||
|
One or more files contain changes that do not adhere to new naming
|
||||||
|
conventions.
|
||||||
|
|
||||||
|
Run the scripts in '.github/scripts/rename/' in your repo, commit
|
||||||
|
and push the changes. See .github/scripts/rename/README.md for
|
||||||
|
more info.
|
||||||
|
run: |
|
||||||
|
DIFF=$(git status --porcelain)
|
||||||
|
if [ -n "${DIFF}" ]; then
|
||||||
|
# Print the differences to give the contributor a hint about what to
|
||||||
|
# expect when running the renaming scripts on their own machine.
|
||||||
|
git diff
|
||||||
|
echo "${MESSAGE}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
91
.github/workflows/reusable-notify-clio.yml
vendored
Normal file
91
.github/workflows/reusable-notify-clio.yml
vendored
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
# This workflow exports the built libxrpl package to the Conan remote on a
|
||||||
|
# a channel named after the pull request, and notifies the Clio repository about
|
||||||
|
# the new version so it can check for compatibility.
|
||||||
|
name: Notify Clio
|
||||||
|
|
||||||
|
# This workflow can only be triggered by other workflows.
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
conan_remote_name:
|
||||||
|
description: "The name of the Conan remote to use."
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: xrplf
|
||||||
|
conan_remote_url:
|
||||||
|
description: "The URL of the Conan endpoint to use."
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: https://conan.ripplex.io
|
||||||
|
secrets:
|
||||||
|
clio_notify_token:
|
||||||
|
description: "The GitHub token to notify Clio about new versions."
|
||||||
|
required: true
|
||||||
|
conan_remote_username:
|
||||||
|
description: "The username for logging into the Conan remote."
|
||||||
|
required: true
|
||||||
|
conan_remote_password:
|
||||||
|
description: "The password for logging into the Conan remote."
|
||||||
|
required: true
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}-clio
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
upload:
|
||||||
|
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13-sha-5dd7158
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||||
|
- name: Generate outputs
|
||||||
|
id: generate
|
||||||
|
env:
|
||||||
|
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||||
|
run: |
|
||||||
|
echo 'Generating user and channel.'
|
||||||
|
echo "user=clio" >> "${GITHUB_OUTPUT}"
|
||||||
|
echo "channel=pr_${PR_NUMBER}" >> "${GITHUB_OUTPUT}"
|
||||||
|
echo 'Extracting version.'
|
||||||
|
echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
|
||||||
|
- name: Calculate conan reference
|
||||||
|
id: conan_ref
|
||||||
|
run: |
|
||||||
|
echo "conan_ref=${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}" >> "${GITHUB_OUTPUT}"
|
||||||
|
- name: Set up Conan
|
||||||
|
uses: ./.github/actions/setup-conan
|
||||||
|
with:
|
||||||
|
conan_remote_name: ${{ inputs.conan_remote_name }}
|
||||||
|
conan_remote_url: ${{ inputs.conan_remote_url }}
|
||||||
|
- name: Log into Conan remote
|
||||||
|
env:
|
||||||
|
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||||
|
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
|
||||||
|
- name: Upload package
|
||||||
|
env:
|
||||||
|
CONAN_REMOTE_NAME: ${{ inputs.conan_remote_name }}
|
||||||
|
run: |
|
||||||
|
conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
|
||||||
|
conan upload --confirm --check --remote="${CONAN_REMOTE_NAME}" xrpl/${{ steps.conan_ref.outputs.conan_ref }}
|
||||||
|
outputs:
|
||||||
|
conan_ref: ${{ steps.conan_ref.outputs.conan_ref }}
|
||||||
|
|
||||||
|
notify:
|
||||||
|
needs: upload
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Notify Clio
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.clio_notify_token }}
|
||||||
|
PR_URL: ${{ github.event.pull_request.html_url }}
|
||||||
|
run: |
|
||||||
|
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
|
||||||
|
/repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
|
||||||
|
-F "client_payload[conan_ref]=${{ needs.upload.outputs.conan_ref }}" \
|
||||||
|
-F "client_payload[pr_url]=${PR_URL}"
|
||||||
41
.github/workflows/reusable-strategy-matrix.yml
vendored
Normal file
41
.github/workflows/reusable-strategy-matrix.yml
vendored
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
name: Generate strategy matrix
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
os:
|
||||||
|
description: 'The operating system to use for the build ("linux", "macos", "windows").'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
strategy_matrix:
|
||||||
|
# TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
|
||||||
|
description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: "minimal"
|
||||||
|
outputs:
|
||||||
|
matrix:
|
||||||
|
description: "The generated strategy matrix."
|
||||||
|
value: ${{ jobs.generate-matrix.outputs.matrix }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
generate-matrix:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
matrix: ${{ steps.generate.outputs.matrix }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||||
|
with:
|
||||||
|
python-version: 3.13
|
||||||
|
|
||||||
|
- name: Generate strategy matrix
|
||||||
|
working-directory: .github/scripts/strategy-matrix
|
||||||
|
id: generate
|
||||||
|
env:
|
||||||
|
GENERATE_CONFIG: ${{ inputs.os != '' && format('--config={0}.json', inputs.os) || '' }}
|
||||||
|
GENERATE_OPTION: ${{ inputs.strategy_matrix == 'all' && '--all' || '' }}
|
||||||
|
run: ./generate.py ${GENERATE_OPTION} ${GENERATE_CONFIG} >> "${GITHUB_OUTPUT}"
|
||||||
111
.github/workflows/reusable-test.yml
vendored
Normal file
111
.github/workflows/reusable-test.yml
vendored
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
name: Test rippled
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
verify_voidstar:
|
||||||
|
description: "Whether to verify the presence of voidstar instrumentation."
|
||||||
|
required: true
|
||||||
|
type: boolean
|
||||||
|
run_tests:
|
||||||
|
description: "Whether to run unit tests"
|
||||||
|
required: true
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
runs_on:
|
||||||
|
description: Runner to run the job on as a JSON string
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
image:
|
||||||
|
description: "The image to run in (leave empty to run natively)"
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
config_name:
|
||||||
|
description: "The name of the configuration."
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
nproc_subtract:
|
||||||
|
description: "The number of processors to subtract when calculating parallelism."
|
||||||
|
required: true
|
||||||
|
type: number
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
name: Test ${{ inputs.config_name }}
|
||||||
|
runs-on: ${{ fromJSON(inputs.runs_on) }}
|
||||||
|
container: ${{ inputs.image != '' && inputs.image || null }}
|
||||||
|
timeout-minutes: 30
|
||||||
|
steps:
|
||||||
|
- name: Cleanup workspace
|
||||||
|
if: ${{ runner.os == 'macOS' }}
|
||||||
|
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
|
||||||
|
|
||||||
|
- name: Get number of processors
|
||||||
|
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
|
||||||
|
id: nproc
|
||||||
|
with:
|
||||||
|
subtract: ${{ inputs.nproc_subtract }}
|
||||||
|
|
||||||
|
- name: Download rippled artifact
|
||||||
|
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||||
|
with:
|
||||||
|
name: rippled-${{ inputs.config_name }}
|
||||||
|
|
||||||
|
- name: Make binary executable (Linux and macOS)
|
||||||
|
shell: bash
|
||||||
|
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
|
||||||
|
run: |
|
||||||
|
chmod +x ./rippled
|
||||||
|
|
||||||
|
- name: Check linking (Linux)
|
||||||
|
if: ${{ runner.os == 'Linux' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
ldd ./rippled
|
||||||
|
if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
|
||||||
|
echo 'The binary is statically linked.'
|
||||||
|
else
|
||||||
|
echo 'The binary is dynamically linked.'
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Verifying presence of instrumentation
|
||||||
|
if: ${{ inputs.verify_voidstar }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
./rippled --version | grep libvoidstar
|
||||||
|
|
||||||
|
- name: Run the embedded tests
|
||||||
|
if: ${{ inputs.run_tests }}
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
BUILD_NPROC: ${{ steps.nproc.outputs.nproc }}
|
||||||
|
run: |
|
||||||
|
./rippled --unittest --unittest-jobs ${BUILD_NPROC}
|
||||||
|
|
||||||
|
- name: Run the separate tests
|
||||||
|
if: ${{ inputs.run_tests }}
|
||||||
|
env:
|
||||||
|
EXT: ${{ runner.os == 'Windows' && '.exe' || '' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
for test_file in ./doctest/*${EXT}; do
|
||||||
|
echo "Executing $test_file"
|
||||||
|
chmod +x "$test_file"
|
||||||
|
if [[ "${{ runner.os }}" == "Windows" && "$test_file" == "./doctest/xrpl.test.net.exe" ]]; then
|
||||||
|
echo "Skipping $test_file on Windows"
|
||||||
|
else
|
||||||
|
"$test_file"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Debug failure (Linux)
|
||||||
|
if: ${{ failure() && runner.os == 'Linux' && inputs.run_tests }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "IPv4 local port range:"
|
||||||
|
cat /proc/sys/net/ipv4/ip_local_port_range
|
||||||
|
echo "Netstat:"
|
||||||
|
netstat -an
|
||||||
107
.github/workflows/upload-conan-deps.yml
vendored
Normal file
107
.github/workflows/upload-conan-deps.yml
vendored
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
name: Upload Conan Dependencies
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 3 * * 2-6"
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
force_source_build:
|
||||||
|
description: "Force source build of all dependencies"
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
force_upload:
|
||||||
|
description: "Force upload of all dependencies"
|
||||||
|
required: false
|
||||||
|
default: false
|
||||||
|
type: boolean
|
||||||
|
pull_request:
|
||||||
|
branches: [develop]
|
||||||
|
paths:
|
||||||
|
# This allows testing changes to the upload workflow in a PR
|
||||||
|
- .github/workflows/upload-conan-deps.yml
|
||||||
|
push:
|
||||||
|
branches: [develop]
|
||||||
|
paths:
|
||||||
|
- .github/workflows/upload-conan-deps.yml
|
||||||
|
- .github/workflows/reusable-strategy-matrix.yml
|
||||||
|
- .github/actions/build-deps/action.yml
|
||||||
|
- .github/actions/setup-conan/action.yml
|
||||||
|
- ".github/scripts/strategy-matrix/**"
|
||||||
|
- conanfile.py
|
||||||
|
- conan.lock
|
||||||
|
|
||||||
|
env:
|
||||||
|
CONAN_REMOTE_NAME: xrplf
|
||||||
|
CONAN_REMOTE_URL: https://conan.ripplex.io
|
||||||
|
NPROC_SUBTRACT: 2
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Generate the strategy matrix to be used by the following job.
|
||||||
|
generate-matrix:
|
||||||
|
uses: ./.github/workflows/reusable-strategy-matrix.yml
|
||||||
|
with:
|
||||||
|
strategy_matrix: ${{ github.event_name == 'pull_request' && 'minimal' || 'all' }}
|
||||||
|
|
||||||
|
# Build and upload the dependencies for each configuration.
|
||||||
|
run-upload-conan-deps:
|
||||||
|
needs:
|
||||||
|
- generate-matrix
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
|
||||||
|
max-parallel: 10
|
||||||
|
runs-on: ${{ matrix.architecture.runner }}
|
||||||
|
container: ${{ contains(matrix.architecture.platform, 'linux') && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}-sha-{4}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version, matrix.os.image_sha) || null }}
|
||||||
|
steps:
|
||||||
|
- name: Cleanup workspace
|
||||||
|
if: ${{ runner.os == 'macOS' }}
|
||||||
|
uses: XRPLF/actions/.github/actions/cleanup-workspace@3f044c7478548e3c32ff68980eeb36ece02b364e
|
||||||
|
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||||
|
|
||||||
|
- name: Prepare runner
|
||||||
|
uses: XRPLF/actions/.github/actions/prepare-runner@99685816bb60a95a66852f212f382580e180df3a
|
||||||
|
with:
|
||||||
|
disable_ccache: false
|
||||||
|
|
||||||
|
- name: Print build environment
|
||||||
|
uses: ./.github/actions/print-env
|
||||||
|
|
||||||
|
- name: Get number of processors
|
||||||
|
uses: XRPLF/actions/.github/actions/get-nproc@046b1620f6bfd6cd0985dc82c3df02786801fe0a
|
||||||
|
id: nproc
|
||||||
|
with:
|
||||||
|
subtract: ${{ env.NPROC_SUBTRACT }}
|
||||||
|
|
||||||
|
- name: Setup Conan
|
||||||
|
uses: ./.github/actions/setup-conan
|
||||||
|
with:
|
||||||
|
conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
|
||||||
|
conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
|
||||||
|
|
||||||
|
- name: Build dependencies
|
||||||
|
uses: ./.github/actions/build-deps
|
||||||
|
with:
|
||||||
|
build_dir: .build
|
||||||
|
build_nproc: ${{ steps.nproc.outputs.nproc }}
|
||||||
|
build_type: ${{ matrix.build_type }}
|
||||||
|
force_build: ${{ github.event_name == 'schedule' || github.event.inputs.force_source_build == 'true' }}
|
||||||
|
# Set the verbosity to "quiet" for Windows to avoid an excessive
|
||||||
|
# amount of logs. For other OSes, the "verbose" logs are more useful.
|
||||||
|
log_verbosity: ${{ runner.os == 'Windows' && 'quiet' || 'verbose' }}
|
||||||
|
|
||||||
|
- name: Log into Conan remote
|
||||||
|
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
|
||||||
|
run: conan remote login "${CONAN_REMOTE_NAME}" "${{ secrets.CONAN_REMOTE_USERNAME }}" --password "${{ secrets.CONAN_REMOTE_PASSWORD }}"
|
||||||
|
|
||||||
|
- name: Upload Conan packages
|
||||||
|
if: ${{ github.repository_owner == 'XRPLF' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch') }}
|
||||||
|
env:
|
||||||
|
FORCE_OPTION: ${{ github.event.inputs.force_upload == 'true' && '--force' || '' }}
|
||||||
|
run: conan upload "*" --remote="${CONAN_REMOTE_NAME}" --confirm ${FORCE_OPTION}
|
||||||
99
.github/workflows/windows.yml
vendored
99
.github/workflows/windows.yml
vendored
@@ -1,99 +0,0 @@
|
|||||||
name: windows
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
types: [opened, reopened, synchronize, ready_for_review]
|
|
||||||
push:
|
|
||||||
# If the branches list is ever changed, be sure to change it on all
|
|
||||||
# build/test jobs (nix, macos, windows, instrumentation)
|
|
||||||
branches:
|
|
||||||
# Always build the package branches
|
|
||||||
- develop
|
|
||||||
- release
|
|
||||||
- master
|
|
||||||
# Branches that opt-in to running
|
|
||||||
- 'ci/**'
|
|
||||||
|
|
||||||
# https://docs.github.com/en/actions/using-jobs/using-concurrency
|
|
||||||
concurrency:
|
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
test:
|
|
||||||
if: ${{ github.event_name == 'push' || github.event.pull_request.draft != true || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
version:
|
|
||||||
- generator: Visual Studio 17 2022
|
|
||||||
runs-on: windows-2022
|
|
||||||
configuration:
|
|
||||||
- type: Release
|
|
||||||
tests: true
|
|
||||||
- type: Debug
|
|
||||||
# Skip running unit tests on debug builds, because they
|
|
||||||
# take an unreasonable amount of time
|
|
||||||
tests: false
|
|
||||||
runtime: d
|
|
||||||
runs-on: ${{ matrix.version.runs-on }}
|
|
||||||
env:
|
|
||||||
build_dir: .build
|
|
||||||
steps:
|
|
||||||
- name: checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
- name: choose Python
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: 3.9
|
|
||||||
- name: learn Python cache directory
|
|
||||||
id: pip-cache
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
python -m pip install --upgrade pip
|
|
||||||
echo "dir=$(pip cache dir)" | tee ${GITHUB_OUTPUT}
|
|
||||||
- name: restore Python cache directory
|
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: ${{ steps.pip-cache.outputs.dir }}
|
|
||||||
key: ${{ runner.os }}-${{ hashFiles('.github/workflows/windows.yml') }}
|
|
||||||
- name: install Conan
|
|
||||||
run: pip install wheel 'conan<2'
|
|
||||||
- name: check environment
|
|
||||||
run: |
|
|
||||||
dir env:
|
|
||||||
$env:PATH -split ';'
|
|
||||||
python --version
|
|
||||||
conan --version
|
|
||||||
cmake --version
|
|
||||||
- name: configure Conan
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
conan profile new default --detect
|
|
||||||
conan profile update settings.compiler.cppstd=20 default
|
|
||||||
conan profile update \
|
|
||||||
settings.compiler.runtime=MT${{ matrix.configuration.runtime }} \
|
|
||||||
default
|
|
||||||
- name: build dependencies
|
|
||||||
uses: ./.github/actions/dependencies
|
|
||||||
env:
|
|
||||||
CONAN_URL: http://18.143.149.228:8081/artifactory/api/conan/conan-non-prod
|
|
||||||
CONAN_LOGIN_USERNAME_RIPPLE: ${{ secrets.CONAN_USERNAME }}
|
|
||||||
CONAN_PASSWORD_RIPPLE: ${{ secrets.CONAN_TOKEN }}
|
|
||||||
with:
|
|
||||||
configuration: ${{ matrix.configuration.type }}
|
|
||||||
- name: build
|
|
||||||
uses: ./.github/actions/build
|
|
||||||
with:
|
|
||||||
generator: '${{ matrix.version.generator }}'
|
|
||||||
configuration: ${{ matrix.configuration.type }}
|
|
||||||
# Hard code for now. Move to the matrix if varied options are needed
|
|
||||||
cmake-args: '-Dassert=TRUE -Dwerr=TRUE -Dreporting=OFF -Dunity=ON'
|
|
||||||
cmake-target: install
|
|
||||||
- name: test
|
|
||||||
shell: bash
|
|
||||||
if: ${{ matrix.configuration.tests }}
|
|
||||||
run: |
|
|
||||||
${build_dir}/${{ matrix.configuration.type }}/rippled --unittest \
|
|
||||||
--unittest-jobs $(nproc)
|
|
||||||
9
.gitignore
vendored
9
.gitignore
vendored
@@ -37,10 +37,9 @@ Release/*.*
|
|||||||
*.gcov
|
*.gcov
|
||||||
|
|
||||||
# Levelization checking
|
# Levelization checking
|
||||||
Builds/levelization/results/rawincludes.txt
|
.github/scripts/levelization/results/*
|
||||||
Builds/levelization/results/paths.txt
|
!.github/scripts/levelization/results/loops.txt
|
||||||
Builds/levelization/results/includes/
|
!.github/scripts/levelization/results/ordering.txt
|
||||||
Builds/levelization/results/includedby/
|
|
||||||
|
|
||||||
# Ignore tmp directory.
|
# Ignore tmp directory.
|
||||||
tmp
|
tmp
|
||||||
@@ -111,4 +110,4 @@ bld.rippled/
|
|||||||
.vscode
|
.vscode
|
||||||
|
|
||||||
# Suggested in-tree build directory
|
# Suggested in-tree build directory
|
||||||
/.build/
|
/.build*/
|
||||||
|
|||||||
@@ -1,6 +1,39 @@
|
|||||||
# .pre-commit-config.yaml
|
# To run pre-commit hooks, first install pre-commit:
|
||||||
|
# - `pip install pre-commit==${PRE_COMMIT_VERSION}`
|
||||||
|
#
|
||||||
|
# Then, run the following command to install the git hook scripts:
|
||||||
|
# - `pre-commit install`
|
||||||
|
# You can run all configured hooks against all files with:
|
||||||
|
# - `pre-commit run --all-files`
|
||||||
|
# To manually run a specific hook, use:
|
||||||
|
# - `pre-commit run <hook_id> --all-files`
|
||||||
|
# To run the hooks against only the staged files, use:
|
||||||
|
# - `pre-commit run`
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/pre-commit/mirrors-clang-format
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
rev: v18.1.3
|
rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: clang-format
|
- id: trailing-whitespace
|
||||||
|
- id: end-of-file-fixer
|
||||||
|
- id: mixed-line-ending
|
||||||
|
- id: check-merge-conflict
|
||||||
|
args: [--assume-in-merge]
|
||||||
|
|
||||||
|
- repo: https://github.com/pre-commit/mirrors-clang-format
|
||||||
|
rev: 7d85583be209cb547946c82fbe51f4bc5dd1d017 # frozen: v18.1.8
|
||||||
|
hooks:
|
||||||
|
- id: clang-format
|
||||||
|
args: [--style=file]
|
||||||
|
"types_or": [c++, c, proto]
|
||||||
|
|
||||||
|
- repo: https://github.com/rbubley/mirrors-prettier
|
||||||
|
rev: 5ba47274f9b181bce26a5150a725577f3c336011 # frozen: v3.6.2
|
||||||
|
hooks:
|
||||||
|
- id: prettier
|
||||||
|
|
||||||
|
exclude: |
|
||||||
|
(?x)^(
|
||||||
|
external/.*|
|
||||||
|
.github/scripts/levelization/results/.*\.txt|
|
||||||
|
conan\.lock
|
||||||
|
)$
|
||||||
|
|||||||
1
.prettierignore
Normal file
1
.prettierignore
Normal file
@@ -0,0 +1 @@
|
|||||||
|
external
|
||||||
628
BUILD.md
628
BUILD.md
@@ -3,29 +3,29 @@
|
|||||||
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |
|
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |
|
||||||
|
|
||||||
> These instructions also assume a basic familiarity with Conan and CMake.
|
> These instructions also assume a basic familiarity with Conan and CMake.
|
||||||
> If you are unfamiliar with Conan,
|
> If you are unfamiliar with Conan, you can read our
|
||||||
> you can read our [crash course](./docs/build/conan.md)
|
> [crash course](./docs/build/conan.md) or the official [Getting Started][3]
|
||||||
> or the official [Getting Started][3] walkthrough.
|
> walkthrough.
|
||||||
|
|
||||||
## Branches
|
## Branches
|
||||||
|
|
||||||
For a stable release, choose the `master` branch or one of the [tagged
|
For a stable release, choose the `master` branch or one of the [tagged
|
||||||
releases](https://github.com/ripple/rippled/releases).
|
releases](https://github.com/ripple/rippled/releases).
|
||||||
|
|
||||||
```
|
```bash
|
||||||
git checkout master
|
git checkout master
|
||||||
```
|
```
|
||||||
|
|
||||||
For the latest release candidate, choose the `release` branch.
|
For the latest release candidate, choose the `release` branch.
|
||||||
|
|
||||||
```
|
```bash
|
||||||
git checkout release
|
git checkout release
|
||||||
```
|
```
|
||||||
|
|
||||||
For the latest set of untested features, or to contribute, choose the `develop`
|
For the latest set of untested features, or to contribute, choose the `develop`
|
||||||
branch.
|
branch.
|
||||||
|
|
||||||
```
|
```bash
|
||||||
git checkout develop
|
git checkout develop
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -33,176 +33,307 @@ git checkout develop
|
|||||||
|
|
||||||
See [System Requirements](https://xrpl.org/system-requirements.html).
|
See [System Requirements](https://xrpl.org/system-requirements.html).
|
||||||
|
|
||||||
Building rippled generally requires git, Python, Conan, CMake, and a C++ compiler. Some guidance on setting up such a [C++ development environment can be found here](./docs/build/environment.md).
|
Building rippled generally requires git, Python, Conan, CMake, and a C++
|
||||||
|
compiler. Some guidance on setting up such a [C++ development environment can be
|
||||||
|
found here](./docs/build/environment.md).
|
||||||
|
|
||||||
- [Python 3.7](https://www.python.org/downloads/)
|
- [Python 3.11](https://www.python.org/downloads/), or higher
|
||||||
- [Conan 1.60](https://conan.io/downloads.html)[^1]
|
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
|
||||||
- [CMake 3.16](https://cmake.org/download/)
|
- [CMake 3.22](https://cmake.org/download/), or higher
|
||||||
|
|
||||||
[^1]: It is possible to build with Conan 2.x,
|
[^1]:
|
||||||
but the instructions are significantly different,
|
It is possible to build with Conan 1.60+, but the instructions are
|
||||||
which is why we are not recommending it yet.
|
significantly different, which is why we are not recommending it.
|
||||||
Notably, the `conan profile update` command is removed in 2.x.
|
|
||||||
Profiles must be edited by hand.
|
|
||||||
|
|
||||||
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
|
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
|
||||||
The [minimum compiler versions][2] required are:
|
The [minimum compiler versions][2] required are:
|
||||||
|
|
||||||
| Compiler | Version |
|
| Compiler | Version |
|
||||||
|-------------|---------|
|
| ----------- | --------- |
|
||||||
| GCC | 11 |
|
| GCC | 12 |
|
||||||
| Clang | 13 |
|
| Clang | 16 |
|
||||||
| Apple Clang | 13.1.6 |
|
| Apple Clang | 16 |
|
||||||
| MSVC | 19.23 |
|
| MSVC | 19.44[^3] |
|
||||||
|
|
||||||
### Linux
|
### Linux
|
||||||
|
|
||||||
The Ubuntu operating system has received the highest level of
|
The Ubuntu Linux distribution has received the highest level of quality
|
||||||
quality assurance, testing, and support.
|
assurance, testing, and support. We also support Red Hat and use Debian
|
||||||
|
internally.
|
||||||
|
|
||||||
Here are [sample instructions for setting up a C++ development environment on Linux](./docs/build/environment.md#linux).
|
Here are [sample instructions for setting up a C++ development environment on
|
||||||
|
Linux](./docs/build/environment.md#linux).
|
||||||
|
|
||||||
### Mac
|
### Mac
|
||||||
|
|
||||||
Many rippled engineers use macOS for development.
|
Many rippled engineers use macOS for development.
|
||||||
|
|
||||||
Here are [sample instructions for setting up a C++ development environment on macOS](./docs/build/environment.md#macos).
|
Here are [sample instructions for setting up a C++ development environment on
|
||||||
|
macOS](./docs/build/environment.md#macos).
|
||||||
|
|
||||||
### Windows
|
### Windows
|
||||||
|
|
||||||
Windows is not recommended for production use at this time.
|
Windows is used by some engineers for development only.
|
||||||
|
|
||||||
- Additionally, 32-bit Windows development is not supported.
|
[^3]: Windows is not recommended for production use.
|
||||||
|
|
||||||
[Boost]: https://www.boost.org/
|
|
||||||
|
|
||||||
## Steps
|
## Steps
|
||||||
|
|
||||||
### Set Up Conan
|
### Set Up Conan
|
||||||
|
|
||||||
After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python, Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.
|
After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python,
|
||||||
|
Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.
|
||||||
|
|
||||||
These instructions assume a basic familiarity with Conan and CMake.
|
These instructions assume a basic familiarity with Conan and CMake. If you are
|
||||||
|
unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official
|
||||||
|
[Getting Started][3] walkthrough.
|
||||||
|
|
||||||
If you are unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official [Getting Started][3] walkthrough.
|
#### Default profile
|
||||||
|
|
||||||
You'll need at least one Conan profile:
|
We recommend that you import the provided `conan/profiles/default` profile:
|
||||||
|
|
||||||
```
|
```bash
|
||||||
conan profile new default --detect
|
conan config install conan/profiles/ -tf $(conan config home)/profiles/
|
||||||
```
|
|
||||||
|
|
||||||
Update the compiler settings:
|
|
||||||
|
|
||||||
```
|
|
||||||
conan profile update settings.compiler.cppstd=20 default
|
|
||||||
```
|
|
||||||
|
|
||||||
Configure Conan (1.x only) to use recipe revisions:
|
|
||||||
|
|
||||||
```
|
|
||||||
conan config set general.revisions_enabled=1
|
|
||||||
```
|
|
||||||
|
|
||||||
**Linux** developers will commonly have a default Conan [profile][] that compiles
|
|
||||||
with GCC and links with libstdc++.
|
|
||||||
If you are linking with libstdc++ (see profile setting `compiler.libcxx`),
|
|
||||||
then you will need to choose the `libstdc++11` ABI:
|
|
||||||
|
|
||||||
```
|
|
||||||
conan profile update settings.compiler.libcxx=libstdc++11 default
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
Ensure inter-operability between `boost::string_view` and `std::string_view` types:
|
|
||||||
|
|
||||||
```
|
|
||||||
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_BEAST_USE_STD_STRING_VIEW"]' default
|
|
||||||
conan profile update 'env.CXXFLAGS="-DBOOST_BEAST_USE_STD_STRING_VIEW"' default
|
|
||||||
```
|
```
|
||||||
|
|
||||||
If you have other flags in the `conf.tools.build` or `env.CXXFLAGS` sections, make sure to retain the existing flags and append the new ones. You can check them with:
|
You can check your Conan profile by running:
|
||||||
```
|
|
||||||
conan profile show default
|
```bash
|
||||||
|
conan profile show
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Custom profile
|
||||||
|
|
||||||
**Windows** developers may need to use the x64 native build tools.
|
If the default profile does not work for you and you do not yet have a Conan
|
||||||
An easy way to do that is to run the shortcut "x64 Native Tools Command
|
profile, you can create one by running:
|
||||||
Prompt" for the version of Visual Studio that you have installed.
|
|
||||||
|
|
||||||
Windows developers must also build `rippled` and its dependencies for the x64
|
```bash
|
||||||
architecture:
|
conan profile detect
|
||||||
|
|
||||||
```
|
|
||||||
conan profile update settings.arch=x86_64 default
|
|
||||||
```
|
|
||||||
|
|
||||||
### Multiple compilers
|
|
||||||
|
|
||||||
When `/usr/bin/g++` exists on a platform, it is the default cpp compiler. This
|
|
||||||
default works for some users.
|
|
||||||
|
|
||||||
However, if this compiler cannot build rippled or its dependencies, then you can
|
|
||||||
install another compiler and set Conan and CMake to use it.
|
|
||||||
Update the `conf.tools.build:compiler_executables` setting in order to set the correct variables (`CMAKE_<LANG>_COMPILER`) in the
|
|
||||||
generated CMake toolchain file.
|
|
||||||
For example, on Ubuntu 20, you may have gcc at `/usr/bin/gcc` and g++ at `/usr/bin/g++`; if that is the case, you can select those compilers with:
|
|
||||||
```
|
|
||||||
conan profile update 'conf.tools.build:compiler_executables={"c": "/usr/bin/gcc", "cpp": "/usr/bin/g++"}' default
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Replace `/usr/bin/gcc` and `/usr/bin/g++` with paths to the desired compilers.
|
You may need to make changes to the profile to suit your environment. You can
|
||||||
|
refer to the provided `conan/profiles/default` profile for inspiration, and you
|
||||||
|
may also need to apply the required [tweaks](#conan-profile-tweaks) to this
|
||||||
|
default profile.
|
||||||
|
|
||||||
It should choose the compiler for dependencies as well,
|
### Patched recipes
|
||||||
but not all of them have a Conan recipe that respects this setting (yet).
|
|
||||||
For the rest, you can set these environment variables.
|
|
||||||
Replace `<path>` with paths to the desired compilers:
|
|
||||||
|
|
||||||
- `conan profile update env.CC=<path> default`
|
The recipes in Conan Center occasionally need to be patched for compatibility
|
||||||
- `conan profile update env.CXX=<path> default`
|
with the latest version of `rippled`. We maintain a fork of the Conan Center
|
||||||
|
[here](https://github.com/XRPLF/conan-center-index/) containing the patches.
|
||||||
|
|
||||||
Export our [Conan recipe for Snappy](./external/snappy).
|
To ensure our patched recipes are used, you must add our Conan remote at a
|
||||||
It does not explicitly link the C++ standard library,
|
higher index than the default Conan Center remote, so it is consulted first. You
|
||||||
which allows you to statically link it with GCC, if you want.
|
can do this by running:
|
||||||
|
|
||||||
```
|
```bash
|
||||||
# Conan 1.x
|
conan remote add --index 0 xrplf https://conan.ripplex.io
|
||||||
conan export external/snappy snappy/1.1.10@
|
```
|
||||||
# Conan 2.x
|
|
||||||
conan export --version 1.1.10 external/snappy
|
|
||||||
```
|
|
||||||
|
|
||||||
Export our [Conan recipe for RocksDB](./external/rocksdb).
|
Alternatively, you can pull the patched recipes into the repository and use them
|
||||||
It does not override paths to dependencies when building with Visual Studio.
|
locally:
|
||||||
|
|
||||||
```
|
```bash
|
||||||
# Conan 1.x
|
cd external
|
||||||
conan export external/rocksdb rocksdb/9.7.3@
|
git init
|
||||||
# Conan 2.x
|
git remote add origin git@github.com:XRPLF/conan-center-index.git
|
||||||
conan export --version 9.7.3 external/rocksdb
|
git sparse-checkout init
|
||||||
```
|
git sparse-checkout set recipes/snappy
|
||||||
|
git sparse-checkout add recipes/soci
|
||||||
|
git fetch origin master
|
||||||
|
git checkout master
|
||||||
|
conan export --version 1.1.10 recipes/snappy/all
|
||||||
|
conan export --version 4.0.3 recipes/soci/all
|
||||||
|
rm -rf .git
|
||||||
|
```
|
||||||
|
|
||||||
Export our [Conan recipe for SOCI](./external/soci).
|
In the case we switch to a newer version of a dependency that still requires a
|
||||||
It patches their CMake to correctly import its dependencies.
|
patch, it will be necessary for you to pull in the changes and re-export the
|
||||||
|
updated dependencies with the newer version. However, if we switch to a newer
|
||||||
|
version that no longer requires a patch, no action is required on your part, as
|
||||||
|
the new recipe will be automatically pulled from the official Conan Center.
|
||||||
|
|
||||||
```
|
> [!NOTE]
|
||||||
# Conan 1.x
|
> You might need to add `--lockfile=""` to your `conan install` command
|
||||||
conan export external/soci soci/4.0.3@
|
> to avoid automatic use of the existing `conan.lock` file when you run `conan export` manually on your machine
|
||||||
# Conan 2.x
|
|
||||||
conan export --version 4.0.3 external/soci
|
|
||||||
```
|
|
||||||
|
|
||||||
Export our [Conan recipe for NuDB](./external/nudb).
|
### Conan profile tweaks
|
||||||
It fixes some source files to add missing `#include`s.
|
|
||||||
|
|
||||||
|
#### Missing compiler version
|
||||||
|
|
||||||
```
|
If you see an error similar to the following after running `conan profile show`:
|
||||||
# Conan 1.x
|
|
||||||
conan export external/nudb nudb/2.0.8@
|
```bash
|
||||||
# Conan 2.x
|
ERROR: Invalid setting '17' is not a valid 'settings.compiler.version' value.
|
||||||
conan export --version 2.0.8 external/nudb
|
Possible values are ['5.0', '5.1', '6.0', '6.1', '7.0', '7.3', '8.0', '8.1',
|
||||||
```
|
'9.0', '9.1', '10.0', '11.0', '12.0', '13', '13.0', '13.1', '14', '14.0', '15',
|
||||||
|
'15.0', '16', '16.0']
|
||||||
|
Read "http://docs.conan.io/2/knowledge/faq.html#error-invalid-setting"
|
||||||
|
```
|
||||||
|
|
||||||
|
you need to amend the list of compiler versions in
|
||||||
|
`$(conan config home)/settings.yml`, by appending the required version number(s)
|
||||||
|
to the `version` array specific for your compiler. For example:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
apple-clang:
|
||||||
|
version:
|
||||||
|
[
|
||||||
|
"5.0",
|
||||||
|
"5.1",
|
||||||
|
"6.0",
|
||||||
|
"6.1",
|
||||||
|
"7.0",
|
||||||
|
"7.3",
|
||||||
|
"8.0",
|
||||||
|
"8.1",
|
||||||
|
"9.0",
|
||||||
|
"9.1",
|
||||||
|
"10.0",
|
||||||
|
"11.0",
|
||||||
|
"12.0",
|
||||||
|
"13",
|
||||||
|
"13.0",
|
||||||
|
"13.1",
|
||||||
|
"14",
|
||||||
|
"14.0",
|
||||||
|
"15",
|
||||||
|
"15.0",
|
||||||
|
"16",
|
||||||
|
"16.0",
|
||||||
|
"17",
|
||||||
|
"17.0",
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Multiple compilers
|
||||||
|
|
||||||
|
If you have multiple compilers installed, make sure to select the one to use in
|
||||||
|
your default Conan configuration **before** running `conan profile detect`, by
|
||||||
|
setting the `CC` and `CXX` environment variables.
|
||||||
|
|
||||||
|
For example, if you are running MacOS and have [homebrew
|
||||||
|
LLVM@18](https://formulae.brew.sh/formula/llvm@18), and want to use it as a
|
||||||
|
compiler in the new Conan profile:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export CC=$(brew --prefix llvm@18)/bin/clang
|
||||||
|
export CXX=$(brew --prefix llvm@18)/bin/clang++
|
||||||
|
conan profile detect
|
||||||
|
```
|
||||||
|
|
||||||
|
You should also explicitly set the path to the compiler in the profile file,
|
||||||
|
which helps to avoid errors when `CC` and/or `CXX` are set and disagree with the
|
||||||
|
selected Conan profile. For example:
|
||||||
|
|
||||||
|
```text
|
||||||
|
[conf]
|
||||||
|
tools.build:compiler_executables={'c':'/usr/bin/gcc','cpp':'/usr/bin/g++'}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Multiple profiles
|
||||||
|
|
||||||
|
You can manage multiple Conan profiles in the directory
|
||||||
|
`$(conan config home)/profiles`, for example renaming `default` to a different
|
||||||
|
name and then creating a new `default` profile for a different compiler.
|
||||||
|
|
||||||
|
#### Select language
|
||||||
|
|
||||||
|
The default profile created by Conan will typically select different C++ dialect
|
||||||
|
than C++20 used by this project. You should set `20` in the profile line
|
||||||
|
starting with `compiler.cppstd=`. For example:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sed -i.bak -e 's|^compiler\.cppstd=.*$|compiler.cppstd=20|' $(conan config home)/profiles/default
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Select standard library in Linux
|
||||||
|
|
||||||
|
**Linux** developers will commonly have a default Conan [profile][] that
|
||||||
|
compiles with GCC and links with libstdc++. If you are linking with libstdc++
|
||||||
|
(see profile setting `compiler.libcxx`), then you will need to choose the
|
||||||
|
`libstdc++11` ABI:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sed -i.bak -e 's|^compiler\.libcxx=.*$|compiler.libcxx=libstdc++11|' $(conan config home)/profiles/default
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Select architecture and runtime in Windows
|
||||||
|
|
||||||
|
**Windows** developers may need to use the x64 native build tools. An easy way
|
||||||
|
to do that is to run the shortcut "x64 Native Tools Command Prompt" for the
|
||||||
|
version of Visual Studio that you have installed.
|
||||||
|
|
||||||
|
Windows developers must also build `rippled` and its dependencies for the x64
|
||||||
|
architecture:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
|
||||||
|
```
|
||||||
|
|
||||||
|
**Windows** developers also must select static runtime:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Clang workaround for grpc
|
||||||
|
|
||||||
|
If your compiler is clang, version 19 or later, or apple-clang, version 17 or
|
||||||
|
later, you may encounter a compilation error while building the `grpc`
|
||||||
|
dependency:
|
||||||
|
|
||||||
|
```text
|
||||||
|
In file included from .../lib/promise/try_seq.h:26:
|
||||||
|
.../lib/promise/detail/basic_seq.h:499:38: error: a template argument list is expected after a name prefixed by the template keyword [-Wmissing-template-arg-list-after-template-kw]
|
||||||
|
499 | Traits::template CallSeqFactory(f_, *cur_, std::move(arg)));
|
||||||
|
| ^
|
||||||
|
```
|
||||||
|
|
||||||
|
The workaround for this error is to add two lines to profile:
|
||||||
|
|
||||||
|
```text
|
||||||
|
[conf]
|
||||||
|
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Workaround for gcc 12
|
||||||
|
|
||||||
|
If your compiler is gcc, version 12, and you have enabled `werr` option, you may
|
||||||
|
encounter a compilation error such as:
|
||||||
|
|
||||||
|
```text
|
||||||
|
/usr/include/c++/12/bits/char_traits.h:435:56: error: 'void* __builtin_memcpy(void*, const void*, long unsigned int)' accessing 9223372036854775810 or more bytes at offsets [2, 9223372036854775807] and 1 may overlap up to 9223372036854775813 bytes at offset -3 [-Werror=restrict]
|
||||||
|
435 | return static_cast<char_type*>(__builtin_memcpy(__s1, __s2, __n));
|
||||||
|
| ~~~~~~~~~~~~~~~~^~~~~~~~~~~~~~~~~
|
||||||
|
cc1plus: all warnings being treated as errors
|
||||||
|
```
|
||||||
|
|
||||||
|
The workaround for this error is to add two lines to your profile:
|
||||||
|
|
||||||
|
```text
|
||||||
|
[conf]
|
||||||
|
tools.build:cxxflags=['-Wno-restrict']
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Workaround for clang 16
|
||||||
|
|
||||||
|
If your compiler is clang, version 16, you may encounter compilation error such
|
||||||
|
as:
|
||||||
|
|
||||||
|
```text
|
||||||
|
In file included from .../boost/beast/websocket/stream.hpp:2857:
|
||||||
|
.../boost/beast/websocket/impl/read.hpp:695:17: error: call to 'async_teardown' is ambiguous
|
||||||
|
async_teardown(impl.role, impl.stream(),
|
||||||
|
^~~~~~~~~~~~~~
|
||||||
|
```
|
||||||
|
|
||||||
|
The workaround for this error is to add two lines to your profile:
|
||||||
|
|
||||||
|
```text
|
||||||
|
[conf]
|
||||||
|
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
|
||||||
|
```
|
||||||
|
|
||||||
### Build and Test
|
### Build and Test
|
||||||
|
|
||||||
@@ -224,71 +355,70 @@ It fixes some source files to add missing `#include`s.
|
|||||||
|
|
||||||
2. Use conan to generate CMake files for every configuration you want to build:
|
2. Use conan to generate CMake files for every configuration you want to build:
|
||||||
|
|
||||||
```
|
```
|
||||||
conan install .. --output-folder . --build missing --settings build_type=Release
|
conan install .. --output-folder . --build missing --settings build_type=Release
|
||||||
conan install .. --output-folder . --build missing --settings build_type=Debug
|
conan install .. --output-folder . --build missing --settings build_type=Debug
|
||||||
```
|
```
|
||||||
|
|
||||||
To build Debug, in the next step, be sure to set `-DCMAKE_BUILD_TYPE=Debug`
|
To build Debug, in the next step, be sure to set `-DCMAKE_BUILD_TYPE=Debug`
|
||||||
|
|
||||||
For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
|
For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
|
||||||
you only need to run this command once.
|
you only need to run this command once.
|
||||||
For a multi-configuration generator, e.g. `Visual Studio`, you may want to
|
For a multi-configuration generator, e.g. `Visual Studio`, you may want to
|
||||||
run it more than once.
|
run it more than once.
|
||||||
|
|
||||||
Each of these commands should also have a different `build_type` setting.
|
Each of these commands should also have a different `build_type` setting.
|
||||||
A second command with the same `build_type` setting will overwrite the files
|
A second command with the same `build_type` setting will overwrite the files
|
||||||
generated by the first. You can pass the build type on the command line with
|
generated by the first. You can pass the build type on the command line with
|
||||||
`--settings build_type=$BUILD_TYPE` or in the profile itself,
|
`--settings build_type=$BUILD_TYPE` or in the profile itself,
|
||||||
under the section `[settings]` with the key `build_type`.
|
under the section `[settings]` with the key `build_type`.
|
||||||
|
|
||||||
If you are using a Microsoft Visual C++ compiler,
|
If you are using a Microsoft Visual C++ compiler,
|
||||||
then you will need to ensure consistency between the `build_type` setting
|
then you will need to ensure consistency between the `build_type` setting
|
||||||
and the `compiler.runtime` setting.
|
and the `compiler.runtime` setting.
|
||||||
|
|
||||||
When `build_type` is `Release`, `compiler.runtime` should be `MT`.
|
When `build_type` is `Release`, `compiler.runtime` should be `MT`.
|
||||||
|
|
||||||
When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.
|
When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.
|
||||||
|
|
||||||
```
|
```
|
||||||
conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
|
conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
|
||||||
conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
|
conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
|
||||||
```
|
```
|
||||||
|
|
||||||
3. Configure CMake and pass the toolchain file generated by Conan, located at
|
3. Configure CMake and pass the toolchain file generated by Conan, located at
|
||||||
`$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.
|
`$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.
|
||||||
|
|
||||||
Single-config generators:
|
Single-config generators:
|
||||||
|
|
||||||
Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
|
Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
|
||||||
and make sure it matches the one of the `build_type` settings
|
and make sure it matches the one of the `build_type` settings
|
||||||
you chose in the previous step.
|
you chose in the previous step.
|
||||||
|
|
||||||
For example, to build Debug, in the next command, replace "Release" with "Debug"
|
For example, to build Debug, in the next command, replace "Release" with "Debug"
|
||||||
|
|
||||||
```
|
```
|
||||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON ..
|
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON ..
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Multi-config generators:
|
||||||
|
|
||||||
Multi-config generators:
|
```
|
||||||
|
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
|
||||||
|
```
|
||||||
|
|
||||||
```
|
**Note:** You can pass build options for `rippled` in this step.
|
||||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
|
|
||||||
```
|
|
||||||
|
|
||||||
**Note:** You can pass build options for `rippled` in this step.
|
4. Build `rippled`.
|
||||||
|
|
||||||
5. Build `rippled`.
|
|
||||||
|
|
||||||
For a single-configuration generator, it will build whatever configuration
|
For a single-configuration generator, it will build whatever configuration
|
||||||
you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator,
|
you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator, you
|
||||||
you must pass the option `--config` to select the build configuration.
|
must pass the option `--config` to select the build configuration.
|
||||||
|
|
||||||
Single-config generators:
|
Single-config generators:
|
||||||
|
|
||||||
```
|
```
|
||||||
cmake --build . -j $(nproc)
|
cmake --build .
|
||||||
```
|
```
|
||||||
|
|
||||||
Multi-config generators:
|
Multi-config generators:
|
||||||
@@ -298,24 +428,54 @@ It fixes some source files to add missing `#include`s.
|
|||||||
cmake --build . --config Debug
|
cmake --build . --config Debug
|
||||||
```
|
```
|
||||||
|
|
||||||
6. Test rippled.
|
5. Test rippled.
|
||||||
|
|
||||||
Single-config generators:
|
Single-config generators:
|
||||||
|
|
||||||
```
|
```
|
||||||
./rippled --unittest
|
./rippled --unittest --unittest-jobs N
|
||||||
```
|
```
|
||||||
|
|
||||||
Multi-config generators:
|
Multi-config generators:
|
||||||
|
|
||||||
```
|
```
|
||||||
./Release/rippled --unittest
|
./Release/rippled --unittest --unittest-jobs N
|
||||||
./Debug/rippled --unittest
|
./Debug/rippled --unittest --unittest-jobs N
|
||||||
```
|
```
|
||||||
|
|
||||||
The location of `rippled` in your build directory depends on your CMake
|
Replace the `--unittest-jobs` parameter N with the desired unit tests
|
||||||
generator. Pass `--help` to see the rest of the command line options.
|
concurrency. Recommended setting is half of the number of available CPU
|
||||||
|
cores.
|
||||||
|
|
||||||
|
The location of `rippled` binary in your build directory depends on your
|
||||||
|
CMake generator. Pass `--help` to see the rest of the command line options.
|
||||||
|
|
||||||
|
#### Conan lockfile
|
||||||
|
|
||||||
|
To achieve reproducible dependencies, we use [Conan lockfile](https://docs.conan.io/2/tutorial/versioning/lockfiles.html).
|
||||||
|
|
||||||
|
The `conan.lock` file in the repository contains a "snapshot" of the current dependencies.
|
||||||
|
It is implicitly used when running `conan` commands, you don't need to specify it.
|
||||||
|
|
||||||
|
You have to update this file every time you add a new dependency or change a revision or version of an existing dependency.
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> Conan uses local cache by default when creating a lockfile.
|
||||||
|
>
|
||||||
|
> To ensure, that lockfile creation works the same way on all developer machines, you should clear the local cache before creating a new lockfile.
|
||||||
|
|
||||||
|
To create a new lockfile, run the following commands in the repository root:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
conan remove '*' --confirm
|
||||||
|
rm conan.lock
|
||||||
|
# This ensure that xrplf remote is the first to be consulted
|
||||||
|
conan remote add --force --index 0 xrplf https://conan.ripplex.io
|
||||||
|
conan lock create . -o '&:jemalloc=True' -o '&:rocksdb=True'
|
||||||
|
```
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> If some dependencies are exclusive for some OS, you may need to run the last command for them adding `--profile:all <PROFILE>`.
|
||||||
|
|
||||||
## Coverage report
|
## Coverage report
|
||||||
|
|
||||||
@@ -356,7 +516,7 @@ variable in `cmake`. The specific command line used to run the `gcovr` tool will
|
|||||||
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.
|
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.
|
||||||
|
|
||||||
By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
|
By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
|
||||||
set to the number of available CPU cores. This may cause spurious test
|
set to the number of available CPU cores. This may cause spurious test
|
||||||
errors on Apple. Developers can override the number of unit test jobs with
|
errors on Apple. Developers can override the number of unit test jobs with
|
||||||
the `coverage_test_parallelism` variable in `cmake`.
|
the `coverage_test_parallelism` variable in `cmake`.
|
||||||
|
|
||||||
@@ -372,48 +532,65 @@ cmake --build . --target coverage
|
|||||||
After the `coverage` target is completed, the generated coverage report will be
|
After the `coverage` target is completed, the generated coverage report will be
|
||||||
stored inside the build directory, as either of:
|
stored inside the build directory, as either of:
|
||||||
|
|
||||||
- file named `coverage.`_extension_ , with a suitable extension for the report format, or
|
- file named `coverage.`_extension_, with a suitable extension for the report format, or
|
||||||
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
|
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
|
||||||
|
|
||||||
|
|
||||||
## Options
|
## Options
|
||||||
|
|
||||||
| Option | Default Value | Description |
|
| Option | Default Value | Description |
|
||||||
| --- | ---| ---|
|
| ---------- | ------------- | -------------------------------------------------------------------------- |
|
||||||
| `assert` | OFF | Enable assertions.
|
| `assert` | OFF | Enable assertions. |
|
||||||
| `coverage` | OFF | Prepare the coverage report. |
|
| `coverage` | OFF | Prepare the coverage report. |
|
||||||
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
|
| `san` | N/A | Enable a sanitizer with Clang. Choices are `thread` and `address`. |
|
||||||
| `tests` | OFF | Build tests. |
|
| `tests` | OFF | Build tests. |
|
||||||
| `unity` | ON | Configure a unity build. |
|
| `unity` | OFF | Configure a unity build. |
|
||||||
| `xrpld` | OFF | Build the xrpld (`rippled`) application, and not just the libxrpl library. |
|
| `xrpld` | OFF | Build the xrpld (`rippled`) application, and not just the libxrpl library. |
|
||||||
|
| `werr` | OFF | Treat compilation warnings as errors |
|
||||||
|
| `wextra` | OFF | Enable additional compilation warnings |
|
||||||
|
|
||||||
[Unity builds][5] may be faster for the first build
|
[Unity builds][5] may be faster for the first build
|
||||||
(at the cost of much more memory) since they concatenate sources into fewer
|
(at the cost of much more memory) since they concatenate sources into fewer
|
||||||
translation units. Non-unity builds may be faster for incremental builds,
|
translation units. Non-unity builds may be faster for incremental builds,
|
||||||
and can be helpful for detecting `#include` omissions.
|
and can be helpful for detecting `#include` omissions.
|
||||||
|
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
|
|
||||||
### Conan
|
### Conan
|
||||||
|
|
||||||
After any updates or changes to dependencies, you may need to do the following:
|
After any updates or changes to dependencies, you may need to do the following:
|
||||||
|
|
||||||
1. Remove your build directory.
|
1. Remove your build directory.
|
||||||
2. Remove the Conan cache:
|
2. Remove individual libraries from the Conan cache, e.g.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
conan remove 'grpc/*'
|
||||||
```
|
```
|
||||||
rm -rf ~/.conan/data
|
|
||||||
|
**or**
|
||||||
|
|
||||||
|
Remove all libraries from Conan cache:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
conan remove '*'
|
||||||
```
|
```
|
||||||
4. Re-run [conan install](#build-and-test).
|
|
||||||
|
|
||||||
|
3. Re-run [conan export](#patched-recipes) if needed.
|
||||||
|
4. [Regenerate lockfile](#conan-lockfile).
|
||||||
|
5. Re-run [conan install](#build-and-test).
|
||||||
|
|
||||||
### 'protobuf/port_def.inc' file not found
|
#### ERROR: Package not resolved
|
||||||
|
|
||||||
If `cmake --build .` results in an error due to a missing a protobuf file, then you might have generated CMake files for a different `build_type` than the `CMAKE_BUILD_TYPE` you passed to conan.
|
If you're seeing an error like `ERROR: Package 'snappy/1.1.10' not resolved: Unable to find 'snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246' in remotes.`,
|
||||||
|
please add `xrplf` remote or re-run `conan export` for [patched recipes](#patched-recipes).
|
||||||
|
|
||||||
|
### `protobuf/port_def.inc` file not found
|
||||||
|
|
||||||
|
If `cmake --build .` results in an error due to a missing a protobuf file, then
|
||||||
|
you might have generated CMake files for a different `build_type` than the
|
||||||
|
`CMAKE_BUILD_TYPE` you passed to Conan.
|
||||||
|
|
||||||
```
|
```
|
||||||
/rippled/.build/pb-xrpl.libpb/xrpl/proto/ripple.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
|
/rippled/.build/pb-xrpl.libpb/xrpl/proto/xrpl.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
|
||||||
10 | #include <google/protobuf/port_def.inc>
|
10 | #include <google/protobuf/port_def.inc>
|
||||||
| ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
| ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
1 error generated.
|
1 error generated.
|
||||||
@@ -424,70 +601,21 @@ For example, if you want to build Debug:
|
|||||||
1. For conan install, pass `--settings build_type=Debug`
|
1. For conan install, pass `--settings build_type=Debug`
|
||||||
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
|
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
|
||||||
|
|
||||||
|
|
||||||
### no std::result_of
|
|
||||||
|
|
||||||
If your compiler version is recent enough to have removed `std::result_of` as
|
|
||||||
part of C++20, e.g. Apple Clang 15.0, then you might need to add a preprocessor
|
|
||||||
definition to your build.
|
|
||||||
|
|
||||||
```
|
|
||||||
conan profile update 'options.boost:extra_b2_flags="define=BOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
|
|
||||||
conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
|
|
||||||
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default
|
|
||||||
conan profile update 'conf.tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
|
|
||||||
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
### call to 'async_teardown' is ambiguous
|
|
||||||
|
|
||||||
If you are compiling with an early version of Clang 16, then you might hit
|
|
||||||
a [regression][6] when compiling C++20 that manifests as an [error in a Boost
|
|
||||||
header][7]. You can workaround it by adding this preprocessor definition:
|
|
||||||
|
|
||||||
```
|
|
||||||
conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_DISABLE_CONCEPTS"' default
|
|
||||||
conan profile update 'conf.tools.build:cxxflags+=["-DBOOST_ASIO_DISABLE_CONCEPTS"]' default
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
### recompile with -fPIC
|
|
||||||
|
|
||||||
If you get a linker error suggesting that you recompile Boost with
|
|
||||||
position-independent code, such as:
|
|
||||||
|
|
||||||
```
|
|
||||||
/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o):
|
|
||||||
requires unsupported dynamic reloc 11; recompile with -fPIC
|
|
||||||
```
|
|
||||||
|
|
||||||
Conan most likely downloaded a bad binary distribution of the dependency.
|
|
||||||
This seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled with GCC
|
|
||||||
for Linux. The solution is to build the dependency locally by passing
|
|
||||||
`--build boost` when calling `conan install`.
|
|
||||||
|
|
||||||
```
|
|
||||||
conan install --build boost ...
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
## Add a Dependency
|
## Add a Dependency
|
||||||
|
|
||||||
If you want to experiment with a new package, follow these steps:
|
If you want to experiment with a new package, follow these steps:
|
||||||
|
|
||||||
1. Search for the package on [Conan Center](https://conan.io/center/).
|
1. Search for the package on [Conan Center](https://conan.io/center/).
|
||||||
2. Modify [`conanfile.py`](./conanfile.py):
|
2. Modify [`conanfile.py`](./conanfile.py):
|
||||||
- Add a version of the package to the `requires` property.
|
- Add a version of the package to the `requires` property.
|
||||||
- Change any default options for the package by adding them to the
|
- Change any default options for the package by adding them to the
|
||||||
`default_options` property (with syntax `'$package:$option': $value`).
|
`default_options` property (with syntax `'$package:$option': $value`).
|
||||||
3. Modify [`CMakeLists.txt`](./CMakeLists.txt):
|
3. Modify [`CMakeLists.txt`](./CMakeLists.txt):
|
||||||
- Add a call to `find_package($package REQUIRED)`.
|
- Add a call to `find_package($package REQUIRED)`.
|
||||||
- Link a library from the package to the target `ripple_libs`
|
- Link a library from the package to the target `ripple_libs`
|
||||||
(search for the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`).
|
(search for the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`).
|
||||||
4. Start coding! Don't forget to include whatever headers you need from the package.
|
4. Start coding! Don't forget to include whatever headers you need from the package.
|
||||||
|
|
||||||
|
|
||||||
[1]: https://github.com/conan-io/conan-center-index/issues/13168
|
[1]: https://github.com/conan-io/conan-center-index/issues/13168
|
||||||
[2]: https://en.cppreference.com/w/cpp/compiler_support/20
|
[2]: https://en.cppreference.com/w/cpp/compiler_support/20
|
||||||
[3]: https://docs.conan.io/en/latest/getting_started.html
|
[3]: https://docs.conan.io/en/latest/getting_started.html
|
||||||
|
|||||||
@@ -49,7 +49,7 @@ if(Git_FOUND)
|
|||||||
endif() #git
|
endif() #git
|
||||||
|
|
||||||
if(thread_safety_analysis)
|
if(thread_safety_analysis)
|
||||||
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
|
add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DXRPL_ENABLE_THREAD_SAFETY_ANNOTATIONS)
|
||||||
add_compile_options("-stdlib=libc++")
|
add_compile_options("-stdlib=libc++")
|
||||||
add_link_options("-stdlib=libc++")
|
add_link_options("-stdlib=libc++")
|
||||||
endif()
|
endif()
|
||||||
@@ -62,9 +62,9 @@ if (target)
|
|||||||
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
|
message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
include(RippledSanity)
|
include(XrplSanity)
|
||||||
include(RippledVersion)
|
include(XrplVersion)
|
||||||
include(RippledSettings)
|
include(XrplSettings)
|
||||||
# this check has to remain in the top-level cmake
|
# this check has to remain in the top-level cmake
|
||||||
# because of the early return statement
|
# because of the early return statement
|
||||||
if (packages_only)
|
if (packages_only)
|
||||||
@@ -73,11 +73,11 @@ if (packages_only)
|
|||||||
endif()
|
endif()
|
||||||
return ()
|
return ()
|
||||||
endif ()
|
endif ()
|
||||||
include(RippledCompiler)
|
include(XrplCompiler)
|
||||||
include(RippledInterface)
|
include(XrplInterface)
|
||||||
|
|
||||||
option(only_docs "Include only the docs target?" FALSE)
|
option(only_docs "Include only the docs target?" FALSE)
|
||||||
include(RippledDocs)
|
include(XrplDocs)
|
||||||
if(only_docs)
|
if(only_docs)
|
||||||
return()
|
return()
|
||||||
endif()
|
endif()
|
||||||
@@ -90,6 +90,11 @@ set_target_properties(OpenSSL::SSL PROPERTIES
|
|||||||
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
|
INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
|
||||||
)
|
)
|
||||||
set(SECP256K1_INSTALL TRUE)
|
set(SECP256K1_INSTALL TRUE)
|
||||||
|
set(SECP256K1_BUILD_BENCHMARK FALSE)
|
||||||
|
set(SECP256K1_BUILD_TESTS FALSE)
|
||||||
|
set(SECP256K1_BUILD_EXHAUSTIVE_TESTS FALSE)
|
||||||
|
set(SECP256K1_BUILD_CTIME_TESTS FALSE)
|
||||||
|
set(SECP256K1_BUILD_EXAMPLES FALSE)
|
||||||
add_subdirectory(external/secp256k1)
|
add_subdirectory(external/secp256k1)
|
||||||
add_library(secp256k1::secp256k1 ALIAS secp256k1)
|
add_library(secp256k1::secp256k1 ALIAS secp256k1)
|
||||||
add_subdirectory(external/ed25519-donna)
|
add_subdirectory(external/ed25519-donna)
|
||||||
@@ -107,16 +112,16 @@ option(rocksdb "Enable RocksDB" ON)
|
|||||||
if(rocksdb)
|
if(rocksdb)
|
||||||
find_package(RocksDB REQUIRED)
|
find_package(RocksDB REQUIRED)
|
||||||
set_target_properties(RocksDB::rocksdb PROPERTIES
|
set_target_properties(RocksDB::rocksdb PROPERTIES
|
||||||
INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
|
INTERFACE_COMPILE_DEFINITIONS XRPL_ROCKSDB_AVAILABLE=1
|
||||||
)
|
)
|
||||||
target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
|
target_link_libraries(xrpl_libs INTERFACE RocksDB::rocksdb)
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
find_package(nudb REQUIRED)
|
find_package(nudb REQUIRED)
|
||||||
find_package(date REQUIRED)
|
find_package(date REQUIRED)
|
||||||
find_package(xxHash REQUIRED)
|
find_package(xxHash REQUIRED)
|
||||||
|
|
||||||
target_link_libraries(ripple_libs INTERFACE
|
target_link_libraries(xrpl_libs INTERFACE
|
||||||
ed25519::ed25519
|
ed25519::ed25519
|
||||||
lz4::lz4
|
lz4::lz4
|
||||||
OpenSSL::Crypto
|
OpenSSL::Crypto
|
||||||
@@ -134,13 +139,18 @@ elseif(TARGET NuDB::nudb)
|
|||||||
else()
|
else()
|
||||||
message(FATAL_ERROR "unknown nudb target")
|
message(FATAL_ERROR "unknown nudb target")
|
||||||
endif()
|
endif()
|
||||||
target_link_libraries(ripple_libs INTERFACE ${nudb})
|
target_link_libraries(xrpl_libs INTERFACE ${nudb})
|
||||||
|
|
||||||
if(coverage)
|
if(coverage)
|
||||||
include(RippledCov)
|
include(XrplCov)
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
set(PROJECT_EXPORT_SET RippleExports)
|
set(PROJECT_EXPORT_SET XrplExports)
|
||||||
include(RippledCore)
|
include(XrplCore)
|
||||||
include(RippledInstall)
|
include(XrplInstall)
|
||||||
include(RippledValidatorKeys)
|
include(XrplValidatorKeys)
|
||||||
|
|
||||||
|
if(tests)
|
||||||
|
include(CTest)
|
||||||
|
add_subdirectory(src/tests/libxrpl)
|
||||||
|
endif()
|
||||||
|
|||||||
290
CONTRIBUTING.md
290
CONTRIBUTING.md
@@ -8,13 +8,12 @@ We assume you are familiar with the general practice of [making
|
|||||||
contributions on GitHub][contrib]. This file includes only special
|
contributions on GitHub][contrib]. This file includes only special
|
||||||
instructions specific to this project.
|
instructions specific to this project.
|
||||||
|
|
||||||
|
|
||||||
## Before you start
|
## Before you start
|
||||||
|
|
||||||
The following branches exist in the main project repository:
|
The following branches exist in the main project repository:
|
||||||
|
|
||||||
- `develop`: The latest set of unreleased features, and the most common
|
- `develop`: The latest set of unreleased features, and the most common
|
||||||
starting point for contributions.
|
starting point for contributions.
|
||||||
- `release`: The latest beta release or release candidate.
|
- `release`: The latest beta release or release candidate.
|
||||||
- `master`: The latest stable release.
|
- `master`: The latest stable release.
|
||||||
- `gh-pages`: The documentation for this project, built by Doxygen.
|
- `gh-pages`: The documentation for this project, built by Doxygen.
|
||||||
@@ -27,18 +26,18 @@ In general, external contributions should be developed in your personal
|
|||||||
[fork][forking]. Contributions from developers with write permissions
|
[fork][forking]. Contributions from developers with write permissions
|
||||||
should be done in [the main repository][rippled] in a branch with
|
should be done in [the main repository][rippled] in a branch with
|
||||||
a permitted prefix. Permitted prefixes are:
|
a permitted prefix. Permitted prefixes are:
|
||||||
* XLS-[a-zA-Z0-9]+/.+
|
|
||||||
* e.g. XLS-0033d/mpt-clarify-STEitherAmount
|
|
||||||
* [GitHub username]/.+
|
|
||||||
* e.g. JoelKatz/fix-rpc-webhook-queue
|
|
||||||
* [Organization name]/.+
|
|
||||||
* e.g. ripple/antithesis
|
|
||||||
|
|
||||||
Regardless of where the branch is created, please open a *draft* pull
|
- XLS-[a-zA-Z0-9]+/.+
|
||||||
|
- e.g. XLS-0033d/mpt-clarify-STEitherAmount
|
||||||
|
- [GitHub username]/.+
|
||||||
|
- e.g. JoelKatz/fix-rpc-webhook-queue
|
||||||
|
- [Organization name]/.+
|
||||||
|
- e.g. ripple/antithesis
|
||||||
|
|
||||||
|
Regardless of where the branch is created, please open a _draft_ pull
|
||||||
request as soon as possible after pushing the branch to Github, to
|
request as soon as possible after pushing the branch to Github, to
|
||||||
increase visibility, and ease feedback during the development process.
|
increase visibility, and ease feedback during the development process.
|
||||||
|
|
||||||
|
|
||||||
## Major contributions
|
## Major contributions
|
||||||
|
|
||||||
If your contribution is a major feature or breaking change, then you
|
If your contribution is a major feature or breaking change, then you
|
||||||
@@ -55,8 +54,8 @@ responsibility of the XLS author to update the draft to match the final
|
|||||||
implementation when its corresponding pull request is merged, unless the
|
implementation when its corresponding pull request is merged, unless the
|
||||||
author delegates that responsibility to others.
|
author delegates that responsibility to others.
|
||||||
|
|
||||||
|
|
||||||
## Before making a pull request
|
## Before making a pull request
|
||||||
|
|
||||||
(Or marking a draft pull request as ready.)
|
(Or marking a draft pull request as ready.)
|
||||||
|
|
||||||
Changes that alter transaction processing must be guarded by an
|
Changes that alter transaction processing must be guarded by an
|
||||||
@@ -73,30 +72,32 @@ automatic test run by `rippled --unittest`.
|
|||||||
Otherwise, it must be a manual test.
|
Otherwise, it must be a manual test.
|
||||||
|
|
||||||
If you create new source files, they must be organized as follows:
|
If you create new source files, they must be organized as follows:
|
||||||
* If the files are in any of the `libxrpl` modules, the headers (`.h`) must go
|
|
||||||
|
- If the files are in any of the `libxrpl` modules, the headers (`.h`) must go
|
||||||
under `include/xrpl`, and source (`.cpp`) files must go under
|
under `include/xrpl`, and source (`.cpp`) files must go under
|
||||||
`src/libxrpl`.
|
`src/libxrpl`.
|
||||||
* All other non-test files must go under `src/xrpld`.
|
- All other non-test files must go under `src/xrpld`.
|
||||||
* All test source files must go under `src/test`.
|
- All test source files must go under `src/test`.
|
||||||
|
|
||||||
The source must be formatted according to the style guide below.
|
The source must be formatted according to the style guide below.
|
||||||
|
|
||||||
Header includes must be [levelized](./Builds/levelization).
|
Header includes must be [levelized](.github/scripts/levelization).
|
||||||
|
|
||||||
Changes should be usually squashed down into a single commit.
|
Changes should be usually squashed down into a single commit.
|
||||||
Some larger or more complicated change sets make more sense,
|
Some larger or more complicated change sets make more sense,
|
||||||
and are easier to review if organized into multiple logical commits.
|
and are easier to review if organized into multiple logical commits.
|
||||||
Either way, all commits should fit the following criteria:
|
Either way, all commits should fit the following criteria:
|
||||||
* Changes should be presented in a single commit or a logical
|
|
||||||
|
- Changes should be presented in a single commit or a logical
|
||||||
sequence of commits.
|
sequence of commits.
|
||||||
Specifically, chronological commits that simply
|
Specifically, chronological commits that simply
|
||||||
reflect the history of how the author implemented
|
reflect the history of how the author implemented
|
||||||
the change, "warts and all", are not useful to
|
the change, "warts and all", are not useful to
|
||||||
reviewers.
|
reviewers.
|
||||||
* Every commit should have a [good message](#good-commit-messages).
|
- Every commit should have a [good message](#good-commit-messages).
|
||||||
to explain a specific aspects of the change.
|
to explain a specific aspects of the change.
|
||||||
* Every commit should be signed.
|
- Every commit should be signed.
|
||||||
* Every commit should be well-formed (builds successfully,
|
- Every commit should be well-formed (builds successfully,
|
||||||
unit tests passing), as this helps to resolve merge
|
unit tests passing), as this helps to resolve merge
|
||||||
conflicts, and makes it easier to use `git bisect`
|
conflicts, and makes it easier to use `git bisect`
|
||||||
to find bugs.
|
to find bugs.
|
||||||
@@ -108,13 +109,14 @@ Refer to
|
|||||||
for general rules on writing a good commit message.
|
for general rules on writing a good commit message.
|
||||||
|
|
||||||
tl;dr
|
tl;dr
|
||||||
|
|
||||||
> 1. Separate subject from body with a blank line.
|
> 1. Separate subject from body with a blank line.
|
||||||
> 2. Limit the subject line to 50 characters.
|
> 2. Limit the subject line to 50 characters.
|
||||||
> * [...]shoot for 50 characters, but consider 72 the hard limit.
|
> - [...]shoot for 50 characters, but consider 72 the hard limit.
|
||||||
> 3. Capitalize the subject line.
|
> 3. Capitalize the subject line.
|
||||||
> 4. Do not end the subject line with a period.
|
> 4. Do not end the subject line with a period.
|
||||||
> 5. Use the imperative mood in the subject line.
|
> 5. Use the imperative mood in the subject line.
|
||||||
> * A properly formed Git commit subject line should always be able
|
> - A properly formed Git commit subject line should always be able
|
||||||
> to complete the following sentence: "If applied, this commit will
|
> to complete the following sentence: "If applied, this commit will
|
||||||
> _your subject line here_".
|
> _your subject line here_".
|
||||||
> 6. Wrap the body at 72 characters.
|
> 6. Wrap the body at 72 characters.
|
||||||
@@ -122,16 +124,17 @@ tl;dr
|
|||||||
|
|
||||||
In addition to those guidelines, please add one of the following
|
In addition to those guidelines, please add one of the following
|
||||||
prefixes to the subject line if appropriate.
|
prefixes to the subject line if appropriate.
|
||||||
* `fix:` - The primary purpose is to fix an existing bug.
|
|
||||||
* `perf:` - The primary purpose is performance improvements.
|
- `fix:` - The primary purpose is to fix an existing bug.
|
||||||
* `refactor:` - The changes refactor code without affecting
|
- `perf:` - The primary purpose is performance improvements.
|
||||||
|
- `refactor:` - The changes refactor code without affecting
|
||||||
functionality.
|
functionality.
|
||||||
* `test:` - The changes _only_ affect unit tests.
|
- `test:` - The changes _only_ affect unit tests.
|
||||||
* `docs:` - The changes _only_ affect documentation. This can
|
- `docs:` - The changes _only_ affect documentation. This can
|
||||||
include code comments in addition to `.md` files like this one.
|
include code comments in addition to `.md` files like this one.
|
||||||
* `build:` - The changes _only_ affect the build process,
|
- `build:` - The changes _only_ affect the build process,
|
||||||
including CMake and/or Conan settings.
|
including CMake and/or Conan settings.
|
||||||
* `chore:` - Other tasks that don't affect the binary, but don't fit
|
- `chore:` - Other tasks that don't affect the binary, but don't fit
|
||||||
any of the other cases. e.g. formatting, git settings, updating
|
any of the other cases. e.g. formatting, git settings, updating
|
||||||
Github Actions jobs.
|
Github Actions jobs.
|
||||||
|
|
||||||
@@ -143,9 +146,10 @@ unit tests for Feature X (#1234)`.
|
|||||||
|
|
||||||
In general, pull requests use `develop` as the base branch.
|
In general, pull requests use `develop` as the base branch.
|
||||||
The exceptions are
|
The exceptions are
|
||||||
* Fixes and improvements to a release candidate use `release` as the
|
|
||||||
|
- Fixes and improvements to a release candidate use `release` as the
|
||||||
base.
|
base.
|
||||||
* Hotfixes use `master` as the base.
|
- Hotfixes use `master` as the base.
|
||||||
|
|
||||||
If your changes are not quite ready, but you want to make it easily available
|
If your changes are not quite ready, but you want to make it easily available
|
||||||
for preliminary examination or review, you can create a "Draft" pull request.
|
for preliminary examination or review, you can create a "Draft" pull request.
|
||||||
@@ -182,11 +186,11 @@ meets a few criteria:
|
|||||||
2. All CI checks must be complete and passed. (One-off failures may
|
2. All CI checks must be complete and passed. (One-off failures may
|
||||||
be acceptable if they are related to a known issue.)
|
be acceptable if they are related to a known issue.)
|
||||||
3. The PR must have a [good commit message](#good-commit-messages).
|
3. The PR must have a [good commit message](#good-commit-messages).
|
||||||
* If the PR started with a good commit message, and it doesn't
|
- If the PR started with a good commit message, and it doesn't
|
||||||
need to be updated, the author can indicate that in a comment.
|
need to be updated, the author can indicate that in a comment.
|
||||||
* Any contributor, preferably the author, can leave a comment
|
- Any contributor, preferably the author, can leave a comment
|
||||||
suggesting a commit message.
|
suggesting a commit message.
|
||||||
* If the author squashes and rebases the code in preparation for
|
- If the author squashes and rebases the code in preparation for
|
||||||
merge, they should also ensure the commit message(s) are updated
|
merge, they should also ensure the commit message(s) are updated
|
||||||
as well.
|
as well.
|
||||||
4. The PR branch must be up to date with the base branch (usually
|
4. The PR branch must be up to date with the base branch (usually
|
||||||
@@ -208,7 +212,6 @@ This is a non-exhaustive list of recommended style guidelines. These are
|
|||||||
not always strictly enforced and serve as a way to keep the codebase
|
not always strictly enforced and serve as a way to keep the codebase
|
||||||
coherent rather than a set of _thou shalt not_ commandments.
|
coherent rather than a set of _thou shalt not_ commandments.
|
||||||
|
|
||||||
|
|
||||||
## Formatting
|
## Formatting
|
||||||
|
|
||||||
All code must conform to `clang-format` version 18,
|
All code must conform to `clang-format` version 18,
|
||||||
@@ -237,6 +240,7 @@ To download the patch file:
|
|||||||
5. Commit and push.
|
5. Commit and push.
|
||||||
|
|
||||||
You can install a pre-commit hook to automatically run `clang-format` before every commit:
|
You can install a pre-commit hook to automatically run `clang-format` before every commit:
|
||||||
|
|
||||||
```
|
```
|
||||||
pip3 install pre-commit
|
pip3 install pre-commit
|
||||||
pre-commit install
|
pre-commit install
|
||||||
@@ -267,49 +271,51 @@ locations, where the reporting of contract violations on the Antithesis
|
|||||||
platform is either not possible or not useful.
|
platform is either not possible or not useful.
|
||||||
|
|
||||||
For this reason:
|
For this reason:
|
||||||
* The locations where `assert` or `assert(false)` contracts should continue to be used:
|
|
||||||
* `constexpr` functions
|
- The locations where `assert` or `assert(false)` contracts should continue to be used:
|
||||||
* unit tests i.e. files under `src/test`
|
- `constexpr` functions
|
||||||
* unit tests-related modules (files under `beast/test` and `beast/unit_test`)
|
- unit tests i.e. files under `src/test`
|
||||||
* Outside of the listed locations, do not use `assert`; use `XRPL_ASSERT` instead,
|
- unit tests-related modules (files under `beast/test` and `beast/unit_test`)
|
||||||
|
- Outside of the listed locations, do not use `assert`; use `XRPL_ASSERT` instead,
|
||||||
giving it unique name, with the short description of the contract.
|
giving it unique name, with the short description of the contract.
|
||||||
* Outside of the listed locations, do not use `assert(false)`; use
|
- Outside of the listed locations, do not use `assert(false)`; use
|
||||||
`UNREACHABLE` instead, giving it unique name, with the description of the
|
`UNREACHABLE` instead, giving it unique name, with the description of the
|
||||||
condition being violated
|
condition being violated
|
||||||
* The contract name should start with a full name (including scope) of the
|
- The contract name should start with a full name (including scope) of the
|
||||||
function, optionally a named lambda, followed by a colon ` : ` and a brief
|
function, optionally a named lambda, followed by a colon `:` and a brief
|
||||||
(typically at most five words) description. `UNREACHABLE` contracts
|
(typically at most five words) description. `UNREACHABLE` contracts
|
||||||
can use slightly longer descriptions. If there are multiple overloads of the
|
can use slightly longer descriptions. If there are multiple overloads of the
|
||||||
function, use common sense to balance both brevity and unambiguity of the
|
function, use common sense to balance both brevity and unambiguity of the
|
||||||
function name. NOTE: the purpose of name is to provide stable means of
|
function name. NOTE: the purpose of name is to provide stable means of
|
||||||
unique identification of every contract; for this reason try to avoid elements
|
unique identification of every contract; for this reason try to avoid elements
|
||||||
which can change in some obvious refactors or when reinforcing the condition.
|
which can change in some obvious refactors or when reinforcing the condition.
|
||||||
* Contract description typically (except for `UNREACHABLE`) should describe the
|
- Contract description typically (except for `UNREACHABLE`) should describe the
|
||||||
_expected_ condition, as in "I assert that _expected_ is true".
|
_expected_ condition, as in "I assert that _expected_ is true".
|
||||||
* Contract description for `UNREACHABLE` should describe the _unexpected_
|
- Contract description for `UNREACHABLE` should describe the _unexpected_
|
||||||
situation which caused the line to have been reached.
|
situation which caused the line to have been reached.
|
||||||
* Example good name for an
|
- Example good name for an
|
||||||
`UNREACHABLE` macro `"Json::operator==(Value, Value) : invalid type"`; example
|
`UNREACHABLE` macro `"Json::operator==(Value, Value) : invalid type"`; example
|
||||||
good name for an `XRPL_ASSERT` macro `"Json::Value::asCString : valid type"`.
|
good name for an `XRPL_ASSERT` macro `"Json::Value::asCString : valid type"`.
|
||||||
* Example **bad** name
|
- Example **bad** name
|
||||||
`"RFC1751::insert(char* s, int x, int start, int length) : length is greater than or equal zero"`
|
`"RFC1751::insert(char* s, int x, int start, int length) : length is greater than or equal zero"`
|
||||||
(missing namespace, unnecessary full function signature, description too verbose).
|
(missing namespace, unnecessary full function signature, description too verbose).
|
||||||
Good name: `"ripple::RFC1751::insert : minimum length"`.
|
Good name: `"ripple::RFC1751::insert : minimum length"`.
|
||||||
* In **few** well-justified cases a non-standard name can be used, in which case a
|
- In **few** well-justified cases a non-standard name can be used, in which case a
|
||||||
comment should be placed to explain the rationale (example in `contract.cpp`)
|
comment should be placed to explain the rationale (example in `contract.cpp`)
|
||||||
* Do **not** rename a contract without a good reason (e.g. the name no longer
|
- Do **not** rename a contract without a good reason (e.g. the name no longer
|
||||||
reflects the location or the condition being checked)
|
reflects the location or the condition being checked)
|
||||||
* Do not use `std::unreachable`
|
- Do not use `std::unreachable`
|
||||||
* Do not put contracts where they can be violated by an external condition
|
- Do not put contracts where they can be violated by an external condition
|
||||||
(e.g. timing, data payload before mandatory validation etc.) as this creates
|
(e.g. timing, data payload before mandatory validation etc.) as this creates
|
||||||
bogus bug reports (and causes crashes of Debug builds)
|
bogus bug reports (and causes crashes of Debug builds)
|
||||||
|
|
||||||
## Unit Tests
|
## Unit Tests
|
||||||
|
|
||||||
To execute all unit tests:
|
To execute all unit tests:
|
||||||
|
|
||||||
```rippled --unittest --unittest-jobs=<number of cores>```
|
`rippled --unittest --unittest-jobs=<number of cores>`
|
||||||
|
|
||||||
(Note: Using multiple cores on a Mac M1 can cause spurious test failures. The
|
(Note: Using multiple cores on a Mac M1 can cause spurious test failures. The
|
||||||
cause is still under investigation. If you observe this problem, try specifying fewer jobs.)
|
cause is still under investigation. If you observe this problem, try specifying fewer jobs.)
|
||||||
|
|
||||||
To run a specific set of test suites:
|
To run a specific set of test suites:
|
||||||
@@ -317,10 +323,11 @@ To run a specific set of test suites:
|
|||||||
```
|
```
|
||||||
rippled --unittest TestSuiteName
|
rippled --unittest TestSuiteName
|
||||||
```
|
```
|
||||||
|
|
||||||
Note: In this example, all tests with prefix `TestSuiteName` will be run, so if
|
Note: In this example, all tests with prefix `TestSuiteName` will be run, so if
|
||||||
`TestSuiteName1` and `TestSuiteName2` both exist, then both tests will run.
|
`TestSuiteName1` and `TestSuiteName2` both exist, then both tests will run.
|
||||||
Alternatively, if the unit test name finds an exact match, it will stop
|
Alternatively, if the unit test name finds an exact match, it will stop
|
||||||
doing partial matches, i.e. if a unit test with a title of `TestSuiteName`
|
doing partial matches, i.e. if a unit test with a title of `TestSuiteName`
|
||||||
exists, then no other unit test will be executed, apart from `TestSuiteName`.
|
exists, then no other unit test will be executed, apart from `TestSuiteName`.
|
||||||
|
|
||||||
## Avoid
|
## Avoid
|
||||||
@@ -336,7 +343,6 @@ exists, then no other unit test will be executed, apart from `TestSuiteName`.
|
|||||||
explanatory comments.
|
explanatory comments.
|
||||||
8. Importing new libraries unless there is a very good reason to do so.
|
8. Importing new libraries unless there is a very good reason to do so.
|
||||||
|
|
||||||
|
|
||||||
## Seek to
|
## Seek to
|
||||||
|
|
||||||
9. Extend functionality of existing code rather than creating new code.
|
9. Extend functionality of existing code rather than creating new code.
|
||||||
@@ -351,14 +357,12 @@ exists, then no other unit test will be executed, apart from `TestSuiteName`.
|
|||||||
14. Provide as many comments as you feel that a competent programmer
|
14. Provide as many comments as you feel that a competent programmer
|
||||||
would need to understand what your code does.
|
would need to understand what your code does.
|
||||||
|
|
||||||
|
|
||||||
# Maintainers
|
# Maintainers
|
||||||
|
|
||||||
Maintainers are ecosystem participants with elevated access to the repository.
|
Maintainers are ecosystem participants with elevated access to the repository.
|
||||||
They are able to push new code, make decisions on when a release should be
|
They are able to push new code, make decisions on when a release should be
|
||||||
made, etc.
|
made, etc.
|
||||||
|
|
||||||
|
|
||||||
## Adding and removing
|
## Adding and removing
|
||||||
|
|
||||||
New maintainers can be proposed by two existing maintainers, subject to a vote
|
New maintainers can be proposed by two existing maintainers, subject to a vote
|
||||||
@@ -373,47 +377,41 @@ A minimum of 60% agreement and 50% participation are required.
|
|||||||
The XRP Ledger Foundation will have the ability, for cause, to remove an
|
The XRP Ledger Foundation will have the ability, for cause, to remove an
|
||||||
existing maintainer without a vote.
|
existing maintainer without a vote.
|
||||||
|
|
||||||
|
|
||||||
## Current Maintainers
|
## Current Maintainers
|
||||||
|
|
||||||
Maintainers are users with maintain or admin access to the repo.
|
Maintainers are users with maintain or admin access to the repo.
|
||||||
|
|
||||||
* [bthomee](https://github.com/bthomee) (Ripple)
|
- [bthomee](https://github.com/bthomee) (Ripple)
|
||||||
* [intelliot](https://github.com/intelliot) (Ripple)
|
- [intelliot](https://github.com/intelliot) (Ripple)
|
||||||
* [JoelKatz](https://github.com/JoelKatz) (Ripple)
|
- [JoelKatz](https://github.com/JoelKatz) (Ripple)
|
||||||
* [nixer89](https://github.com/nixer89) (XRP Ledger Foundation)
|
- [legleux](https://github.com/legleux) (Ripple)
|
||||||
* [RichardAH](https://github.com/RichardAH) (XRP Ledger Foundation)
|
- [mankins](https://github.com/mankins) (XRP Ledger Foundation)
|
||||||
* [Silkjaer](https://github.com/Silkjaer) (XRP Ledger Foundation)
|
- [WietseWind](https://github.com/WietseWind) (XRPL Labs + XRP Ledger Foundation)
|
||||||
* [WietseWind](https://github.com/WietseWind) (XRPL Labs + XRP Ledger Foundation)
|
- [ximinez](https://github.com/ximinez) (Ripple)
|
||||||
* [ximinez](https://github.com/ximinez) (Ripple)
|
|
||||||
|
|
||||||
|
|
||||||
## Current Code Reviewers
|
## Current Code Reviewers
|
||||||
|
|
||||||
Code Reviewers are developers who have the ability to review, approve, and
|
Code Reviewers are developers who have the ability to review, approve, and
|
||||||
in some cases merge source code changes.
|
in some cases merge source code changes.
|
||||||
|
|
||||||
* [HowardHinnant](https://github.com/HowardHinnant) (Ripple)
|
- [a1q123456](https://github.com/a1q123456) (Ripple)
|
||||||
* [scottschurr](https://github.com/scottschurr) (Ripple)
|
- [Bronek](https://github.com/Bronek) (Ripple)
|
||||||
* [seelabs](https://github.com/seelabs) (Ripple)
|
- [bthomee](https://github.com/bthomee) (Ripple)
|
||||||
* [Ed Hennis](https://github.com/ximinez) (Ripple)
|
- [ckeshava](https://github.com/ckeshava) (Ripple)
|
||||||
* [mvadari](https://github.com/mvadari) (Ripple)
|
- [dangell7](https://github.com/dangell7) (XRPL Labs)
|
||||||
* [thejohnfreeman](https://github.com/thejohnfreeman) (Ripple)
|
- [godexsoft](https://github.com/godexsoft) (Ripple)
|
||||||
* [Bronek](https://github.com/Bronek) (Ripple)
|
- [gregtatcam](https://github.com/gregtatcam) (Ripple)
|
||||||
* [manojsdoshi](https://github.com/manojsdoshi) (Ripple)
|
- [kuznetsss](https://github.com/kuznetsss) (Ripple)
|
||||||
* [godexsoft](https://github.com/godexsoft) (Ripple)
|
- [lmaisons](https://github.com/lmaisons) (Ripple)
|
||||||
* [mDuo13](https://github.com/mDuo13) (Ripple)
|
- [mathbunnyru](https://github.com/mathbunnyru) (Ripple)
|
||||||
* [ckniffen](https://github.com/ckniffen) (Ripple)
|
- [mvadari](https://github.com/mvadari) (Ripple)
|
||||||
* [arihantkothari](https://github.com/arihantkothari) (Ripple)
|
- [oleks-rip](https://github.com/oleks-rip) (Ripple)
|
||||||
* [pwang200](https://github.com/pwang200) (Ripple)
|
- [PeterChen13579](https://github.com/PeterChen13579) (Ripple)
|
||||||
* [sophiax851](https://github.com/sophiax851) (Ripple)
|
- [pwang200](https://github.com/pwang200) (Ripple)
|
||||||
* [shawnxie999](https://github.com/shawnxie999) (Ripple)
|
- [q73zhao](https://github.com/q73zhao) (Ripple)
|
||||||
* [gregtatcam](https://github.com/gregtatcam) (Ripple)
|
- [shawnxie999](https://github.com/shawnxie999) (Ripple)
|
||||||
* [mtrippled](https://github.com/mtrippled) (Ripple)
|
- [Tapanito](https://github.com/Tapanito) (Ripple)
|
||||||
* [ckeshava](https://github.com/ckeshava) (Ripple)
|
- [ximinez](https://github.com/ximinez) (Ripple)
|
||||||
* [nbougalis](https://github.com/nbougalis) None
|
|
||||||
* [RichardAH](https://github.com/RichardAH) (XRPL Labs + XRP Ledger Foundation)
|
|
||||||
* [dangell7](https://github.com/dangell7) (XRPL Labs)
|
|
||||||
|
|
||||||
Developers not on this list are able and encouraged to submit feedback
|
Developers not on this list are able and encouraged to submit feedback
|
||||||
on pending code changes (open pull requests).
|
on pending code changes (open pull requests).
|
||||||
@@ -423,6 +421,7 @@ on pending code changes (open pull requests).
|
|||||||
These instructions assume you have your git upstream remotes configured
|
These instructions assume you have your git upstream remotes configured
|
||||||
to avoid accidental pushes to the main repo, and a remote group
|
to avoid accidental pushes to the main repo, and a remote group
|
||||||
specifying both of them. e.g.
|
specifying both of them. e.g.
|
||||||
|
|
||||||
```
|
```
|
||||||
$ git remote -v | grep upstream
|
$ git remote -v | grep upstream
|
||||||
upstream https://github.com/XRPLF/rippled.git (fetch)
|
upstream https://github.com/XRPLF/rippled.git (fetch)
|
||||||
@@ -437,6 +436,7 @@ upstream upstream-push
|
|||||||
You can use the [setup-upstreams] script to set this up.
|
You can use the [setup-upstreams] script to set this up.
|
||||||
|
|
||||||
It also assumes you have a default gpg signing key set up in git. e.g.
|
It also assumes you have a default gpg signing key set up in git. e.g.
|
||||||
|
|
||||||
```
|
```
|
||||||
$ git config user.signingkey
|
$ git config user.signingkey
|
||||||
968479A1AFF927E37D1A566BB5690EEEBB952194
|
968479A1AFF927E37D1A566BB5690EEEBB952194
|
||||||
@@ -461,8 +461,8 @@ the suggested commit message, or modify it as needed.
|
|||||||
#### Slightly more complicated pull requests
|
#### Slightly more complicated pull requests
|
||||||
|
|
||||||
Some pull requests need to be pushed to `develop` as more than one
|
Some pull requests need to be pushed to `develop` as more than one
|
||||||
commit. A PR author may *request* to merge as separate commits. They
|
commit. A PR author may _request_ to merge as separate commits. They
|
||||||
must *justify* why separate commits are needed, and *specify* how they
|
must _justify_ why separate commits are needed, and _specify_ how they
|
||||||
would like the commits to be merged. If you disagree with the author,
|
would like the commits to be merged. If you disagree with the author,
|
||||||
discuss it with them directly.
|
discuss it with them directly.
|
||||||
|
|
||||||
@@ -471,20 +471,22 @@ fast forward only merge (`--ff-only`) on the command line and push to
|
|||||||
`develop`.
|
`develop`.
|
||||||
|
|
||||||
Some examples of when separate commits are worthwhile are:
|
Some examples of when separate commits are worthwhile are:
|
||||||
|
|
||||||
1. PRs where source files are reorganized in multiple steps.
|
1. PRs where source files are reorganized in multiple steps.
|
||||||
2. PRs where the commits are mostly independent and *could* be separate
|
2. PRs where the commits are mostly independent and _could_ be separate
|
||||||
PRs, but are pulled together into one PR under a commit theme or
|
PRs, but are pulled together into one PR under a commit theme or
|
||||||
issue.
|
issue.
|
||||||
3. PRs that are complicated enough that `git bisect` would not be much
|
3. PRs that are complicated enough that `git bisect` would not be much
|
||||||
help if it determined this PR introduced a problem.
|
help if it determined this PR introduced a problem.
|
||||||
|
|
||||||
Either way, check that:
|
Either way, check that:
|
||||||
* The commits are based on the current tip of `develop`.
|
|
||||||
* The commits are clean: No merge commits (except when reverse
|
- The commits are based on the current tip of `develop`.
|
||||||
|
- The commits are clean: No merge commits (except when reverse
|
||||||
merging), no "[FOLD]" or "fixup!" messages.
|
merging), no "[FOLD]" or "fixup!" messages.
|
||||||
* All commits are signed. If the commits are not signed by the author, use
|
- All commits are signed. If the commits are not signed by the author, use
|
||||||
`git commit --amend -S` to sign them yourself.
|
`git commit --amend -S` to sign them yourself.
|
||||||
* At least one (but preferably all) of the commits has the PR number
|
- At least one (but preferably all) of the commits has the PR number
|
||||||
in the commit message.
|
in the commit message.
|
||||||
|
|
||||||
The "Create a merge commit" and "Rebase and merge" options should be
|
The "Create a merge commit" and "Rebase and merge" options should be
|
||||||
@@ -502,13 +504,13 @@ Rippled uses a linear workflow model that can be summarized as:
|
|||||||
1. In between releases, developers work against the `develop` branch.
|
1. In between releases, developers work against the `develop` branch.
|
||||||
2. Periodically, a maintainer will build and tag a beta version from
|
2. Periodically, a maintainer will build and tag a beta version from
|
||||||
`develop`, which is pushed to `release`.
|
`develop`, which is pushed to `release`.
|
||||||
* Betas are usually released every two to three weeks, though that
|
- Betas are usually released every two to three weeks, though that
|
||||||
schedule can vary depending on progress, availability, and other
|
schedule can vary depending on progress, availability, and other
|
||||||
factors.
|
factors.
|
||||||
3. When the changes in `develop` are considered stable and mature enough
|
3. When the changes in `develop` are considered stable and mature enough
|
||||||
to be ready to release, a release candidate (RC) is built and tagged
|
to be ready to release, a release candidate (RC) is built and tagged
|
||||||
from `develop`, and merged to `release`.
|
from `develop`, and merged to `release`.
|
||||||
* Further development for that release (primarily fixes) then
|
- Further development for that release (primarily fixes) then
|
||||||
continues against `release`, while other development continues on
|
continues against `release`, while other development continues on
|
||||||
`develop`. Effectively, `release` is forked from `develop`. Changes
|
`develop`. Effectively, `release` is forked from `develop`. Changes
|
||||||
to `release` must be reverse merged to `develop`.
|
to `release` must be reverse merged to `develop`.
|
||||||
@@ -543,6 +545,7 @@ Rippled uses a linear workflow model that can be summarized as:
|
|||||||
the version number, etc.
|
the version number, etc.
|
||||||
|
|
||||||
The workflow may look something like:
|
The workflow may look something like:
|
||||||
|
|
||||||
```
|
```
|
||||||
git fetch --multiple upstreams user1 user2 user3 [...]
|
git fetch --multiple upstreams user1 user2 user3 [...]
|
||||||
git checkout -B release-next --no-track upstream/develop
|
git checkout -B release-next --no-track upstream/develop
|
||||||
@@ -581,8 +584,9 @@ This includes, betas, and the first release candidate (RC).
|
|||||||
|
|
||||||
1. If you didn't create one [preparing the `develop`
|
1. If you didn't create one [preparing the `develop`
|
||||||
branch](#preparing-the-develop-branch), Ensure there is no old
|
branch](#preparing-the-develop-branch), Ensure there is no old
|
||||||
`release-next` branch hanging around. Then make a `release-next`
|
`release-next` branch hanging around. Then make a `release-next`
|
||||||
branch that only changes the version number. e.g.
|
branch that only changes the version number. e.g.
|
||||||
|
|
||||||
```
|
```
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
|
|
||||||
@@ -603,25 +607,30 @@ git push upstream-push
|
|||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
git branch --set-upstream-to=upstream/release-next
|
git branch --set-upstream-to=upstream/release-next
|
||||||
```
|
```
|
||||||
You can also use the [update-version] script.
|
|
||||||
2. Create a Pull Request for `release-next` with **`develop`** as
|
You can also use the [update-version] script. 2. Create a Pull Request for `release-next` with **`develop`** as
|
||||||
the base branch.
|
the base branch.
|
||||||
1. Use the title "[TRIVIAL] Set version to X.X.X-bX".
|
|
||||||
2. Instead of the default description template, use the following:
|
1. Use the title "[TRIVIAL] Set version to X.X.X-bX".
|
||||||
|
2. Instead of the default description template, use the following:
|
||||||
|
|
||||||
```
|
```
|
||||||
## High Level Overview of Change
|
## High Level Overview of Change
|
||||||
|
|
||||||
This PR only changes the version number. It will be merged as
|
This PR only changes the version number. It will be merged as
|
||||||
soon as Github CI actions successfully complete.
|
soon as Github CI actions successfully complete.
|
||||||
```
|
```
|
||||||
|
|
||||||
3. Wait for CI to successfully complete, and get someone to approve
|
3. Wait for CI to successfully complete, and get someone to approve
|
||||||
the PR. (It is safe to ignore known CI issues.)
|
the PR. (It is safe to ignore known CI issues.)
|
||||||
4. Push the updated `develop` branch using your `release-next`
|
4. Push the updated `develop` branch using your `release-next`
|
||||||
branch. **Do not use the Github UI. It's important to preserve
|
branch. **Do not use the Github UI. It's important to preserve
|
||||||
commit IDs.**
|
commit IDs.**
|
||||||
|
|
||||||
```
|
```
|
||||||
git push upstream-push release-next:develop
|
git push upstream-push release-next:develop
|
||||||
```
|
```
|
||||||
|
|
||||||
5. In the unlikely event that the push fails because someone has merged
|
5. In the unlikely event that the push fails because someone has merged
|
||||||
something else in the meantime, rebase your branch onto the updated
|
something else in the meantime, rebase your branch onto the updated
|
||||||
`develop` branch, push again, and go back to step 3.
|
`develop` branch, push again, and go back to step 3.
|
||||||
@@ -630,22 +639,25 @@ git push upstream-push release-next:develop
|
|||||||
7. Once this is done, forward progress on `develop` can continue
|
7. Once this is done, forward progress on `develop` can continue
|
||||||
(other PRs may be merged).
|
(other PRs may be merged).
|
||||||
8. Now create a Pull Request for `release-next` with **`release`** as
|
8. Now create a Pull Request for `release-next` with **`release`** as
|
||||||
the base branch. Instead of the default template, reuse and update
|
the base branch. Instead of the default template, reuse and update
|
||||||
the message from the previous release. Include the following verbiage
|
the message from the previous release. Include the following verbiage
|
||||||
somewhere in the description:
|
somewhere in the description:
|
||||||
|
|
||||||
```
|
```
|
||||||
The base branch is `release`. [All releases (including
|
The base branch is `release`. [All releases (including
|
||||||
betas)](https://github.com/XRPLF/rippled/blob/develop/CONTRIBUTING.md#before-you-start)
|
betas)](https://github.com/XRPLF/rippled/blob/develop/CONTRIBUTING.md#before-you-start)
|
||||||
go in `release`. This PR branch will be pushed directly to `release` (not
|
go in `release`. This PR branch will be pushed directly to `release` (not
|
||||||
squashed or rebased, and not using the GitHub UI).
|
squashed or rebased, and not using the GitHub UI).
|
||||||
```
|
```
|
||||||
|
|
||||||
7. Sign-offs for the three platforms (Linux, Mac, Windows) usually occur
|
7. Sign-offs for the three platforms (Linux, Mac, Windows) usually occur
|
||||||
offline, but at least one approval will be needed on the PR.
|
offline, but at least one approval will be needed on the PR.
|
||||||
* If issues are discovered during testing, simply abandon the
|
- If issues are discovered during testing, simply abandon the
|
||||||
release. It's easy to start a new release, it should be easy to
|
release. It's easy to start a new release, it should be easy to
|
||||||
abandon one. **DO NOT REUSE THE VERSION NUMBER.** e.g. If you
|
abandon one. **DO NOT REUSE THE VERSION NUMBER.** e.g. If you
|
||||||
abandon 2.4.0-b1, the next attempt will be 2.4.0-b2.
|
abandon 2.4.0-b1, the next attempt will be 2.4.0-b2.
|
||||||
8. Once everything is ready to go, push to `release`.
|
8. Once everything is ready to go, push to `release`.
|
||||||
|
|
||||||
```
|
```
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
|
|
||||||
@@ -666,23 +678,28 @@ git log -1 --oneline
|
|||||||
# Other branches, including some from upstream-push, may also be
|
# Other branches, including some from upstream-push, may also be
|
||||||
# present.
|
# present.
|
||||||
```
|
```
|
||||||
|
|
||||||
9. Tag the release, too.
|
9. Tag the release, too.
|
||||||
|
|
||||||
```
|
```
|
||||||
git tag <version number>
|
git tag <version number>
|
||||||
git push upstream-push <version number>
|
git push upstream-push <version number>
|
||||||
```
|
```
|
||||||
|
|
||||||
10. Delete the `release-next` branch on the repo. Use the Github UI or:
|
10. Delete the `release-next` branch on the repo. Use the Github UI or:
|
||||||
|
|
||||||
```
|
```
|
||||||
git push --delete upstream-push release-next
|
git push --delete upstream-push release-next
|
||||||
```
|
```
|
||||||
|
|
||||||
11. Finally [create a new release on
|
11. Finally [create a new release on
|
||||||
Github](https://github.com/XRPLF/rippled/releases).
|
Github](https://github.com/XRPLF/rippled/releases).
|
||||||
|
|
||||||
#### Release candidates after the first
|
#### Release candidates after the first
|
||||||
|
|
||||||
Once the first release candidate is [merged into
|
Once the first release candidate is [merged into
|
||||||
release](#making-the-release), then `release` and `develop` *are allowed
|
release](#making-the-release), then `release` and `develop` _are allowed
|
||||||
to diverge*.
|
to diverge_.
|
||||||
|
|
||||||
If a bug or issue is discovered in a version that has a release
|
If a bug or issue is discovered in a version that has a release
|
||||||
candidate being tested, any fix and new version will need to be applied
|
candidate being tested, any fix and new version will need to be applied
|
||||||
@@ -690,7 +707,7 @@ against `release`, then reverse-merged to `develop`. This helps keep git
|
|||||||
history as linear as possible.
|
history as linear as possible.
|
||||||
|
|
||||||
A `release-next` branch will be created from `release`, and any further
|
A `release-next` branch will be created from `release`, and any further
|
||||||
work for that release must be based on `release-next`. Specifically,
|
work for that release must be based on `release-next`. Specifically,
|
||||||
PRs must use `release-next` as the base, and those PRs will be merged
|
PRs must use `release-next` as the base, and those PRs will be merged
|
||||||
directly to `release-next` when approved. Changes should be restricted
|
directly to `release-next` when approved. Changes should be restricted
|
||||||
to bug fixes, but other changes may be necessary from time to time.
|
to bug fixes, but other changes may be necessary from time to time.
|
||||||
@@ -713,17 +730,21 @@ Once the RC is merged and tagged, it needs to be reverse merged into
|
|||||||
1. Create a branch, based on `upstream/develop`.
|
1. Create a branch, based on `upstream/develop`.
|
||||||
The branch name is not important, but could include "mergeNNNrcN".
|
The branch name is not important, but could include "mergeNNNrcN".
|
||||||
E.g. For release A.B.C-rcD, use `mergeABCrcD`.
|
E.g. For release A.B.C-rcD, use `mergeABCrcD`.
|
||||||
|
|
||||||
```
|
```
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
|
|
||||||
git checkout --no-track -b mergeABCrcD upstream/develop
|
git checkout --no-track -b mergeABCrcD upstream/develop
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Merge `release` into your branch.
|
2. Merge `release` into your branch.
|
||||||
|
|
||||||
```
|
```
|
||||||
# I like the "--edit --log --verbose" parameters, but they are
|
# I like the "--edit --log --verbose" parameters, but they are
|
||||||
# not required.
|
# not required.
|
||||||
git merge upstream/release
|
git merge upstream/release
|
||||||
```
|
```
|
||||||
|
|
||||||
3. `BuildInfo.cpp` will have a conflict with the version number.
|
3. `BuildInfo.cpp` will have a conflict with the version number.
|
||||||
Resolve it with the version from `develop` - the higher version.
|
Resolve it with the version from `develop` - the higher version.
|
||||||
4. Push your branch to your repo (or `upstream` if you have permission),
|
4. Push your branch to your repo (or `upstream` if you have permission),
|
||||||
@@ -731,22 +752,27 @@ git merge upstream/release
|
|||||||
simply indicate that this is a merge of the RC. The "Context" should
|
simply indicate that this is a merge of the RC. The "Context" should
|
||||||
summarize the changes from the RC. Include the following text
|
summarize the changes from the RC. Include the following text
|
||||||
prominently:
|
prominently:
|
||||||
|
|
||||||
```
|
```
|
||||||
This PR must be merged manually using a push. Do not use the Github UI.
|
This PR must be merged manually using a push. Do not use the Github UI.
|
||||||
```
|
```
|
||||||
|
|
||||||
5. Depending on the complexity of the changes, and/or merge conflicts,
|
5. Depending on the complexity of the changes, and/or merge conflicts,
|
||||||
the PR may need a thorough review, or just a sign-off that the
|
the PR may need a thorough review, or just a sign-off that the
|
||||||
merge was done correctly.
|
merge was done correctly.
|
||||||
6. If `develop` is updated before this PR is merged, do not merge
|
6. If `develop` is updated before this PR is merged, do not merge
|
||||||
`develop` back into your branch. Instead rebase preserving merges,
|
`develop` back into your branch. Instead rebase preserving merges,
|
||||||
or do the merge again. (See also the `rerere` git config setting.)
|
or do the merge again. (See also the `rerere` git config setting.)
|
||||||
|
|
||||||
```
|
```
|
||||||
git rebase --rebase-merges upstream/develop
|
git rebase --rebase-merges upstream/develop
|
||||||
# OR
|
# OR
|
||||||
git reset --hard upstream/develop
|
git reset --hard upstream/develop
|
||||||
git merge upstream/release
|
git merge upstream/release
|
||||||
```
|
```
|
||||||
|
|
||||||
7. When the PR is ready, push it to `develop`.
|
7. When the PR is ready, push it to `develop`.
|
||||||
|
|
||||||
```
|
```
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
|
|
||||||
@@ -757,8 +783,8 @@ git push upstream-push mergeABCrcD:develop
|
|||||||
|
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
```
|
```
|
||||||
Development on `develop` can proceed as normal.
|
|
||||||
|
|
||||||
|
Development on `develop` can proceed as normal.
|
||||||
|
|
||||||
#### Final releases
|
#### Final releases
|
||||||
|
|
||||||
@@ -773,7 +799,7 @@ internally as if they were RCs (at minimum, ensuring unit tests pass,
|
|||||||
and the app starts, syncs, and stops cleanly across all three
|
and the app starts, syncs, and stops cleanly across all three
|
||||||
platforms.)
|
platforms.)
|
||||||
|
|
||||||
*If in doubt, make an RC first.*
|
_If in doubt, make an RC first._
|
||||||
|
|
||||||
The process for building a final release is very similar to [the process
|
The process for building a final release is very similar to [the process
|
||||||
for building a beta](#making-the-release), except the code will be
|
for building a beta](#making-the-release), except the code will be
|
||||||
@@ -785,20 +811,23 @@ moving from `release` to `master` instead of from `develop` to
|
|||||||
number. As above, or using the
|
number. As above, or using the
|
||||||
[update-version] script.
|
[update-version] script.
|
||||||
2. Create a Pull Request for `master-next` with **`master`** as
|
2. Create a Pull Request for `master-next` with **`master`** as
|
||||||
the base branch. Instead of the default template, reuse and update
|
the base branch. Instead of the default template, reuse and update
|
||||||
the message from the previous final release. Include the following verbiage
|
the message from the previous final release. Include the following verbiage
|
||||||
somewhere in the description:
|
somewhere in the description:
|
||||||
|
|
||||||
```
|
```
|
||||||
The base branch is `master`. This PR branch will be pushed directly to
|
The base branch is `master`. This PR branch will be pushed directly to
|
||||||
`release` and `master` (not squashed or rebased, and not using the
|
`release` and `master` (not squashed or rebased, and not using the
|
||||||
GitHub UI).
|
GitHub UI).
|
||||||
```
|
```
|
||||||
|
|
||||||
7. Sign-offs for the three platforms (Linux, Mac, Windows) usually occur
|
7. Sign-offs for the three platforms (Linux, Mac, Windows) usually occur
|
||||||
offline, but at least one approval will be needed on the PR.
|
offline, but at least one approval will be needed on the PR.
|
||||||
* If issues are discovered during testing, close the PR, delete
|
- If issues are discovered during testing, close the PR, delete
|
||||||
`master-next`, and move development back to `release`, [issuing
|
`master-next`, and move development back to `release`, [issuing
|
||||||
more RCs as necessary](#release-candidates-after-the-first)
|
more RCs as necessary](#release-candidates-after-the-first)
|
||||||
8. Once everything is ready to go, push to `release` and `master`.
|
8. Once everything is ready to go, push to `release` and `master`.
|
||||||
|
|
||||||
```
|
```
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
|
|
||||||
@@ -821,15 +850,20 @@ git log -1 --oneline
|
|||||||
# Other branches, including some from upstream-push, may also be
|
# Other branches, including some from upstream-push, may also be
|
||||||
# present.
|
# present.
|
||||||
```
|
```
|
||||||
|
|
||||||
9. Tag the release, too.
|
9. Tag the release, too.
|
||||||
|
|
||||||
```
|
```
|
||||||
git tag <version number>
|
git tag <version number>
|
||||||
git push upstream-push <version number>
|
git push upstream-push <version number>
|
||||||
```
|
```
|
||||||
|
|
||||||
10. Delete the `master-next` branch on the repo. Use the Github UI or:
|
10. Delete the `master-next` branch on the repo. Use the Github UI or:
|
||||||
|
|
||||||
```
|
```
|
||||||
git push --delete upstream-push master-next
|
git push --delete upstream-push master-next
|
||||||
```
|
```
|
||||||
|
|
||||||
11. [Create a new release on
|
11. [Create a new release on
|
||||||
Github](https://github.com/XRPLF/rippled/releases). Be sure that
|
Github](https://github.com/XRPLF/rippled/releases). Be sure that
|
||||||
"Set as the latest release" is checked.
|
"Set as the latest release" is checked.
|
||||||
@@ -856,11 +890,13 @@ any branch. When it's ready to merge, jump to step 3 using your branch
|
|||||||
instead of `master-next`.
|
instead of `master-next`.
|
||||||
|
|
||||||
1. Create a `master-next` branch from `master`.
|
1. Create a `master-next` branch from `master`.
|
||||||
|
|
||||||
```
|
```
|
||||||
git checkout --no-track -b master-next upstream/master
|
git checkout --no-track -b master-next upstream/master
|
||||||
git push upstream-push
|
git push upstream-push
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Open any PRs for the pending hotfix using `master-next` as the base,
|
2. Open any PRs for the pending hotfix using `master-next` as the base,
|
||||||
so they can be merged directly in to it. Unlike `develop`, though,
|
so they can be merged directly in to it. Unlike `develop`, though,
|
||||||
`master-next` can be thrown away and recreated if necessary.
|
`master-next` can be thrown away and recreated if necessary.
|
||||||
@@ -868,19 +904,22 @@ git fetch upstreams
|
|||||||
steps as above, or use the
|
steps as above, or use the
|
||||||
[update-version] script.
|
[update-version] script.
|
||||||
4. Create a Pull Request for `master-next` with **`master`** as
|
4. Create a Pull Request for `master-next` with **`master`** as
|
||||||
the base branch. Instead of the default template, reuse and update
|
the base branch. Instead of the default template, reuse and update
|
||||||
the message from the previous final release. Include the following verbiage
|
the message from the previous final release. Include the following verbiage
|
||||||
somewhere in the description:
|
somewhere in the description:
|
||||||
|
|
||||||
```
|
```
|
||||||
The base branch is `master`. This PR branch will be pushed directly to
|
The base branch is `master`. This PR branch will be pushed directly to
|
||||||
`master` (not squashed or rebased, and not using the GitHub UI).
|
`master` (not squashed or rebased, and not using the GitHub UI).
|
||||||
```
|
```
|
||||||
|
|
||||||
7. Sign-offs for the three platforms (Linux, Mac, Windows) usually occur
|
7. Sign-offs for the three platforms (Linux, Mac, Windows) usually occur
|
||||||
offline, but at least one approval will be needed on the PR.
|
offline, but at least one approval will be needed on the PR.
|
||||||
* If issues are discovered during testing, update `master-next` as
|
- If issues are discovered during testing, update `master-next` as
|
||||||
needed, but ensure that the changes are properly squashed, and the
|
needed, but ensure that the changes are properly squashed, and the
|
||||||
version setting commit remains last
|
version setting commit remains last
|
||||||
8. Once everything is ready to go, push to `master` **only**.
|
8. Once everything is ready to go, push to `master` **only**.
|
||||||
|
|
||||||
```
|
```
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
|
|
||||||
@@ -901,15 +940,20 @@ git log -1 --oneline
|
|||||||
# Other branches, including some from upstream-push, may also be
|
# Other branches, including some from upstream-push, may also be
|
||||||
# present.
|
# present.
|
||||||
```
|
```
|
||||||
|
|
||||||
9. Tag the release, too.
|
9. Tag the release, too.
|
||||||
|
|
||||||
```
|
```
|
||||||
git tag <version number>
|
git tag <version number>
|
||||||
git push upstream-push <version number>
|
git push upstream-push <version number>
|
||||||
```
|
```
|
||||||
|
|
||||||
9. Delete the `master-next` branch on the repo.
|
9. Delete the `master-next` branch on the repo.
|
||||||
|
|
||||||
```
|
```
|
||||||
git push --delete upstream-push master-next
|
git push --delete upstream-push master-next
|
||||||
```
|
```
|
||||||
|
|
||||||
10. [Create a new release on
|
10. [Create a new release on
|
||||||
Github](https://github.com/XRPLF/rippled/releases). Be sure that
|
Github](https://github.com/XRPLF/rippled/releases). Be sure that
|
||||||
"Set as the latest release" is checked.
|
"Set as the latest release" is checked.
|
||||||
@@ -921,17 +965,21 @@ Once the hotfix is released, it needs to be reverse merged into
|
|||||||
1. Create a branch in your own repo, based on `upstream/develop`.
|
1. Create a branch in your own repo, based on `upstream/develop`.
|
||||||
The branch name is not important, but could include "mergeNNN".
|
The branch name is not important, but could include "mergeNNN".
|
||||||
E.g. For release 2.2.3, use `merge223`.
|
E.g. For release 2.2.3, use `merge223`.
|
||||||
|
|
||||||
```
|
```
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
|
|
||||||
git checkout --no-track -b merge223 upstream/develop
|
git checkout --no-track -b merge223 upstream/develop
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Merge master into your branch.
|
2. Merge master into your branch.
|
||||||
|
|
||||||
```
|
```
|
||||||
# I like the "--edit --log --verbose" parameters, but they are
|
# I like the "--edit --log --verbose" parameters, but they are
|
||||||
# not required.
|
# not required.
|
||||||
git merge upstream/master
|
git merge upstream/master
|
||||||
```
|
```
|
||||||
|
|
||||||
3. `BuildInfo.cpp` will have a conflict with the version number.
|
3. `BuildInfo.cpp` will have a conflict with the version number.
|
||||||
Resolve it with the version from `develop` - the higher version.
|
Resolve it with the version from `develop` - the higher version.
|
||||||
4. Push your branch to your repo, and open a normal PR against
|
4. Push your branch to your repo, and open a normal PR against
|
||||||
@@ -939,22 +987,27 @@ git merge upstream/master
|
|||||||
is a merge of the hotfix version. The "Context" should summarize
|
is a merge of the hotfix version. The "Context" should summarize
|
||||||
the changes from the hotfix. Include the following text
|
the changes from the hotfix. Include the following text
|
||||||
prominently:
|
prominently:
|
||||||
|
|
||||||
```
|
```
|
||||||
This PR must be merged manually using a --ff-only merge. Do not use the Github UI.
|
This PR must be merged manually using a --ff-only merge. Do not use the Github UI.
|
||||||
```
|
```
|
||||||
|
|
||||||
5. Depending on the complexity of the hotfix, and/or merge conflicts,
|
5. Depending on the complexity of the hotfix, and/or merge conflicts,
|
||||||
the PR may need a thorough review, or just a sign-off that the
|
the PR may need a thorough review, or just a sign-off that the
|
||||||
merge was done correctly.
|
merge was done correctly.
|
||||||
6. If `develop` is updated before this PR is merged, do not merge
|
6. If `develop` is updated before this PR is merged, do not merge
|
||||||
`develop` back into your branch. Instead rebase preserving merges,
|
`develop` back into your branch. Instead rebase preserving merges,
|
||||||
or do the merge again. (See also the `rerere` git config setting.)
|
or do the merge again. (See also the `rerere` git config setting.)
|
||||||
|
|
||||||
```
|
```
|
||||||
git rebase --rebase-merges upstream/develop
|
git rebase --rebase-merges upstream/develop
|
||||||
# OR
|
# OR
|
||||||
git reset --hard upstream/develop
|
git reset --hard upstream/develop
|
||||||
git merge upstream/master
|
git merge upstream/master
|
||||||
```
|
```
|
||||||
|
|
||||||
7. When the PR is ready, push it to `develop`.
|
7. When the PR is ready, push it to `develop`.
|
||||||
|
|
||||||
```
|
```
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
|
|
||||||
@@ -963,6 +1016,7 @@ git log --show-signature "upstream/develop..HEAD"
|
|||||||
|
|
||||||
git push upstream-push HEAD:develop
|
git push upstream-push HEAD:develop
|
||||||
```
|
```
|
||||||
|
|
||||||
Development on `develop` can proceed as normal. It is recommended to
|
Development on `develop` can proceed as normal. It is recommended to
|
||||||
create a beta (or RC) immediately to ensure that everything worked as
|
create a beta (or RC) immediately to ensure that everything worked as
|
||||||
expected.
|
expected.
|
||||||
@@ -977,12 +1031,13 @@ a significant fraction of users, which would necessitate a hotfix / point
|
|||||||
release to that version as well as any later versions.
|
release to that version as well as any later versions.
|
||||||
|
|
||||||
This scenario would follow the same basic procedure as above,
|
This scenario would follow the same basic procedure as above,
|
||||||
except that *none* of `develop`, `release`, or `master`
|
except that _none_ of `develop`, `release`, or `master`
|
||||||
would be touched during the release process.
|
would be touched during the release process.
|
||||||
|
|
||||||
In this example, consider if version 2.1.1 needed to be patched.
|
In this example, consider if version 2.1.1 needed to be patched.
|
||||||
|
|
||||||
1. Create two branches in the main (`upstream`) repo.
|
1. Create two branches in the main (`upstream`) repo.
|
||||||
|
|
||||||
```
|
```
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
|
|
||||||
@@ -996,6 +1051,7 @@ git push upstream-push
|
|||||||
|
|
||||||
git fetch upstreams
|
git fetch upstreams
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Work continues as above, except using `master-2.1.2`as
|
2. Work continues as above, except using `master-2.1.2`as
|
||||||
the base branch for any merging, packaging, etc.
|
the base branch for any merging, packaging, etc.
|
||||||
3. After the release is tagged and packages are built, you could
|
3. After the release is tagged and packages are built, you could
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
ISC License
|
ISC License
|
||||||
|
|
||||||
Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
|
Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
|
||||||
Copyright (c) 2012-2020, the XRP Ledger developers.
|
Copyright (c) 2012-2025, the XRP Ledger developers.
|
||||||
|
|
||||||
Permission to use, copy, modify, and distribute this software for any
|
Permission to use, copy, modify, and distribute this software for any
|
||||||
purpose with or without fee is hereby granted, provided that the above
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
@@ -14,4 +14,3 @@ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
|||||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
|||||||
46
README.md
46
README.md
@@ -5,49 +5,50 @@
|
|||||||
The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powered by a network of peer-to-peer nodes. The XRP Ledger uses a novel Byzantine Fault Tolerant consensus algorithm to settle and record transactions in a secure distributed database without a central operator.
|
The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powered by a network of peer-to-peer nodes. The XRP Ledger uses a novel Byzantine Fault Tolerant consensus algorithm to settle and record transactions in a secure distributed database without a central operator.
|
||||||
|
|
||||||
## XRP
|
## XRP
|
||||||
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
|
|
||||||
|
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free crypto-asset native to the XRP Ledger, and is designed as a gas token for network services and to bridge different currencies. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
|
||||||
|
|
||||||
## rippled
|
## rippled
|
||||||
|
|
||||||
The server software that powers the XRP Ledger is called `rippled` and is available in this repository under the permissive [ISC open-source license](LICENSE.md). The `rippled` server software is written primarily in C++ and runs on a variety of platforms. The `rippled` server software can run in several modes depending on its [configuration](https://xrpl.org/rippled-server-modes.html).
|
The server software that powers the XRP Ledger is called `rippled` and is available in this repository under the permissive [ISC open-source license](LICENSE.md). The `rippled` server software is written primarily in C++ and runs on a variety of platforms. The `rippled` server software can run in several modes depending on its [configuration](https://xrpl.org/rippled-server-modes.html).
|
||||||
|
|
||||||
If you are interested in running an **API Server** (including a **Full History Server**), take a look at [Clio](https://github.com/XRPLF/clio). (rippled Reporting Mode has been replaced by Clio.)
|
If you are interested in running an **API Server** (including a **Full History Server**), take a look at [Clio](https://github.com/XRPLF/clio). (rippled Reporting Mode has been replaced by Clio.)
|
||||||
|
|
||||||
### Build from Source
|
### Build from Source
|
||||||
|
|
||||||
* [Read the build instructions in `BUILD.md`](BUILD.md)
|
- [Read the build instructions in `BUILD.md`](BUILD.md)
|
||||||
* If you encounter any issues, please [open an issue](https://github.com/XRPLF/rippled/issues)
|
- If you encounter any issues, please [open an issue](https://github.com/XRPLF/rippled/issues)
|
||||||
|
|
||||||
## Key Features of the XRP Ledger
|
## Key Features of the XRP Ledger
|
||||||
|
|
||||||
- **[Censorship-Resistant Transaction Processing][]:** No single party decides which transactions succeed or fail, and no one can "roll back" a transaction after it completes. As long as those who choose to participate in the network keep it healthy, they can settle transactions in seconds.
|
- **[Censorship-Resistant Transaction Processing][]:** No single party decides which transactions succeed or fail, and no one can "roll back" a transaction after it completes. As long as those who choose to participate in the network keep it healthy, they can settle transactions in seconds.
|
||||||
- **[Fast, Efficient Consensus Algorithm][]:** The XRP Ledger's consensus algorithm settles transactions in 4 to 5 seconds, processing at a throughput of up to 1500 transactions per second. These properties put XRP at least an order of magnitude ahead of other top digital assets.
|
- **[Fast, Efficient Consensus Algorithm][]:** The XRP Ledger's consensus algorithm settles transactions in 4 to 5 seconds, processing at a throughput of up to 1500 transactions per second. These properties put XRP at least an order of magnitude ahead of other top digital assets.
|
||||||
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction costs.
|
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction fees.
|
||||||
- **[Responsible Software Governance][]:** A team of full-time, world-class developers at Ripple maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests, and builds constructive relationships with governments and financial institutions worldwide.
|
- **[Responsible Software Governance][]:** A team of full-time developers at Ripple & other organizations maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests.
|
||||||
- **[Secure, Adaptable Cryptography][]:** The XRP Ledger relies on industry standard digital signature systems like ECDSA (the same scheme used by Bitcoin) but also supports modern, efficient algorithms like Ed25519. The extensible nature of the XRP Ledger's software makes it possible to add and disable algorithms as the state of the art in cryptography advances.
|
- **[Secure, Adaptable Cryptography][]:** The XRP Ledger relies on industry standard digital signature systems like ECDSA (the same scheme used by Bitcoin) but also supports modern, efficient algorithms like Ed25519. The extensible nature of the XRP Ledger's software makes it possible to add and disable algorithms as the state of the art in cryptography advances.
|
||||||
- **[Modern Features for Smart Contracts][]:** Features like Escrow, Checks, and Payment Channels support cutting-edge financial applications including the [Interledger Protocol](https://interledger.org/). This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
|
- **[Modern Features][]:** Features like Escrow, Checks, and Payment Channels support financial applications atop of the XRP Ledger. This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
|
||||||
- **[On-Ledger Decentralized Exchange][]:** In addition to all the features that make XRP useful on its own, the XRP Ledger also has a fully-functional accounting system for tracking and trading obligations denominated in any way users want, and an exchange built into the protocol. The XRP Ledger can settle long, cross-currency payment paths and exchanges of multiple currencies in atomic transactions, bridging gaps of trust with XRP.
|
- **[On-Ledger Decentralized Exchange][]:** In addition to all the features that make XRP useful on its own, the XRP Ledger also has a fully-functional accounting system for tracking and trading obligations denominated in any way users want, and an exchange built into the protocol. The XRP Ledger can settle long, cross-currency payment paths and exchanges of multiple currencies in atomic transactions, bridging gaps of trust with XRP.
|
||||||
|
|
||||||
[Censorship-Resistant Transaction Processing]: https://xrpl.org/xrp-ledger-overview.html#censorship-resistant-transaction-processing
|
[Censorship-Resistant Transaction Processing]: https://xrpl.org/transaction-censorship-detection.html#transaction-censorship-detection
|
||||||
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/xrp-ledger-overview.html#fast-efficient-consensus-algorithm
|
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/consensus-research.html#consensus-research
|
||||||
[Finite XRP Supply]: https://xrpl.org/xrp-ledger-overview.html#finite-xrp-supply
|
[Finite XRP Supply]: https://xrpl.org/what-is-xrp.html
|
||||||
[Responsible Software Governance]: https://xrpl.org/xrp-ledger-overview.html#responsible-software-governance
|
[Responsible Software Governance]: https://xrpl.org/contribute-code.html#contribute-code-to-the-xrp-ledger
|
||||||
[Secure, Adaptable Cryptography]: https://xrpl.org/xrp-ledger-overview.html#secure-adaptable-cryptography
|
[Secure, Adaptable Cryptography]: https://xrpl.org/cryptographic-keys.html#cryptographic-keys
|
||||||
[Modern Features for Smart Contracts]: https://xrpl.org/xrp-ledger-overview.html#modern-features-for-smart-contracts
|
[Modern Features]: https://xrpl.org/use-specialized-payment-types.html
|
||||||
[On-Ledger Decentralized Exchange]: https://xrpl.org/xrp-ledger-overview.html#on-ledger-decentralized-exchange
|
[On-Ledger Decentralized Exchange]: https://xrpl.org/decentralized-exchange.html#decentralized-exchange
|
||||||
|
|
||||||
|
|
||||||
## Source Code
|
## Source Code
|
||||||
|
|
||||||
Here are some good places to start learning the source code:
|
Here are some good places to start learning the source code:
|
||||||
|
|
||||||
- Read the markdown files in the source tree: `src/ripple/**/*.md`.
|
- Read the markdown files in the source tree: `src/ripple/**/*.md`.
|
||||||
- Read [the levelization document](./Builds/levelization) to get an idea of the internal dependency graph.
|
- Read [the levelization document](.github/scripts/levelization) to get an idea of the internal dependency graph.
|
||||||
- In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component.
|
- In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component.
|
||||||
|
|
||||||
### Repository Contents
|
### Repository Contents
|
||||||
|
|
||||||
| Folder | Contents |
|
| Folder | Contents |
|
||||||
|:-----------|:-------------------------------------------------|
|
| :--------- | :----------------------------------------------- |
|
||||||
| `./bin` | Scripts and data files for Ripple integrators. |
|
| `./bin` | Scripts and data files for Ripple integrators. |
|
||||||
| `./Builds` | Platform-specific guides for building `rippled`. |
|
| `./Builds` | Platform-specific guides for building `rippled`. |
|
||||||
| `./docs` | Source documentation files and doxygen config. |
|
| `./docs` | Source documentation files and doxygen config. |
|
||||||
@@ -57,15 +58,14 @@ Here are some good places to start learning the source code:
|
|||||||
Some of the directories under `src` are external repositories included using
|
Some of the directories under `src` are external repositories included using
|
||||||
git-subtree. See those directories' README files for more details.
|
git-subtree. See those directories' README files for more details.
|
||||||
|
|
||||||
|
|
||||||
## Additional Documentation
|
## Additional Documentation
|
||||||
|
|
||||||
* [XRP Ledger Dev Portal](https://xrpl.org/)
|
- [XRP Ledger Dev Portal](https://xrpl.org/)
|
||||||
* [Setup and Installation](https://xrpl.org/install-rippled.html)
|
- [Setup and Installation](https://xrpl.org/install-rippled.html)
|
||||||
* [Source Documentation (Doxygen)](https://xrplf.github.io/rippled/)
|
- [Source Documentation (Doxygen)](https://xrplf.github.io/rippled/)
|
||||||
|
|
||||||
## See Also
|
## See Also
|
||||||
|
|
||||||
* [Clio API Server for the XRP Ledger](https://github.com/XRPLF/clio)
|
- [Clio API Server for the XRP Ledger](https://github.com/XRPLF/clio)
|
||||||
* [Mailing List for Release Announcements](https://groups.google.com/g/ripple-server)
|
- [Mailing List for Release Announcements](https://groups.google.com/g/ripple-server)
|
||||||
* [Learn more about the XRP Ledger (YouTube)](https://www.youtube.com/playlist?list=PLJQ55Tj1hIVZtJ_JdTvSum2qMTsedWkNi)
|
- [Learn more about the XRP Ledger (YouTube)](https://www.youtube.com/playlist?list=PLJQ55Tj1hIVZtJ_JdTvSum2qMTsedWkNi)
|
||||||
|
|||||||
4817
RELEASENOTES.md
4817
RELEASENOTES.md
File diff suppressed because it is too large
Load Diff
14
SECURITY.md
14
SECURITY.md
@@ -2,7 +2,6 @@
|
|||||||
|
|
||||||
For more details on operating an XRP Ledger server securely, please visit https://xrpl.org/manage-the-rippled-server.html.
|
For more details on operating an XRP Ledger server securely, please visit https://xrpl.org/manage-the-rippled-server.html.
|
||||||
|
|
||||||
|
|
||||||
# Security Policy
|
# Security Policy
|
||||||
|
|
||||||
## Supported Versions
|
## Supported Versions
|
||||||
@@ -77,13 +76,14 @@ The amount paid varies dramatically. Vulnerabilities that are harmless on their
|
|||||||
|
|
||||||
To report a qualifying bug, please send a detailed report to:
|
To report a qualifying bug, please send a detailed report to:
|
||||||
|
|
||||||
|Email Address|bugs@ripple.com |
|
| Email Address | bugs@ripple.com |
|
||||||
|:-----------:|:----------------------------------------------------|
|
| :-----------: | :-------------------------------------------------- |
|
||||||
|Short Key ID | `0xC57929BE` |
|
| Short Key ID | `0xC57929BE` |
|
||||||
|Long Key ID | `0xCD49A0AFC57929BE` |
|
| Long Key ID | `0xCD49A0AFC57929BE` |
|
||||||
|Fingerprint | `24E6 3B02 37E0 FA9C 5E96 8974 CD49 A0AF C579 29BE` |
|
| Fingerprint | `24E6 3B02 37E0 FA9C 5E96 8974 CD49 A0AF C579 29BE` |
|
||||||
|
|
||||||
|
The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is:
|
||||||
|
|
||||||
The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is:
|
|
||||||
```
|
```
|
||||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||||
mQINBFUwGHYBEAC0wpGpBPkd8W1UdQjg9+cEFzeIEJRaoZoeuJD8mofwI5Ejnjdt
|
mQINBFUwGHYBEAC0wpGpBPkd8W1UdQjg9+cEFzeIEJRaoZoeuJD8mofwI5Ejnjdt
|
||||||
|
|||||||
470
bin/browser.js
470
bin/browser.js
@@ -1,470 +0,0 @@
|
|||||||
#!/usr/bin/node
|
|
||||||
//
|
|
||||||
// ledger?l=L
|
|
||||||
// transaction?h=H
|
|
||||||
// ledger_entry?l=L&h=H
|
|
||||||
// account?l=L&a=A
|
|
||||||
// directory?l=L&dir_root=H&i=I
|
|
||||||
// directory?l=L&o=A&i=I // owner directory
|
|
||||||
// offer?l=L&offer=H
|
|
||||||
// offer?l=L&account=A&i=I
|
|
||||||
// ripple_state=l=L&a=A&b=A&c=C
|
|
||||||
// account_lines?l=L&a=A
|
|
||||||
//
|
|
||||||
// A=address
|
|
||||||
// C=currency 3 letter code
|
|
||||||
// H=hash
|
|
||||||
// I=index
|
|
||||||
// L=current | closed | validated | index | hash
|
|
||||||
//
|
|
||||||
|
|
||||||
var async = require("async");
|
|
||||||
var extend = require("extend");
|
|
||||||
var http = require("http");
|
|
||||||
var url = require("url");
|
|
||||||
|
|
||||||
var Remote = require("ripple-lib").Remote;
|
|
||||||
|
|
||||||
var program = process.argv[1];
|
|
||||||
|
|
||||||
var httpd_response = function (res, opts) {
|
|
||||||
var self=this;
|
|
||||||
|
|
||||||
res.statusCode = opts.statusCode;
|
|
||||||
res.end(
|
|
||||||
"<HTML>"
|
|
||||||
+ "<HEAD><TITLE>Title</TITLE></HEAD>"
|
|
||||||
+ "<BODY BACKGROUND=\"#FFFFFF\">"
|
|
||||||
+ "State:" + self.state
|
|
||||||
+ "<UL>"
|
|
||||||
+ "<LI><A HREF=\"/\">home</A>"
|
|
||||||
+ "<LI>" + html_link('r4EM4gBQfr1QgQLXSPF4r7h84qE9mb6iCC')
|
|
||||||
// + "<LI><A HREF=\""+test+"\">rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh</A>"
|
|
||||||
+ "<LI><A HREF=\"/ledger\">ledger</A>"
|
|
||||||
+ "</UL>"
|
|
||||||
+ (opts.body || '')
|
|
||||||
+ '<HR><PRE>'
|
|
||||||
+ (opts.url || '')
|
|
||||||
+ '</PRE>'
|
|
||||||
+ "</BODY>"
|
|
||||||
+ "</HTML>"
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
var html_link = function (generic) {
|
|
||||||
return '<A HREF="' + build_uri({ type: 'account', account: generic}) + '">' + generic + '</A>';
|
|
||||||
};
|
|
||||||
|
|
||||||
// Build a link to a type.
|
|
||||||
var build_uri = function (params, opts) {
|
|
||||||
var c;
|
|
||||||
|
|
||||||
if (params.type === 'account') {
|
|
||||||
c = {
|
|
||||||
pathname: 'account',
|
|
||||||
query: {
|
|
||||||
l: params.ledger,
|
|
||||||
a: params.account,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
} else if (params.type === 'ledger') {
|
|
||||||
c = {
|
|
||||||
pathname: 'ledger',
|
|
||||||
query: {
|
|
||||||
l: params.ledger,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
} else if (params.type === 'transaction') {
|
|
||||||
c = {
|
|
||||||
pathname: 'transaction',
|
|
||||||
query: {
|
|
||||||
h: params.hash,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
c = {};
|
|
||||||
}
|
|
||||||
|
|
||||||
opts = opts || {};
|
|
||||||
|
|
||||||
c.protocol = "http";
|
|
||||||
c.hostname = opts.hostname || self.base.hostname;
|
|
||||||
c.port = opts.port || self.base.port;
|
|
||||||
|
|
||||||
return url.format(c);
|
|
||||||
};
|
|
||||||
|
|
||||||
var build_link = function (item, link) {
|
|
||||||
console.log(link);
|
|
||||||
return "<A HREF=" + link + ">" + item + "</A>";
|
|
||||||
};
|
|
||||||
|
|
||||||
var rewrite_field = function (type, obj, field, opts) {
|
|
||||||
if (field in obj) {
|
|
||||||
obj[field] = rewrite_type(type, obj[field], opts);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
var rewrite_type = function (type, obj, opts) {
|
|
||||||
if ('amount' === type) {
|
|
||||||
if ('string' === typeof obj) {
|
|
||||||
// XRP.
|
|
||||||
return '<B>' + obj + '</B>';
|
|
||||||
|
|
||||||
} else {
|
|
||||||
rewrite_field('address', obj, 'issuer', opts);
|
|
||||||
|
|
||||||
return obj;
|
|
||||||
}
|
|
||||||
return build_link(
|
|
||||||
obj,
|
|
||||||
build_uri({
|
|
||||||
type: 'account',
|
|
||||||
account: obj
|
|
||||||
}, opts)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
if ('address' === type) {
|
|
||||||
return build_link(
|
|
||||||
obj,
|
|
||||||
build_uri({
|
|
||||||
type: 'account',
|
|
||||||
account: obj
|
|
||||||
}, opts)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
else if ('ledger' === type) {
|
|
||||||
return build_link(
|
|
||||||
obj,
|
|
||||||
build_uri({
|
|
||||||
type: 'ledger',
|
|
||||||
ledger: obj,
|
|
||||||
}, opts)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
else if ('node' === type) {
|
|
||||||
// A node
|
|
||||||
if ('PreviousTxnID' in obj)
|
|
||||||
obj.PreviousTxnID = rewrite_type('transaction', obj.PreviousTxnID, opts);
|
|
||||||
|
|
||||||
if ('Offer' === obj.LedgerEntryType) {
|
|
||||||
if ('NewFields' in obj) {
|
|
||||||
if ('TakerGets' in obj.NewFields)
|
|
||||||
obj.NewFields.TakerGets = rewrite_type('amount', obj.NewFields.TakerGets, opts);
|
|
||||||
|
|
||||||
if ('TakerPays' in obj.NewFields)
|
|
||||||
obj.NewFields.TakerPays = rewrite_type('amount', obj.NewFields.TakerPays, opts);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
obj.LedgerEntryType = '<B>' + obj.LedgerEntryType + '</B>';
|
|
||||||
|
|
||||||
return obj;
|
|
||||||
}
|
|
||||||
else if ('transaction' === type) {
|
|
||||||
// Reference to a transaction.
|
|
||||||
return build_link(
|
|
||||||
obj,
|
|
||||||
build_uri({
|
|
||||||
type: 'transaction',
|
|
||||||
hash: obj
|
|
||||||
}, opts)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return 'ERROR: ' + type;
|
|
||||||
};
|
|
||||||
|
|
||||||
var rewrite_object = function (obj, opts) {
|
|
||||||
var out = extend({}, obj);
|
|
||||||
|
|
||||||
rewrite_field('address', out, 'Account', opts);
|
|
||||||
|
|
||||||
rewrite_field('ledger', out, 'parent_hash', opts);
|
|
||||||
rewrite_field('ledger', out, 'ledger_index', opts);
|
|
||||||
rewrite_field('ledger', out, 'ledger_current_index', opts);
|
|
||||||
rewrite_field('ledger', out, 'ledger_hash', opts);
|
|
||||||
|
|
||||||
if ('ledger' in obj) {
|
|
||||||
// It's a ledger header.
|
|
||||||
out.ledger = rewrite_object(out.ledger, opts);
|
|
||||||
|
|
||||||
if ('ledger_hash' in out.ledger)
|
|
||||||
out.ledger.ledger_hash = '<B>' + out.ledger.ledger_hash + '</B>';
|
|
||||||
|
|
||||||
delete out.ledger.hash;
|
|
||||||
delete out.ledger.totalCoins;
|
|
||||||
}
|
|
||||||
|
|
||||||
if ('TransactionType' in obj) {
|
|
||||||
// It's a transaction.
|
|
||||||
out.TransactionType = '<B>' + obj.TransactionType + '</B>';
|
|
||||||
|
|
||||||
rewrite_field('amount', out, 'TakerGets', opts);
|
|
||||||
rewrite_field('amount', out, 'TakerPays', opts);
|
|
||||||
rewrite_field('ledger', out, 'inLedger', opts);
|
|
||||||
|
|
||||||
out.meta.AffectedNodes = out.meta.AffectedNodes.map(function (node) {
|
|
||||||
var kind = 'CreatedNode' in node
|
|
||||||
? 'CreatedNode'
|
|
||||||
: 'ModifiedNode' in node
|
|
||||||
? 'ModifiedNode'
|
|
||||||
: 'DeletedNode' in node
|
|
||||||
? 'DeletedNode'
|
|
||||||
: undefined;
|
|
||||||
|
|
||||||
if (kind) {
|
|
||||||
node[kind] = rewrite_type('node', node[kind], opts);
|
|
||||||
}
|
|
||||||
return node;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
else if ('node' in obj && 'LedgerEntryType' in obj.node) {
|
|
||||||
// Its a ledger entry.
|
|
||||||
|
|
||||||
if (obj.node.LedgerEntryType === 'AccountRoot') {
|
|
||||||
rewrite_field('address', out.node, 'Account', opts);
|
|
||||||
rewrite_field('transaction', out.node, 'PreviousTxnID', opts);
|
|
||||||
rewrite_field('ledger', out.node, 'PreviousTxnLgrSeq', opts);
|
|
||||||
}
|
|
||||||
|
|
||||||
out.node.LedgerEntryType = '<B>' + out.node.LedgerEntryType + '</B>';
|
|
||||||
}
|
|
||||||
|
|
||||||
return out;
|
|
||||||
};
|
|
||||||
|
|
||||||
var augment_object = function (obj, opts, done) {
|
|
||||||
if (obj.node.LedgerEntryType == 'AccountRoot') {
|
|
||||||
var tx_hash = obj.node.PreviousTxnID;
|
|
||||||
var tx_ledger = obj.node.PreviousTxnLgrSeq;
|
|
||||||
|
|
||||||
obj.history = [];
|
|
||||||
|
|
||||||
async.whilst(
|
|
||||||
function () { return tx_hash; },
|
|
||||||
function (callback) {
|
|
||||||
// console.log("augment_object: request: %s %s", tx_hash, tx_ledger);
|
|
||||||
opts.remote.request_tx(tx_hash)
|
|
||||||
.on('success', function (m) {
|
|
||||||
tx_hash = undefined;
|
|
||||||
tx_ledger = undefined;
|
|
||||||
|
|
||||||
//console.log("augment_object: ", JSON.stringify(m));
|
|
||||||
m.meta.AffectedNodes.filter(function(n) {
|
|
||||||
// console.log("augment_object: ", JSON.stringify(n));
|
|
||||||
// if (n.ModifiedNode)
|
|
||||||
// console.log("augment_object: %s %s %s %s %s %s/%s", 'ModifiedNode' in n, n.ModifiedNode && (n.ModifiedNode.LedgerEntryType === 'AccountRoot'), n.ModifiedNode && n.ModifiedNode.FinalFields && (n.ModifiedNode.FinalFields.Account === obj.node.Account), Object.keys(n)[0], n.ModifiedNode && (n.ModifiedNode.LedgerEntryType), obj.node.Account, n.ModifiedNode && n.ModifiedNode.FinalFields && n.ModifiedNode.FinalFields.Account);
|
|
||||||
// if ('ModifiedNode' in n && n.ModifiedNode.LedgerEntryType === 'AccountRoot')
|
|
||||||
// {
|
|
||||||
// console.log("***: ", JSON.stringify(m));
|
|
||||||
// console.log("***: ", JSON.stringify(n));
|
|
||||||
// }
|
|
||||||
return 'ModifiedNode' in n
|
|
||||||
&& n.ModifiedNode.LedgerEntryType === 'AccountRoot'
|
|
||||||
&& n.ModifiedNode.FinalFields
|
|
||||||
&& n.ModifiedNode.FinalFields.Account === obj.node.Account;
|
|
||||||
})
|
|
||||||
.forEach(function (n) {
|
|
||||||
tx_hash = n.ModifiedNode.PreviousTxnID;
|
|
||||||
tx_ledger = n.ModifiedNode.PreviousTxnLgrSeq;
|
|
||||||
|
|
||||||
obj.history.push({
|
|
||||||
tx_hash: tx_hash,
|
|
||||||
tx_ledger: tx_ledger
|
|
||||||
});
|
|
||||||
console.log("augment_object: next: %s %s", tx_hash, tx_ledger);
|
|
||||||
});
|
|
||||||
|
|
||||||
callback();
|
|
||||||
})
|
|
||||||
.on('error', function (m) {
|
|
||||||
callback(m);
|
|
||||||
})
|
|
||||||
.request();
|
|
||||||
},
|
|
||||||
function (err) {
|
|
||||||
if (err) {
|
|
||||||
done();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
async.forEach(obj.history, function (o, callback) {
|
|
||||||
opts.remote.request_account_info(obj.node.Account)
|
|
||||||
.ledger_index(o.tx_ledger)
|
|
||||||
.on('success', function (m) {
|
|
||||||
//console.log("augment_object: ", JSON.stringify(m));
|
|
||||||
o.Balance = m.account_data.Balance;
|
|
||||||
// o.account_data = m.account_data;
|
|
||||||
callback();
|
|
||||||
})
|
|
||||||
.on('error', function (m) {
|
|
||||||
o.error = m;
|
|
||||||
callback();
|
|
||||||
})
|
|
||||||
.request();
|
|
||||||
},
|
|
||||||
function (err) {
|
|
||||||
done(err);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
done();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if (process.argv.length < 4 || process.argv.length > 7) {
|
|
||||||
console.log("Usage: %s ws_ip ws_port [<ip> [<port> [<start>]]]", program);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
var ws_ip = process.argv[2];
|
|
||||||
var ws_port = process.argv[3];
|
|
||||||
var ip = process.argv.length > 4 ? process.argv[4] : "127.0.0.1";
|
|
||||||
var port = process.argv.length > 5 ? process.argv[5] : "8080";
|
|
||||||
|
|
||||||
// console.log("START");
|
|
||||||
var self = this;
|
|
||||||
|
|
||||||
var remote = (new Remote({
|
|
||||||
websocket_ip: ws_ip,
|
|
||||||
websocket_port: ws_port,
|
|
||||||
trace: false
|
|
||||||
}))
|
|
||||||
.on('state', function (m) {
|
|
||||||
console.log("STATE: %s", m);
|
|
||||||
|
|
||||||
self.state = m;
|
|
||||||
})
|
|
||||||
// .once('ledger_closed', callback)
|
|
||||||
.connect()
|
|
||||||
;
|
|
||||||
|
|
||||||
self.base = {
|
|
||||||
hostname: ip,
|
|
||||||
port: port,
|
|
||||||
remote: remote,
|
|
||||||
};
|
|
||||||
|
|
||||||
// console.log("SERVE");
|
|
||||||
var server = http.createServer(function (req, res) {
|
|
||||||
var input = "";
|
|
||||||
|
|
||||||
req.setEncoding();
|
|
||||||
|
|
||||||
req.on('data', function (buffer) {
|
|
||||||
// console.log("DATA: %s", buffer);
|
|
||||||
input = input + buffer;
|
|
||||||
});
|
|
||||||
|
|
||||||
req.on('end', function () {
|
|
||||||
// console.log("URL: %s", req.url);
|
|
||||||
// console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));
|
|
||||||
|
|
||||||
var _parsed = url.parse(req.url, true);
|
|
||||||
var _url = JSON.stringify(_parsed, undefined, 2);
|
|
||||||
|
|
||||||
// console.log("HEADERS: %s", JSON.stringify(_parsed, undefined, 2));
|
|
||||||
if (_parsed.pathname === "/account") {
|
|
||||||
var request = remote
|
|
||||||
.request_ledger_entry('account_root')
|
|
||||||
.ledger_index(-1)
|
|
||||||
.account_root(_parsed.query.a)
|
|
||||||
.on('success', function (m) {
|
|
||||||
// console.log("account_root: %s", JSON.stringify(m, undefined, 2));
|
|
||||||
|
|
||||||
augment_object(m, self.base, function() {
|
|
||||||
httpd_response(res,
|
|
||||||
{
|
|
||||||
statusCode: 200,
|
|
||||||
url: _url,
|
|
||||||
body: "<PRE>"
|
|
||||||
+ JSON.stringify(rewrite_object(m, self.base), undefined, 2)
|
|
||||||
+ "</PRE>"
|
|
||||||
});
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.request();
|
|
||||||
|
|
||||||
} else if (_parsed.pathname === "/ledger") {
|
|
||||||
var request = remote
|
|
||||||
.request_ledger(undefined, { expand: true, transactions: true })
|
|
||||||
.on('success', function (m) {
|
|
||||||
// console.log("Ledger: %s", JSON.stringify(m, undefined, 2));
|
|
||||||
|
|
||||||
httpd_response(res,
|
|
||||||
{
|
|
||||||
statusCode: 200,
|
|
||||||
url: _url,
|
|
||||||
body: "<PRE>"
|
|
||||||
+ JSON.stringify(rewrite_object(m, self.base), undefined, 2)
|
|
||||||
+"</PRE>"
|
|
||||||
});
|
|
||||||
})
|
|
||||||
|
|
||||||
if (_parsed.query.l && _parsed.query.l.length === 64) {
|
|
||||||
request.ledger_hash(_parsed.query.l);
|
|
||||||
}
|
|
||||||
else if (_parsed.query.l) {
|
|
||||||
request.ledger_index(Number(_parsed.query.l));
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
request.ledger_index(-1);
|
|
||||||
}
|
|
||||||
|
|
||||||
request.request();
|
|
||||||
|
|
||||||
} else if (_parsed.pathname === "/transaction") {
|
|
||||||
var request = remote
|
|
||||||
.request_tx(_parsed.query.h)
|
|
||||||
// .request_transaction_entry(_parsed.query.h)
|
|
||||||
// .ledger_select(_parsed.query.l)
|
|
||||||
.on('success', function (m) {
|
|
||||||
// console.log("transaction: %s", JSON.stringify(m, undefined, 2));
|
|
||||||
|
|
||||||
httpd_response(res,
|
|
||||||
{
|
|
||||||
statusCode: 200,
|
|
||||||
url: _url,
|
|
||||||
body: "<PRE>"
|
|
||||||
+ JSON.stringify(rewrite_object(m, self.base), undefined, 2)
|
|
||||||
+"</PRE>"
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.on('error', function (m) {
|
|
||||||
httpd_response(res,
|
|
||||||
{
|
|
||||||
statusCode: 200,
|
|
||||||
url: _url,
|
|
||||||
body: "<PRE>"
|
|
||||||
+ 'ERROR: ' + JSON.stringify(m, undefined, 2)
|
|
||||||
+"</PRE>"
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.request();
|
|
||||||
|
|
||||||
} else {
|
|
||||||
var test = build_uri({
|
|
||||||
type: 'account',
|
|
||||||
ledger: 'closed',
|
|
||||||
account: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
|
|
||||||
}, self.base);
|
|
||||||
|
|
||||||
httpd_response(res,
|
|
||||||
{
|
|
||||||
statusCode: req.url === "/" ? 200 : 404,
|
|
||||||
url: _url,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
server.listen(port, ip, undefined,
|
|
||||||
function () {
|
|
||||||
console.log("Listening at: http://%s:%s", ip, port);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// vim:sw=2:sts=2:ts=8:et
|
|
||||||
@@ -1,64 +0,0 @@
|
|||||||
var ripple = require('ripple-lib');
|
|
||||||
|
|
||||||
var v = {
|
|
||||||
seed: "snoPBrXtMeMyMHUVTgbuqAfg1SUTb",
|
|
||||||
addr: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh"
|
|
||||||
};
|
|
||||||
|
|
||||||
var remote = ripple.Remote.from_config({
|
|
||||||
"trusted" : true,
|
|
||||||
"websocket_ip" : "127.0.0.1",
|
|
||||||
"websocket_port" : 5006,
|
|
||||||
"websocket_ssl" : false,
|
|
||||||
"local_signing" : true
|
|
||||||
});
|
|
||||||
|
|
||||||
var tx_json = {
|
|
||||||
"Account" : v.addr,
|
|
||||||
"Amount" : "10000000",
|
|
||||||
"Destination" : "rEu2ULPiEQm1BAL8pYzmXnNX1aFX9sCks",
|
|
||||||
"Fee" : "10",
|
|
||||||
"Flags" : 0,
|
|
||||||
"Sequence" : 3,
|
|
||||||
"TransactionType" : "Payment"
|
|
||||||
|
|
||||||
//"SigningPubKey": '0396941B22791A448E5877A44CE98434DB217D6FB97D63F0DAD23BE49ED45173C9'
|
|
||||||
};
|
|
||||||
|
|
||||||
remote.on('connected', function () {
|
|
||||||
var req = remote.request_sign(v.seed, tx_json);
|
|
||||||
req.message.debug_signing = true;
|
|
||||||
req.on('success', function (result) {
|
|
||||||
console.log("SERVER RESULT");
|
|
||||||
console.log(result);
|
|
||||||
|
|
||||||
var sim = {};
|
|
||||||
var tx = remote.transaction();
|
|
||||||
tx.tx_json = tx_json;
|
|
||||||
tx._secret = v.seed;
|
|
||||||
tx.complete();
|
|
||||||
var unsigned = tx.serialize().to_hex();
|
|
||||||
tx.sign();
|
|
||||||
|
|
||||||
sim.tx_blob = tx.serialize().to_hex();
|
|
||||||
sim.tx_json = tx.tx_json;
|
|
||||||
sim.tx_signing_hash = tx.signing_hash().to_hex();
|
|
||||||
sim.tx_unsigned = unsigned;
|
|
||||||
|
|
||||||
console.log("\nLOCAL RESULT");
|
|
||||||
console.log(sim);
|
|
||||||
|
|
||||||
remote.connect(false);
|
|
||||||
});
|
|
||||||
req.on('error', function (err) {
|
|
||||||
if (err.error === "remoteError" && err.remote.error === "srcActNotFound") {
|
|
||||||
console.log("Please fund account "+v.addr+" to run this test.");
|
|
||||||
} else {
|
|
||||||
console.log('error', err);
|
|
||||||
}
|
|
||||||
remote.connect(false);
|
|
||||||
});
|
|
||||||
req.request();
|
|
||||||
|
|
||||||
});
|
|
||||||
remote.connect();
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
#!/usr/bin/node
|
|
||||||
//
|
|
||||||
// Returns a Gravatar style hash as per: http://en.gravatar.com/site/implement/hash/
|
|
||||||
//
|
|
||||||
|
|
||||||
if (3 != process.argv.length) {
|
|
||||||
process.stderr.write("Usage: " + process.argv[1] + " email_address\n\nReturns gravatar style hash.\n");
|
|
||||||
process.exit(1);
|
|
||||||
|
|
||||||
} else {
|
|
||||||
var md5 = require('crypto').createHash('md5');
|
|
||||||
|
|
||||||
md5.update(process.argv[2].trim().toLowerCase());
|
|
||||||
|
|
||||||
process.stdout.write(md5.digest('hex') + "\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
// vim:sw=2:sts=2:ts=8:et
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
#!/usr/bin/node
|
|
||||||
//
|
|
||||||
// This program allows IE 9 ripple-clients to make websocket connections to
|
|
||||||
// rippled using flash. As IE 9 does not have websocket support, this required
|
|
||||||
// if you wish to support IE 9 ripple-clients.
|
|
||||||
//
|
|
||||||
// http://www.lightsphere.com/dev/articles/flash_socket_policy.html
|
|
||||||
//
|
|
||||||
// For better security, be sure to set the Port below to the port of your
|
|
||||||
// [websocket_public_port].
|
|
||||||
//
|
|
||||||
|
|
||||||
var net = require("net"),
|
|
||||||
port = "*",
|
|
||||||
domains = ["*:"+port]; // Domain:Port
|
|
||||||
|
|
||||||
net.createServer(
|
|
||||||
function(socket) {
|
|
||||||
socket.write("<?xml version='1.0' ?>\n");
|
|
||||||
socket.write("<!DOCTYPE cross-domain-policy SYSTEM 'http://www.macromedia.com/xml/dtds/cross-domain-policy.dtd'>\n");
|
|
||||||
socket.write("<cross-domain-policy>\n");
|
|
||||||
domains.forEach(
|
|
||||||
function(domain) {
|
|
||||||
var parts = domain.split(':');
|
|
||||||
socket.write("\t<allow-access-from domain='" + parts[0] + "' to-ports='" + parts[1] + "' />\n");
|
|
||||||
}
|
|
||||||
);
|
|
||||||
socket.write("</cross-domain-policy>\n");
|
|
||||||
socket.end();
|
|
||||||
}
|
|
||||||
).listen(843);
|
|
||||||
@@ -1,150 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
# This script generates information about your rippled installation
|
|
||||||
# and system. It can be used to help debug issues that you may face
|
|
||||||
# in your installation. While this script endeavors to not display any
|
|
||||||
# sensitive information, it is recommended that you read the output
|
|
||||||
# before sharing with any third parties.
|
|
||||||
|
|
||||||
|
|
||||||
rippled_exe=/opt/ripple/bin/rippled
|
|
||||||
conf_file=/etc/opt/ripple/rippled.cfg
|
|
||||||
|
|
||||||
while getopts ":e:c:" opt; do
|
|
||||||
case $opt in
|
|
||||||
e)
|
|
||||||
rippled_exe=${OPTARG}
|
|
||||||
;;
|
|
||||||
c)
|
|
||||||
conf_file=${OPTARG}
|
|
||||||
;;
|
|
||||||
\?)
|
|
||||||
echo "Invalid option: -$OPTARG"
|
|
||||||
exit -1
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
tmp_loc=$(mktemp -d --tmpdir ripple_info.XXXXX)
|
|
||||||
chmod 751 ${tmp_loc}
|
|
||||||
awk_prog=${tmp_loc}/cfg.awk
|
|
||||||
summary_out=${tmp_loc}/rippled_info.md
|
|
||||||
printf "# rippled report info\n\n> generated at %s\n" "$(date -R)" > ${summary_out}
|
|
||||||
|
|
||||||
function log_section {
|
|
||||||
printf "\n## %s\n" "$*" >> ${summary_out}
|
|
||||||
|
|
||||||
while read -r l; do
|
|
||||||
echo " $l" >> ${summary_out}
|
|
||||||
done </dev/stdin
|
|
||||||
}
|
|
||||||
|
|
||||||
function join_by {
|
|
||||||
local IFS="$1"; shift; echo "$*";
|
|
||||||
}
|
|
||||||
|
|
||||||
if [[ -f ${conf_file} ]] ; then
|
|
||||||
exclude=( ips ips_fixed node_seed validation_seed validator_token )
|
|
||||||
cleaned_conf=${tmp_loc}/cleaned_rippled_cfg.txt
|
|
||||||
cat << 'EOP' >> ${awk_prog}
|
|
||||||
BEGIN {FS="[[:space:]]*=[[:space:]]*"; skip=0; db_path=""; print > OUT_FILE; split(exl,exa,"|")}
|
|
||||||
/^#/ {next}
|
|
||||||
save==2 && /^[[:space:]]*$/ {next}
|
|
||||||
/^\[.+\]$/ {
|
|
||||||
section=tolower(gensub(/^\[[[:space:]]*([a-zA-Z_]+)[[:space:]]*\]$/, "\\1", "g"))
|
|
||||||
skip = 0
|
|
||||||
for (i in exa) {
|
|
||||||
if (section == exa[i])
|
|
||||||
skip = 1
|
|
||||||
}
|
|
||||||
if (section == "database_path")
|
|
||||||
save = 1
|
|
||||||
}
|
|
||||||
skip==1 {next}
|
|
||||||
save==2 {save=0; db_path=$0}
|
|
||||||
save==1 {save=2}
|
|
||||||
$1 ~ /password/ {$0=$1"=<redacted>"}
|
|
||||||
{print >> OUT_FILE}
|
|
||||||
END {print db_path}
|
|
||||||
EOP
|
|
||||||
|
|
||||||
db=$(\
|
|
||||||
sed -r -e 's/\<s[[:alnum:]]{28}\>/<redactedsecret>/g;s/^[[:space:]]*//;s/[[:space:]]*$//' ${conf_file} |\
|
|
||||||
awk -v OUT_FILE=${cleaned_conf} -v exl="$(join_by '|' "${exclude[@]}")" -f ${awk_prog})
|
|
||||||
rm ${awk_prog}
|
|
||||||
cat ${cleaned_conf} | log_section "cleaned config file"
|
|
||||||
rm ${cleaned_conf}
|
|
||||||
echo "${db}" | log_section "database path"
|
|
||||||
df ${db} | log_section "df: database"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Send output from this script to a log file
|
|
||||||
## this captures any messages
|
|
||||||
## or errors from the script itself
|
|
||||||
|
|
||||||
log_file=${tmp_loc}/get_info.log
|
|
||||||
exec 3>&1 1>>${log_file} 2>&1
|
|
||||||
|
|
||||||
## Send all stdout files to /tmp
|
|
||||||
|
|
||||||
if [[ -x ${rippled_exe} ]] ; then
|
|
||||||
pgrep rippled && \
|
|
||||||
${rippled_exe} --conf ${conf_file} \
|
|
||||||
-- server_info | log_section "server info"
|
|
||||||
fi
|
|
||||||
|
|
||||||
cat /proc/meminfo | log_section "meminfo"
|
|
||||||
cat /proc/swaps | log_section "swap space"
|
|
||||||
ulimit -a | log_section "ulimit"
|
|
||||||
|
|
||||||
if command -v lshw >/dev/null 2>&1 ; then
|
|
||||||
lshw 2>/dev/null | log_section "hardware info"
|
|
||||||
else
|
|
||||||
lscpu > ${tmp_loc}/hw_info.txt
|
|
||||||
hwinfo >> ${tmp_loc}/hw_info.txt
|
|
||||||
lspci >> ${tmp_loc}/hw_info.txt
|
|
||||||
lsblk >> ${tmp_loc}/hw_info.txt
|
|
||||||
cat ${tmp_loc}/hw_info.txt | log_section "hardware info"
|
|
||||||
rm ${tmp_loc}/hw_info.txt
|
|
||||||
fi
|
|
||||||
|
|
||||||
if command -v iostat >/dev/null 2>&1 ; then
|
|
||||||
iostat -t -d -x 2 6 | log_section "iostat"
|
|
||||||
fi
|
|
||||||
|
|
||||||
df -h | log_section "free disk space"
|
|
||||||
drives=($(df | awk '$1 ~ /^\/dev\// {print $1}' | xargs -n 1 basename))
|
|
||||||
block_devs=($(ls /sys/block/))
|
|
||||||
for d in "${drives[@]}"; do
|
|
||||||
for dev in "${block_devs[@]}"; do
|
|
||||||
#echo "D: [$d], DEV: [$dev]"
|
|
||||||
if [[ $d =~ $dev ]]; then
|
|
||||||
# this file (if exists) has 0 for SSD and 1 for HDD
|
|
||||||
if [[ "$(cat /sys/block/${dev}/queue/rotational 2>/dev/null)" == 0 ]] ; then
|
|
||||||
echo "${d} : SSD" >> ${tmp_loc}/is_ssd.txt
|
|
||||||
else
|
|
||||||
echo "${d} : NO SSD" >> ${tmp_loc}/is_ssd.txt
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
done
|
|
||||||
|
|
||||||
if [[ -f ${tmp_loc}/is_ssd.txt ]] ; then
|
|
||||||
cat ${tmp_loc}/is_ssd.txt | log_section "SSD"
|
|
||||||
rm ${tmp_loc}/is_ssd.txt
|
|
||||||
fi
|
|
||||||
|
|
||||||
cat ${log_file} | log_section "script log"
|
|
||||||
|
|
||||||
cat << MSG | tee /dev/fd/3
|
|
||||||
####################################################
|
|
||||||
rippled info has been gathered. Please copy the
|
|
||||||
contents of ${summary_out}
|
|
||||||
to a github gist at https://gist.github.com/
|
|
||||||
|
|
||||||
PLEASE REVIEW THIS FILE FOR ANY SENSITIVE DATA
|
|
||||||
BEFORE POSTING! We have tried our best to omit
|
|
||||||
any sensitive information from this file, but you
|
|
||||||
should verify before posting.
|
|
||||||
####################################################
|
|
||||||
MSG
|
|
||||||
|
|
||||||
@@ -5,7 +5,7 @@ then
|
|||||||
name=$( basename $0 )
|
name=$( basename $0 )
|
||||||
cat <<- USAGE
|
cat <<- USAGE
|
||||||
Usage: $name <username>
|
Usage: $name <username>
|
||||||
|
|
||||||
Where <username> is the Github username of the upstream repo. e.g. XRPLF
|
Where <username> is the Github username of the upstream repo. e.g. XRPLF
|
||||||
USAGE
|
USAGE
|
||||||
exit 0
|
exit 0
|
||||||
@@ -83,4 +83,3 @@ fi
|
|||||||
_run git fetch --jobs=$(nproc) upstreams
|
_run git fetch --jobs=$(nproc) upstreams
|
||||||
|
|
||||||
exit 0
|
exit 0
|
||||||
|
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ then
|
|||||||
name=$( basename $0 )
|
name=$( basename $0 )
|
||||||
cat <<- USAGE
|
cat <<- USAGE
|
||||||
Usage: $name workbranch base/branch user/branch [user/branch [...]]
|
Usage: $name workbranch base/branch user/branch [user/branch [...]]
|
||||||
|
|
||||||
* workbranch will be created locally from base/branch
|
* workbranch will be created locally from base/branch
|
||||||
* base/branch and user/branch may be specified as user:branch to allow
|
* base/branch and user/branch may be specified as user:branch to allow
|
||||||
easy copying from Github PRs
|
easy copying from Github PRs
|
||||||
@@ -66,4 +66,3 @@ git push $push HEAD:$b
|
|||||||
git fetch $repo
|
git fetch $repo
|
||||||
-------------------------------------------------------------------
|
-------------------------------------------------------------------
|
||||||
PUSH
|
PUSH
|
||||||
|
|
||||||
|
|||||||
@@ -1,23 +0,0 @@
|
|||||||
#!/usr/bin/node
|
|
||||||
//
|
|
||||||
// Returns hex of lowercasing a string.
|
|
||||||
//
|
|
||||||
|
|
||||||
var stringToHex = function (s) {
|
|
||||||
return Array.prototype.map.call(s, function (c) {
|
|
||||||
var b = c.charCodeAt(0);
|
|
||||||
|
|
||||||
return b < 16 ? "0" + b.toString(16) : b.toString(16);
|
|
||||||
}).join("");
|
|
||||||
};
|
|
||||||
|
|
||||||
if (3 != process.argv.length) {
|
|
||||||
process.stderr.write("Usage: " + process.argv[1] + " string\n\nReturns hex of lowercasing string.\n");
|
|
||||||
process.exit(1);
|
|
||||||
|
|
||||||
} else {
|
|
||||||
|
|
||||||
process.stdout.write(stringToHex(process.argv[2].toLowerCase()) + "\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
// vim:sw=2:sts=2:ts=8:et
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
#!/usr/bin/node
|
|
||||||
//
|
|
||||||
// This is a tool to issue JSON-RPC requests from the command line.
|
|
||||||
//
|
|
||||||
// This can be used to test a JSON-RPC server.
|
|
||||||
//
|
|
||||||
// Requires: npm simple-jsonrpc
|
|
||||||
//
|
|
||||||
|
|
||||||
var jsonrpc = require('simple-jsonrpc');
|
|
||||||
|
|
||||||
var program = process.argv[1];
|
|
||||||
|
|
||||||
if (5 !== process.argv.length) {
|
|
||||||
console.log("Usage: %s <URL> <method> <json>", program);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
var url = process.argv[2];
|
|
||||||
var method = process.argv[3];
|
|
||||||
var json_raw = process.argv[4];
|
|
||||||
var json;
|
|
||||||
|
|
||||||
try {
|
|
||||||
json = JSON.parse(json_raw);
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
console.log("JSON parse error: %s", e.message);
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
|
|
||||||
var client = jsonrpc.client(url);
|
|
||||||
|
|
||||||
client.call(method, json,
|
|
||||||
function (result) {
|
|
||||||
console.log(JSON.stringify(result, undefined, 2));
|
|
||||||
},
|
|
||||||
function (error) {
|
|
||||||
console.log(JSON.stringify(error, undefined, 2));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// vim:sw=2:sts=2:ts=8:et
|
|
||||||
@@ -1,68 +0,0 @@
|
|||||||
#!/usr/bin/node
|
|
||||||
//
|
|
||||||
// This is a tool to listen for JSON-RPC requests at an IP and port.
|
|
||||||
//
|
|
||||||
// This will report the request to console and echo back the request as the response.
|
|
||||||
//
|
|
||||||
|
|
||||||
var http = require("http");
|
|
||||||
|
|
||||||
var program = process.argv[1];
|
|
||||||
|
|
||||||
if (4 !== process.argv.length) {
|
|
||||||
console.log("Usage: %s <ip> <port>", program);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
var ip = process.argv[2];
|
|
||||||
var port = process.argv[3];
|
|
||||||
|
|
||||||
var server = http.createServer(function (req, res) {
|
|
||||||
console.log("CONNECT");
|
|
||||||
var input = "";
|
|
||||||
|
|
||||||
req.setEncoding();
|
|
||||||
|
|
||||||
req.on('data', function (buffer) {
|
|
||||||
// console.log("DATA: %s", buffer);
|
|
||||||
input = input + buffer;
|
|
||||||
});
|
|
||||||
|
|
||||||
req.on('end', function () {
|
|
||||||
// console.log("END");
|
|
||||||
|
|
||||||
var json_req;
|
|
||||||
|
|
||||||
console.log("URL: %s", req.url);
|
|
||||||
console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));
|
|
||||||
|
|
||||||
try {
|
|
||||||
json_req = JSON.parse(input);
|
|
||||||
|
|
||||||
console.log("REQ: %s", JSON.stringify(json_req, undefined, 2));
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
console.log("BAD JSON: %s", e.message);
|
|
||||||
|
|
||||||
json_req = { error : e.message }
|
|
||||||
}
|
|
||||||
|
|
||||||
res.statusCode = 200;
|
|
||||||
res.end(JSON.stringify({
|
|
||||||
jsonrpc: "2.0",
|
|
||||||
result: { request : json_req },
|
|
||||||
id: req.id
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
|
|
||||||
req.on('close', function () {
|
|
||||||
console.log("CLOSE");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
server.listen(port, ip, undefined,
|
|
||||||
function () {
|
|
||||||
console.log("Listening at: %s:%s", ip, port);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// vim:sw=2:sts=2:ts=8:et
|
|
||||||
218
bin/physical.sh
218
bin/physical.sh
@@ -1,218 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
set -o errexit
|
|
||||||
|
|
||||||
marker_base=985c80fbc6131f3a8cedd0da7e8af98dfceb13c7
|
|
||||||
marker_commit=${1:-${marker_base}}
|
|
||||||
|
|
||||||
if [ $(git merge-base ${marker_commit} ${marker_base}) != ${marker_base} ]; then
|
|
||||||
echo "first marker commit not an ancestor: ${marker_commit}"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ $(git merge-base ${marker_commit} HEAD) != $(git rev-parse --verify ${marker_commit}) ]; then
|
|
||||||
echo "given marker commit not an ancestor: ${marker_commit}"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -e Builds/CMake ]; then
|
|
||||||
echo move CMake
|
|
||||||
git mv Builds/CMake cmake
|
|
||||||
git add --update .
|
|
||||||
git commit -m 'Move CMake directory' --author 'Pretty Printer <cpp@ripple.com>'
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -e src/ripple ]; then
|
|
||||||
|
|
||||||
echo move protocol buffers
|
|
||||||
mkdir -p include/xrpl
|
|
||||||
if [ -e src/ripple/proto ]; then
|
|
||||||
git mv src/ripple/proto include/xrpl
|
|
||||||
fi
|
|
||||||
|
|
||||||
extract_list() {
|
|
||||||
git show ${marker_commit}:Builds/CMake/RippledCore.cmake | \
|
|
||||||
awk "/END ${1}/ { p = 0 } p && /src\/ripple/; /BEGIN ${1}/ { p = 1 }" | \
|
|
||||||
sed -e 's#src/ripple/##' -e 's#[^a-z]\+$##'
|
|
||||||
}
|
|
||||||
|
|
||||||
move_files() {
|
|
||||||
oldroot="$1"; shift
|
|
||||||
newroot="$1"; shift
|
|
||||||
detail="$1"; shift
|
|
||||||
files=("$@")
|
|
||||||
for file in ${files[@]}; do
|
|
||||||
if [ ! -e ${oldroot}/${file} ]; then
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
dir=$(dirname ${file})
|
|
||||||
if [ $(basename ${dir}) == 'details' ]; then
|
|
||||||
dir=$(dirname ${dir})
|
|
||||||
fi
|
|
||||||
if [ $(basename ${dir}) == 'impl' ]; then
|
|
||||||
dir="$(dirname ${dir})/${detail}"
|
|
||||||
fi
|
|
||||||
mkdir -p ${newroot}/${dir}
|
|
||||||
git mv ${oldroot}/${file} ${newroot}/${dir}
|
|
||||||
done
|
|
||||||
}
|
|
||||||
|
|
||||||
echo move libxrpl headers
|
|
||||||
files=$(extract_list 'LIBXRPL HEADERS')
|
|
||||||
files+=(
|
|
||||||
basics/SlabAllocator.h
|
|
||||||
|
|
||||||
beast/asio/io_latency_probe.h
|
|
||||||
beast/container/aged_container.h
|
|
||||||
beast/container/aged_container_utility.h
|
|
||||||
beast/container/aged_map.h
|
|
||||||
beast/container/aged_multimap.h
|
|
||||||
beast/container/aged_multiset.h
|
|
||||||
beast/container/aged_set.h
|
|
||||||
beast/container/aged_unordered_map.h
|
|
||||||
beast/container/aged_unordered_multimap.h
|
|
||||||
beast/container/aged_unordered_multiset.h
|
|
||||||
beast/container/aged_unordered_set.h
|
|
||||||
beast/container/detail/aged_associative_container.h
|
|
||||||
beast/container/detail/aged_container_iterator.h
|
|
||||||
beast/container/detail/aged_ordered_container.h
|
|
||||||
beast/container/detail/aged_unordered_container.h
|
|
||||||
beast/container/detail/empty_base_optimization.h
|
|
||||||
beast/core/LockFreeStack.h
|
|
||||||
beast/insight/Collector.h
|
|
||||||
beast/insight/Counter.h
|
|
||||||
beast/insight/CounterImpl.h
|
|
||||||
beast/insight/Event.h
|
|
||||||
beast/insight/EventImpl.h
|
|
||||||
beast/insight/Gauge.h
|
|
||||||
beast/insight/GaugeImpl.h
|
|
||||||
beast/insight/Group.h
|
|
||||||
beast/insight/Groups.h
|
|
||||||
beast/insight/Hook.h
|
|
||||||
beast/insight/HookImpl.h
|
|
||||||
beast/insight/Insight.h
|
|
||||||
beast/insight/Meter.h
|
|
||||||
beast/insight/MeterImpl.h
|
|
||||||
beast/insight/NullCollector.h
|
|
||||||
beast/insight/StatsDCollector.h
|
|
||||||
beast/test/fail_counter.h
|
|
||||||
beast/test/fail_stream.h
|
|
||||||
beast/test/pipe_stream.h
|
|
||||||
beast/test/sig_wait.h
|
|
||||||
beast/test/string_iostream.h
|
|
||||||
beast/test/string_istream.h
|
|
||||||
beast/test/string_ostream.h
|
|
||||||
beast/test/test_allocator.h
|
|
||||||
beast/test/yield_to.h
|
|
||||||
beast/utility/hash_pair.h
|
|
||||||
beast/utility/maybe_const.h
|
|
||||||
beast/utility/temp_dir.h
|
|
||||||
|
|
||||||
# included by only json/impl/json_assert.h
|
|
||||||
json/json_errors.h
|
|
||||||
|
|
||||||
protocol/PayChan.h
|
|
||||||
protocol/RippleLedgerHash.h
|
|
||||||
protocol/messages.h
|
|
||||||
protocol/st.h
|
|
||||||
)
|
|
||||||
files+=(
|
|
||||||
basics/README.md
|
|
||||||
crypto/README.md
|
|
||||||
json/README.md
|
|
||||||
protocol/README.md
|
|
||||||
resource/README.md
|
|
||||||
)
|
|
||||||
move_files src/ripple include/xrpl detail ${files[@]}
|
|
||||||
|
|
||||||
echo move libxrpl sources
|
|
||||||
files=$(extract_list 'LIBXRPL SOURCES')
|
|
||||||
move_files src/ripple src/libxrpl "" ${files[@]}
|
|
||||||
|
|
||||||
echo check leftovers
|
|
||||||
dirs=$(cd include/xrpl; ls -d */)
|
|
||||||
dirs=$(cd src/ripple; ls -d ${dirs} 2>/dev/null || true)
|
|
||||||
files="$(cd src/ripple; find ${dirs} -type f)"
|
|
||||||
if [ -n "${files}" ]; then
|
|
||||||
echo "leftover files:"
|
|
||||||
echo ${files}
|
|
||||||
exit
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo remove empty directories
|
|
||||||
empty_dirs="$(cd src/ripple; find ${dirs} -depth -type d)"
|
|
||||||
for dir in ${empty_dirs[@]}; do
|
|
||||||
if [ -e ${dir} ]; then
|
|
||||||
rmdir ${dir}
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
echo move xrpld sources
|
|
||||||
files=$(
|
|
||||||
extract_list 'XRPLD SOURCES'
|
|
||||||
cd src/ripple
|
|
||||||
find * -regex '.*\.\(h\|ipp\|md\|pu\|uml\|png\)'
|
|
||||||
)
|
|
||||||
move_files src/ripple src/xrpld detail ${files[@]}
|
|
||||||
|
|
||||||
files="$(cd src/ripple; find . -type f)"
|
|
||||||
if [ -n "${files}" ]; then
|
|
||||||
echo "leftover files:"
|
|
||||||
echo ${files}
|
|
||||||
exit
|
|
||||||
fi
|
|
||||||
|
|
||||||
fi
|
|
||||||
|
|
||||||
rm -rf src/ripple
|
|
||||||
|
|
||||||
echo rename .hpp to .h
|
|
||||||
find include src -name '*.hpp' -exec bash -c 'f="{}"; git mv "${f}" "${f%hpp}h"' \;
|
|
||||||
|
|
||||||
echo move PerfLog.h
|
|
||||||
if [ -e include/xrpl/basics/PerfLog.h ]; then
|
|
||||||
git mv include/xrpl/basics/PerfLog.h src/xrpld/perflog
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Make sure all protobuf includes have the correct prefix.
|
|
||||||
protobuf_replace='s:^#include\s*["<].*org/xrpl\([^">]\+\)[">]:#include <xrpl/proto/org/xrpl\1>:'
|
|
||||||
# Make sure first-party includes use angle brackets and .h extension.
|
|
||||||
ripple_replace='s:include\s*["<]ripple/\(.*\)\.h\(pp\)\?[">]:include <ripple/\1.h>:'
|
|
||||||
beast_replace='s:include\s*<beast/:include <xrpl/beast/:'
|
|
||||||
# Rename impl directories to detail.
|
|
||||||
impl_rename='s:\(<xrpl.*\)/impl\(/details\)\?/:\1/detail/:'
|
|
||||||
|
|
||||||
echo rewrite includes in libxrpl
|
|
||||||
find include/xrpl src/libxrpl -type f -exec sed -i \
|
|
||||||
-e "${protobuf_replace}" \
|
|
||||||
-e "${ripple_replace}" \
|
|
||||||
-e "${beast_replace}" \
|
|
||||||
-e 's:^#include <ripple/:#include <xrpl/:' \
|
|
||||||
-e "${impl_rename}" \
|
|
||||||
{} +
|
|
||||||
|
|
||||||
echo rewrite includes in xrpld
|
|
||||||
# # https://www.baeldung.com/linux/join-multiple-lines
|
|
||||||
libxrpl_dirs="$(cd include/xrpl; ls -d1 */ | sed 's:/$::')"
|
|
||||||
# libxrpl_dirs='a\nb\nc\n'
|
|
||||||
readarray -t libxrpl_dirs <<< "${libxrpl_dirs}"
|
|
||||||
# libxrpl_dirs=(a b c)
|
|
||||||
libxrpl_dirs=$(printf -v txt '%s\\|' "${libxrpl_dirs[@]}"; echo "${txt%\\|}")
|
|
||||||
# libxrpl_dirs='a\|b\|c'
|
|
||||||
find src/xrpld src/test -type f -exec sed -i \
|
|
||||||
-e "${protobuf_replace}" \
|
|
||||||
-e "${ripple_replace}" \
|
|
||||||
-e "${beast_replace}" \
|
|
||||||
-e "s:^#include <ripple/basics/PerfLog.h>:#include <xrpld/perflog/PerfLog.h>:" \
|
|
||||||
-e "s:^#include <ripple/\(${libxrpl_dirs}\)/:#include <xrpl/\1/:" \
|
|
||||||
-e 's:^#include <ripple/:#include <xrpld/:' \
|
|
||||||
-e "${impl_rename}" \
|
|
||||||
{} +
|
|
||||||
|
|
||||||
git commit -m 'Rearrange sources' --author 'Pretty Printer <cpp@ripple.com>'
|
|
||||||
find include src -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -exec clang-format-10 -i {} +
|
|
||||||
git add --update .
|
|
||||||
git commit -m 'Rewrite includes' --author 'Pretty Printer <cpp@ripple.com>'
|
|
||||||
./Builds/levelization/levelization.sh
|
|
||||||
git add --update .
|
|
||||||
git commit -m 'Recompute loops' --author 'Pretty Printer <cpp@ripple.com>'
|
|
||||||
252
bin/rlint.js
252
bin/rlint.js
@@ -1,252 +0,0 @@
|
|||||||
#!/usr/bin/node
|
|
||||||
|
|
||||||
var async = require('async');
|
|
||||||
var Remote = require('ripple-lib').Remote;
|
|
||||||
var Transaction = require('ripple-lib').Transaction;
|
|
||||||
var UInt160 = require('ripple-lib').UInt160;
|
|
||||||
var Amount = require('ripple-lib').Amount;
|
|
||||||
|
|
||||||
var book_key = function (book) {
|
|
||||||
return book.taker_pays.currency
|
|
||||||
+ ":" + book.taker_pays.issuer
|
|
||||||
+ ":" + book.taker_gets.currency
|
|
||||||
+ ":" + book.taker_gets.issuer;
|
|
||||||
};
|
|
||||||
|
|
||||||
var book_key_cross = function (book) {
|
|
||||||
return book.taker_gets.currency
|
|
||||||
+ ":" + book.taker_gets.issuer
|
|
||||||
+ ":" + book.taker_pays.currency
|
|
||||||
+ ":" + book.taker_pays.issuer;
|
|
||||||
};
|
|
||||||
|
|
||||||
var ledger_verify = function (ledger) {
|
|
||||||
var dir_nodes = ledger.accountState.filter(function (entry) {
|
|
||||||
return entry.LedgerEntryType === 'DirectoryNode' // Only directories
|
|
||||||
&& entry.index === entry.RootIndex // Only root nodes
|
|
||||||
&& 'TakerGetsCurrency' in entry; // Only offer directories
|
|
||||||
});
|
|
||||||
|
|
||||||
var books = {};
|
|
||||||
|
|
||||||
dir_nodes.forEach(function (node) {
|
|
||||||
var book = {
|
|
||||||
taker_gets: {
|
|
||||||
currency: UInt160.from_generic(node.TakerGetsCurrency).to_json(),
|
|
||||||
issuer: UInt160.from_generic(node.TakerGetsIssuer).to_json()
|
|
||||||
},
|
|
||||||
taker_pays: {
|
|
||||||
currency: UInt160.from_generic(node.TakerPaysCurrency).to_json(),
|
|
||||||
issuer: UInt160.from_generic(node.TakerPaysIssuer).to_json()
|
|
||||||
},
|
|
||||||
quality: Amount.from_quality(node.RootIndex),
|
|
||||||
index: node.RootIndex
|
|
||||||
};
|
|
||||||
|
|
||||||
books[book_key(book)] = book;
|
|
||||||
|
|
||||||
// console.log(JSON.stringify(node, undefined, 2));
|
|
||||||
});
|
|
||||||
|
|
||||||
// console.log(JSON.stringify(dir_entry, undefined, 2));
|
|
||||||
console.log("#%s books: %s", ledger.ledger_index, Object.keys(books).length);
|
|
||||||
|
|
||||||
Object.keys(books).forEach(function (key) {
|
|
||||||
var book = books[key];
|
|
||||||
var key_cross = book_key_cross(book);
|
|
||||||
var book_cross = books[key_cross];
|
|
||||||
|
|
||||||
if (book && book_cross && !book_cross.done)
|
|
||||||
{
|
|
||||||
var book_cross_quality_inverted = Amount.from_json("1.0/1/1").divide(book_cross.quality);
|
|
||||||
|
|
||||||
if (book_cross_quality_inverted.compareTo(book.quality) >= 0)
|
|
||||||
{
|
|
||||||
// Crossing books
|
|
||||||
console.log("crossing: #%s :: %s :: %s :: %s :: %s :: %s :: %s", ledger.ledger_index, key, book.quality.to_text(), book_cross.quality.to_text(), book_cross_quality_inverted.to_text(),
|
|
||||||
book.index, book_cross.index);
|
|
||||||
}
|
|
||||||
|
|
||||||
book_cross.done = true;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
var ripple_selfs = {};
|
|
||||||
|
|
||||||
var accounts = {};
|
|
||||||
var counts = {};
|
|
||||||
|
|
||||||
ledger.accountState.forEach(function (entry) {
|
|
||||||
if (entry.LedgerEntryType === 'Offer')
|
|
||||||
{
|
|
||||||
counts[entry.Account] = (counts[entry.Account] || 0) + 1;
|
|
||||||
}
|
|
||||||
else if (entry.LedgerEntryType === 'RippleState')
|
|
||||||
{
|
|
||||||
if (entry.Flags & (0x10000 | 0x40000))
|
|
||||||
{
|
|
||||||
counts[entry.LowLimit.issuer] = (counts[entry.LowLimit.issuer] || 0) + 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (entry.Flags & (0x20000 | 0x80000))
|
|
||||||
{
|
|
||||||
counts[entry.HighLimit.issuer] = (counts[entry.HighLimit.issuer] || 0) + 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (entry.HighLimit.issuer === entry.LowLimit.issuer)
|
|
||||||
ripple_selfs[entry.Account] = entry;
|
|
||||||
}
|
|
||||||
else if (entry.LedgerEntryType == 'AccountRoot')
|
|
||||||
{
|
|
||||||
accounts[entry.Account] = entry;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
var low = 0; // Accounts with too low a count.
|
|
||||||
var high = 0;
|
|
||||||
var missing_accounts = 0; // Objects with no referencing account.
|
|
||||||
var missing_objects = 0; // Accounts specifying an object but having none.
|
|
||||||
|
|
||||||
Object.keys(counts).forEach(function (account) {
|
|
||||||
if (account in accounts)
|
|
||||||
{
|
|
||||||
if (counts[account] !== accounts[account].OwnerCount)
|
|
||||||
{
|
|
||||||
if (counts[account] < accounts[account].OwnerCount)
|
|
||||||
{
|
|
||||||
high += 1;
|
|
||||||
console.log("%s: high count %s/%s", account, counts[account], accounts[account].OwnerCount);
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
low += 1;
|
|
||||||
console.log("%s: low count %s/%s", account, counts[account], accounts[account].OwnerCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
missing_accounts += 1;
|
|
||||||
|
|
||||||
console.log("%s: missing : count %s", account, counts[account]);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
Object.keys(accounts).forEach(function (account) {
|
|
||||||
if (!('OwnerCount' in accounts[account]))
|
|
||||||
{
|
|
||||||
console.log("%s: bad entry : %s", account, JSON.stringify(accounts[account], undefined, 2));
|
|
||||||
}
|
|
||||||
else if (!(account in counts) && accounts[account].OwnerCount)
|
|
||||||
{
|
|
||||||
missing_objects += 1;
|
|
||||||
|
|
||||||
console.log("%s: no objects : %s/%s", account, 0, accounts[account].OwnerCount);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (low)
|
|
||||||
console.log("counts too low = %s", low);
|
|
||||||
|
|
||||||
if (high)
|
|
||||||
console.log("counts too high = %s", high);
|
|
||||||
|
|
||||||
if (missing_objects)
|
|
||||||
console.log("missing_objects = %s", missing_objects);
|
|
||||||
|
|
||||||
if (missing_accounts)
|
|
||||||
console.log("missing_accounts = %s", missing_accounts);
|
|
||||||
|
|
||||||
if (Object.keys(ripple_selfs).length)
|
|
||||||
console.log("RippleState selfs = %s", Object.keys(ripple_selfs).length);
|
|
||||||
|
|
||||||
};
|
|
||||||
|
|
||||||
var ledger_request = function (remote, ledger_index, done) {
|
|
||||||
remote.request_ledger(undefined, {
|
|
||||||
accounts: true,
|
|
||||||
expand: true,
|
|
||||||
})
|
|
||||||
.ledger_index(ledger_index)
|
|
||||||
.on('success', function (m) {
|
|
||||||
// console.log("ledger: ", ledger_index);
|
|
||||||
// console.log("ledger: ", JSON.stringify(m, undefined, 2));
|
|
||||||
done(m.ledger);
|
|
||||||
})
|
|
||||||
.on('error', function (m) {
|
|
||||||
console.log("error");
|
|
||||||
done();
|
|
||||||
})
|
|
||||||
.request();
|
|
||||||
};
|
|
||||||
|
|
||||||
var usage = function () {
|
|
||||||
console.log("rlint.js _websocket_ip_ _websocket_port_ ");
|
|
||||||
};
|
|
||||||
|
|
||||||
var finish = function (remote) {
|
|
||||||
remote.disconnect();
|
|
||||||
|
|
||||||
// XXX Because remote.disconnect() doesn't work:
|
|
||||||
process.exit();
|
|
||||||
};
|
|
||||||
|
|
||||||
console.log("args: ", process.argv.length);
|
|
||||||
console.log("args: ", process.argv);
|
|
||||||
|
|
||||||
if (process.argv.length < 4) {
|
|
||||||
usage();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
var remote = Remote.from_config({
|
|
||||||
websocket_ip: process.argv[2],
|
|
||||||
websocket_port: process.argv[3],
|
|
||||||
})
|
|
||||||
.once('ledger_closed', function (m) {
|
|
||||||
console.log("ledger_closed: ", JSON.stringify(m, undefined, 2));
|
|
||||||
|
|
||||||
if (process.argv.length === 5) {
|
|
||||||
var ledger_index = process.argv[4];
|
|
||||||
|
|
||||||
ledger_request(remote, ledger_index, function (l) {
|
|
||||||
if (l) {
|
|
||||||
ledger_verify(l);
|
|
||||||
}
|
|
||||||
|
|
||||||
finish(remote);
|
|
||||||
});
|
|
||||||
|
|
||||||
} else if (process.argv.length === 6) {
|
|
||||||
var ledger_start = Number(process.argv[4]);
|
|
||||||
var ledger_end = Number(process.argv[5]);
|
|
||||||
var ledger_cursor = ledger_end;
|
|
||||||
|
|
||||||
async.whilst(
|
|
||||||
function () {
|
|
||||||
return ledger_start <= ledger_cursor && ledger_cursor <=ledger_end;
|
|
||||||
},
|
|
||||||
function (callback) {
|
|
||||||
// console.log(ledger_cursor);
|
|
||||||
|
|
||||||
ledger_request(remote, ledger_cursor, function (l) {
|
|
||||||
if (l) {
|
|
||||||
ledger_verify(l);
|
|
||||||
}
|
|
||||||
|
|
||||||
--ledger_cursor;
|
|
||||||
|
|
||||||
callback();
|
|
||||||
});
|
|
||||||
},
|
|
||||||
function (error) {
|
|
||||||
finish(remote);
|
|
||||||
});
|
|
||||||
|
|
||||||
} else {
|
|
||||||
finish(remote);
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.connect();
|
|
||||||
}
|
|
||||||
|
|
||||||
// vim:sw=2:sts=2:ts=8:et
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
set -exu
|
|
||||||
|
|
||||||
: ${TRAVIS_BUILD_DIR:=""}
|
|
||||||
: ${VCPKG_DIR:=".vcpkg"}
|
|
||||||
export VCPKG_ROOT=${VCPKG_DIR}
|
|
||||||
: ${VCPKG_DEFAULT_TRIPLET:="x64-windows-static"}
|
|
||||||
|
|
||||||
export VCPKG_DEFAULT_TRIPLET
|
|
||||||
|
|
||||||
EXE="vcpkg"
|
|
||||||
if [[ -z ${COMSPEC:-} ]]; then
|
|
||||||
EXE="${EXE}.exe"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -d "${VCPKG_DIR}" && -x "${VCPKG_DIR}/${EXE}" && -d "${VCPKG_DIR}/installed" ]] ; then
|
|
||||||
echo "Using cached vcpkg at ${VCPKG_DIR}"
|
|
||||||
${VCPKG_DIR}/${EXE} list
|
|
||||||
else
|
|
||||||
if [[ -d "${VCPKG_DIR}" ]] ; then
|
|
||||||
rm -rf "${VCPKG_DIR}"
|
|
||||||
fi
|
|
||||||
git clone --branch 2021.04.30 https://github.com/Microsoft/vcpkg.git ${VCPKG_DIR}
|
|
||||||
pushd ${VCPKG_DIR}
|
|
||||||
BSARGS=()
|
|
||||||
if [[ "$(uname)" == "Darwin" ]] ; then
|
|
||||||
BSARGS+=(--allowAppleClang)
|
|
||||||
fi
|
|
||||||
if [[ -z ${COMSPEC:-} ]]; then
|
|
||||||
chmod +x ./bootstrap-vcpkg.sh
|
|
||||||
time ./bootstrap-vcpkg.sh "${BSARGS[@]}"
|
|
||||||
else
|
|
||||||
time ./bootstrap-vcpkg.bat
|
|
||||||
fi
|
|
||||||
popd
|
|
||||||
fi
|
|
||||||
|
|
||||||
# TODO: bring boost in this way as well ?
|
|
||||||
# NOTE: can pin specific ports to a commit/version like this:
|
|
||||||
# git checkout <SOME COMMIT HASH> ports/boost
|
|
||||||
if [ $# -eq 0 ]; then
|
|
||||||
echo "No extra packages specified..."
|
|
||||||
PKGS=()
|
|
||||||
else
|
|
||||||
PKGS=( "$@" )
|
|
||||||
fi
|
|
||||||
for LIB in "${PKGS[@]}"; do
|
|
||||||
time ${VCPKG_DIR}/${EXE} --clean-after-build install ${LIB}
|
|
||||||
done
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
|
|
||||||
# NOTE: must be sourced from a shell so it can export vars
|
|
||||||
|
|
||||||
cat << BATCH > ./getenv.bat
|
|
||||||
CALL %*
|
|
||||||
ENV
|
|
||||||
BATCH
|
|
||||||
|
|
||||||
while read line ; do
|
|
||||||
IFS='"' read x path arg <<<"${line}"
|
|
||||||
if [ -f "${path}" ] ; then
|
|
||||||
echo "FOUND: $path"
|
|
||||||
export VCINSTALLDIR=$(./getenv.bat "${path}" ${arg} | grep "^VCINSTALLDIR=" | sed -E "s/^VCINSTALLDIR=//g")
|
|
||||||
if [ "${VCINSTALLDIR}" != "" ] ; then
|
|
||||||
echo "USING ${VCINSTALLDIR}"
|
|
||||||
export LIB=$(./getenv.bat "${path}" ${arg} | grep "^LIB=" | sed -E "s/^LIB=//g")
|
|
||||||
export LIBPATH=$(./getenv.bat "${path}" ${arg} | grep "^LIBPATH=" | sed -E "s/^LIBPATH=//g")
|
|
||||||
export INCLUDE=$(./getenv.bat "${path}" ${arg} | grep "^INCLUDE=" | sed -E "s/^INCLUDE=//g")
|
|
||||||
ADDPATH=$(./getenv.bat "${path}" ${arg} | grep "^PATH=" | sed -E "s/^PATH=//g")
|
|
||||||
export PATH="${ADDPATH}:${PATH}"
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
done <<EOL
|
|
||||||
"C:/Program Files (x86)/Microsoft Visual Studio/2019/BuildTools/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
|
|
||||||
"C:/Program Files (x86)/Microsoft Visual Studio/2019/Community/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
|
|
||||||
"C:/Program Files (x86)/Microsoft Visual Studio/2017/BuildTools/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
|
|
||||||
"C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
|
|
||||||
"C:/Program Files (x86)/Microsoft Visual Studio 15.0/VC/vcvarsall.bat" amd64
|
|
||||||
"C:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/vcvarsall.bat" amd64
|
|
||||||
"C:/Program Files (x86)/Microsoft Visual Studio 13.0/VC/vcvarsall.bat" amd64
|
|
||||||
"C:/Program Files (x86)/Microsoft Visual Studio 12.0/VC/vcvarsall.bat" amd64
|
|
||||||
EOL
|
|
||||||
# TODO: update the list above as needed to support newer versions of msvc tools
|
|
||||||
|
|
||||||
rm -f getenv.bat
|
|
||||||
|
|
||||||
if [ "${VCINSTALLDIR}" = "" ] ; then
|
|
||||||
echo "No compatible visual studio found!"
|
|
||||||
fi
|
|
||||||
@@ -1,246 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
"""A script to test rippled in an infinite loop of start-sync-stop.
|
|
||||||
|
|
||||||
- Requires Python 3.7+.
|
|
||||||
- Can be stopped with SIGINT.
|
|
||||||
- Has no dependencies outside the standard library.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import sys
|
|
||||||
|
|
||||||
assert sys.version_info.major == 3 and sys.version_info.minor >= 7
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import asyncio
|
|
||||||
import configparser
|
|
||||||
import contextlib
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from pathlib import Path
|
|
||||||
import platform
|
|
||||||
import subprocess
|
|
||||||
import time
|
|
||||||
import urllib.error
|
|
||||||
import urllib.request
|
|
||||||
|
|
||||||
# Enable asynchronous subprocesses on Windows. The default changed in 3.8.
|
|
||||||
# https://docs.python.org/3.7/library/asyncio-platforms.html#subprocess-support-on-windows
|
|
||||||
if (platform.system() == 'Windows' and sys.version_info.major == 3
|
|
||||||
and sys.version_info.minor < 8):
|
|
||||||
asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
|
|
||||||
|
|
||||||
DEFAULT_EXE = 'rippled'
|
|
||||||
DEFAULT_CONFIGURATION_FILE = 'rippled.cfg'
|
|
||||||
# Number of seconds to wait before forcefully terminating.
|
|
||||||
PATIENCE = 120
|
|
||||||
# Number of contiguous seconds in a sync state to be considered synced.
|
|
||||||
DEFAULT_SYNC_DURATION = 60
|
|
||||||
# Number of seconds between polls of state.
|
|
||||||
DEFAULT_POLL_INTERVAL = 5
|
|
||||||
SYNC_STATES = ('full', 'validating', 'proposing')
|
|
||||||
|
|
||||||
|
|
||||||
def read_config(config_file):
|
|
||||||
# strict = False: Allow duplicate keys, e.g. [rpc_startup].
|
|
||||||
# allow_no_value = True: Allow keys with no values. Generally, these
|
|
||||||
# instances use the "key" as the value, and the section name is the key,
|
|
||||||
# e.g. [debug_logfile].
|
|
||||||
# delimiters = ('='): Allow ':' as a character in Windows paths. Some of
|
|
||||||
# our "keys" are actually values, and we don't want to split them on ':'.
|
|
||||||
config = configparser.ConfigParser(
|
|
||||||
strict=False,
|
|
||||||
allow_no_value=True,
|
|
||||||
delimiters=('='),
|
|
||||||
)
|
|
||||||
config.read(config_file)
|
|
||||||
return config
|
|
||||||
|
|
||||||
|
|
||||||
def to_list(value, separator=','):
|
|
||||||
"""Parse a list from a delimited string value."""
|
|
||||||
return [s.strip() for s in value.split(separator) if s]
|
|
||||||
|
|
||||||
|
|
||||||
def find_log_file(config_file):
|
|
||||||
"""Try to figure out what log file the user has chosen. Raises all kinds
|
|
||||||
of exceptions if there is any possibility of ambiguity."""
|
|
||||||
config = read_config(config_file)
|
|
||||||
values = list(config['debug_logfile'].keys())
|
|
||||||
if len(values) < 1:
|
|
||||||
raise ValueError(
|
|
||||||
f'no [debug_logfile] in configuration file: {config_file}')
|
|
||||||
if len(values) > 1:
|
|
||||||
raise ValueError(
|
|
||||||
f'too many [debug_logfile] in configuration file: {config_file}')
|
|
||||||
return values[0]
|
|
||||||
|
|
||||||
|
|
||||||
def find_http_port(config_file):
|
|
||||||
config = read_config(config_file)
|
|
||||||
names = list(config['server'].keys())
|
|
||||||
for name in names:
|
|
||||||
server = config[name]
|
|
||||||
if 'http' in to_list(server.get('protocol', '')):
|
|
||||||
return int(server['port'])
|
|
||||||
raise ValueError(f'no server in [server] for "http" protocol')
|
|
||||||
|
|
||||||
|
|
||||||
@contextlib.asynccontextmanager
|
|
||||||
async def rippled(exe=DEFAULT_EXE, config_file=DEFAULT_CONFIGURATION_FILE):
|
|
||||||
"""A context manager for a rippled process."""
|
|
||||||
# Start the server.
|
|
||||||
process = await asyncio.create_subprocess_exec(
|
|
||||||
str(exe),
|
|
||||||
'--conf',
|
|
||||||
str(config_file),
|
|
||||||
stdout=subprocess.DEVNULL,
|
|
||||||
stderr=subprocess.DEVNULL,
|
|
||||||
)
|
|
||||||
logging.info(f'rippled started with pid {process.pid}')
|
|
||||||
try:
|
|
||||||
yield process
|
|
||||||
finally:
|
|
||||||
# Ask it to stop.
|
|
||||||
logging.info(f'asking rippled (pid: {process.pid}) to stop')
|
|
||||||
start = time.time()
|
|
||||||
process.terminate()
|
|
||||||
|
|
||||||
# Wait nicely.
|
|
||||||
try:
|
|
||||||
await asyncio.wait_for(process.wait(), PATIENCE)
|
|
||||||
except asyncio.TimeoutError:
|
|
||||||
# Ask the operating system to kill it.
|
|
||||||
logging.warning(f'killing rippled ({process.pid})')
|
|
||||||
try:
|
|
||||||
process.kill()
|
|
||||||
except ProcessLookupError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
code = await process.wait()
|
|
||||||
end = time.time()
|
|
||||||
logging.info(
|
|
||||||
f'rippled stopped after {end - start:.1f} seconds with code {code}'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def sync(
|
|
||||||
port,
|
|
||||||
*,
|
|
||||||
duration=DEFAULT_SYNC_DURATION,
|
|
||||||
interval=DEFAULT_POLL_INTERVAL,
|
|
||||||
):
|
|
||||||
"""Poll rippled on an interval until it has been synced for a duration."""
|
|
||||||
start = time.perf_counter()
|
|
||||||
while (time.perf_counter() - start) < duration:
|
|
||||||
await asyncio.sleep(interval)
|
|
||||||
|
|
||||||
request = urllib.request.Request(
|
|
||||||
f'http://127.0.0.1:{port}',
|
|
||||||
data=json.dumps({
|
|
||||||
'method': 'server_state'
|
|
||||||
}).encode(),
|
|
||||||
headers={'Content-Type': 'application/json'},
|
|
||||||
)
|
|
||||||
with urllib.request.urlopen(request) as response:
|
|
||||||
try:
|
|
||||||
body = json.loads(response.read())
|
|
||||||
except urllib.error.HTTPError as cause:
|
|
||||||
logging.warning(f'server_state returned not JSON: {cause}')
|
|
||||||
start = time.perf_counter()
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
state = body['result']['state']['server_state']
|
|
||||||
except KeyError as cause:
|
|
||||||
logging.warning(f'server_state response missing key: {cause.key}')
|
|
||||||
start = time.perf_counter()
|
|
||||||
continue
|
|
||||||
logging.info(f'server_state: {state}')
|
|
||||||
if state not in SYNC_STATES:
|
|
||||||
# Require a contiguous sync state.
|
|
||||||
start = time.perf_counter()
|
|
||||||
|
|
||||||
|
|
||||||
async def loop(test,
|
|
||||||
*,
|
|
||||||
exe=DEFAULT_EXE,
|
|
||||||
config_file=DEFAULT_CONFIGURATION_FILE):
|
|
||||||
"""
|
|
||||||
Start-test-stop rippled in an infinite loop.
|
|
||||||
|
|
||||||
Moves log to a different file after each iteration.
|
|
||||||
"""
|
|
||||||
log_file = find_log_file(config_file)
|
|
||||||
id = 0
|
|
||||||
while True:
|
|
||||||
logging.info(f'iteration: {id}')
|
|
||||||
async with rippled(exe, config_file) as process:
|
|
||||||
start = time.perf_counter()
|
|
||||||
exited = asyncio.create_task(process.wait())
|
|
||||||
tested = asyncio.create_task(test())
|
|
||||||
# Try to sync as long as the process is running.
|
|
||||||
done, pending = await asyncio.wait(
|
|
||||||
{exited, tested},
|
|
||||||
return_when=asyncio.FIRST_COMPLETED,
|
|
||||||
)
|
|
||||||
if done == {exited}:
|
|
||||||
code = exited.result()
|
|
||||||
logging.warning(
|
|
||||||
f'server halted for unknown reason with code {code}')
|
|
||||||
else:
|
|
||||||
assert done == {tested}
|
|
||||||
assert tested.exception() is None
|
|
||||||
end = time.perf_counter()
|
|
||||||
logging.info(f'synced after {end - start:.0f} seconds')
|
|
||||||
os.replace(log_file, f'debug.{id}.log')
|
|
||||||
id += 1
|
|
||||||
|
|
||||||
|
|
||||||
logging.basicConfig(
|
|
||||||
format='%(asctime)s %(levelname)-8s %(message)s',
|
|
||||||
level=logging.INFO,
|
|
||||||
datefmt='%Y-%m-%d %H:%M:%S',
|
|
||||||
)
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
|
||||||
parser.add_argument(
|
|
||||||
'rippled',
|
|
||||||
type=Path,
|
|
||||||
nargs='?',
|
|
||||||
default=DEFAULT_EXE,
|
|
||||||
help='Path to rippled.',
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
'--conf',
|
|
||||||
type=Path,
|
|
||||||
default=DEFAULT_CONFIGURATION_FILE,
|
|
||||||
help='Path to configuration file.',
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
'--duration',
|
|
||||||
type=int,
|
|
||||||
default=DEFAULT_SYNC_DURATION,
|
|
||||||
help='Number of contiguous seconds required in a synchronized state.',
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
'--interval',
|
|
||||||
type=int,
|
|
||||||
default=DEFAULT_POLL_INTERVAL,
|
|
||||||
help='Number of seconds to wait between polls of state.',
|
|
||||||
)
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
port = find_http_port(args.conf)
|
|
||||||
|
|
||||||
|
|
||||||
def test():
|
|
||||||
return sync(port, duration=args.duration, interval=args.interval)
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
asyncio.run(loop(test, exe=args.rippled, config_file=args.conf))
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
# Squelch the message. This is a normal mode of exit.
|
|
||||||
pass
|
|
||||||
133
bin/stop-test.js
133
bin/stop-test.js
@@ -1,133 +0,0 @@
|
|||||||
/* -------------------------------- REQUIRES -------------------------------- */
|
|
||||||
|
|
||||||
var child = require("child_process");
|
|
||||||
var assert = require("assert");
|
|
||||||
|
|
||||||
/* --------------------------------- CONFIG --------------------------------- */
|
|
||||||
|
|
||||||
if (process.argv[2] == null) {
|
|
||||||
[
|
|
||||||
'Usage: ',
|
|
||||||
'',
|
|
||||||
' `node bin/stop-test.js i,j [rippled_path] [rippled_conf]`',
|
|
||||||
'',
|
|
||||||
' Launch rippled and stop it after n seconds for all n in [i, j}',
|
|
||||||
' For all even values of n launch rippled with `--fg`',
|
|
||||||
' For values of n where n % 3 == 0 launch rippled with `--fg`\n',
|
|
||||||
'Examples: ',
|
|
||||||
'',
|
|
||||||
' $ node bin/stop-test.js 5,10',
|
|
||||||
(' $ node bin/stop-test.js 1,4 ' +
|
|
||||||
'build/clang.debug/rippled $HOME/.confs/rippled.cfg')
|
|
||||||
]
|
|
||||||
.forEach(function(l){console.log(l)});
|
|
||||||
|
|
||||||
process.exit();
|
|
||||||
} else {
|
|
||||||
var testRange = process.argv[2].split(',').map(Number);
|
|
||||||
var rippledPath = process.argv[3] || 'build/rippled'
|
|
||||||
var rippledConf = process.argv[4] || 'rippled.cfg'
|
|
||||||
}
|
|
||||||
|
|
||||||
var options = {
|
|
||||||
env: process.env,
|
|
||||||
stdio: 'ignore' // we could dump the child io when it fails abnormally
|
|
||||||
};
|
|
||||||
|
|
||||||
// default args
|
|
||||||
var conf_args = ['--conf='+rippledConf];
|
|
||||||
var start_args = conf_args.concat([/*'--net'*/])
|
|
||||||
var stop_args = conf_args.concat(['stop']);
|
|
||||||
|
|
||||||
/* --------------------------------- HELPERS -------------------------------- */
|
|
||||||
|
|
||||||
function start(args) {
|
|
||||||
return child.spawn(rippledPath, args, options);
|
|
||||||
}
|
|
||||||
function stop(rippled) { child.execFile(rippledPath, stop_args, options)}
|
|
||||||
function secs_l8r(ms, f) {setTimeout(f, ms * 1000); }
|
|
||||||
|
|
||||||
function show_results_and_exit(results) {
|
|
||||||
console.log(JSON.stringify(results, undefined, 2));
|
|
||||||
process.exit();
|
|
||||||
}
|
|
||||||
|
|
||||||
var timeTakes = function (range) {
|
|
||||||
function sumRange(n) {return (n+1) * n /2}
|
|
||||||
var ret = sumRange(range[1]);
|
|
||||||
if (range[0] > 1) {
|
|
||||||
ret = ret - sumRange(range[0] - 1)
|
|
||||||
}
|
|
||||||
var stopping = (range[1] - range[0]) * 0.5;
|
|
||||||
return ret + stopping;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ---------------------------------- TEST ---------------------------------- */
|
|
||||||
|
|
||||||
console.log("Test will take ~%s seconds", timeTakes(testRange));
|
|
||||||
|
|
||||||
(function oneTest(n /* seconds */, results) {
|
|
||||||
if (n >= testRange[1]) {
|
|
||||||
// show_results_and_exit(results);
|
|
||||||
console.log(JSON.stringify(results, undefined, 2));
|
|
||||||
oneTest(testRange[0], []);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
var args = start_args;
|
|
||||||
if (n % 2 == 0) {args = args.concat(['--fg'])}
|
|
||||||
if (n % 3 == 0) {args = args.concat(['--net'])}
|
|
||||||
|
|
||||||
var result = {args: args, alive_for: n};
|
|
||||||
results.push(result);
|
|
||||||
|
|
||||||
console.log("\nLaunching `%s` with `%s` for %d seconds",
|
|
||||||
rippledPath, JSON.stringify(args), n);
|
|
||||||
|
|
||||||
rippled = start(args);
|
|
||||||
console.log("Rippled pid: %d", rippled.pid);
|
|
||||||
|
|
||||||
// defaults
|
|
||||||
var b4StopSent = false;
|
|
||||||
var stopSent = false;
|
|
||||||
var stop_took = null;
|
|
||||||
|
|
||||||
rippled.once('exit', function(){
|
|
||||||
if (!stopSent && !b4StopSent) {
|
|
||||||
console.warn('\nRippled exited itself b4 stop issued');
|
|
||||||
process.exit();
|
|
||||||
};
|
|
||||||
|
|
||||||
// The io handles close AFTER exit, may have implications for
|
|
||||||
// `stdio:'inherit'` option to `child.spawn`.
|
|
||||||
rippled.once('close', function() {
|
|
||||||
result.stop_took = (+new Date() - stop_took) / 1000; // seconds
|
|
||||||
console.log("Stopping after %d seconds took %s seconds",
|
|
||||||
n, result.stop_took);
|
|
||||||
oneTest(n+1, results);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
secs_l8r(n, function(){
|
|
||||||
console.log("Stopping rippled after %d seconds", n);
|
|
||||||
|
|
||||||
// possible race here ?
|
|
||||||
// seems highly unlikely, but I was having issues at one point
|
|
||||||
b4StopSent=true;
|
|
||||||
stop_took = (+new Date());
|
|
||||||
// when does `exit` actually get sent?
|
|
||||||
stop();
|
|
||||||
stopSent=true;
|
|
||||||
|
|
||||||
// Sometimes we want to attach with a debugger.
|
|
||||||
if (process.env.ABORT_TESTS_ON_STALL != null) {
|
|
||||||
// We wait 30 seconds, and if it hasn't stopped, we abort the process
|
|
||||||
secs_l8r(30, function() {
|
|
||||||
if (result.stop_took == null) {
|
|
||||||
console.log("rippled has stalled");
|
|
||||||
process.exit();
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}(testRange[0], []));
|
|
||||||
@@ -1,119 +0,0 @@
|
|||||||
/**
|
|
||||||
* bin/update_bintypes.js
|
|
||||||
*
|
|
||||||
* This unholy abomination of a script generates the JavaScript file
|
|
||||||
* src/js/bintypes.js from various parts of the C++ source code.
|
|
||||||
*
|
|
||||||
* This should *NOT* be part of any automatic build process unless the C++
|
|
||||||
* source data are brought into a more easily parseable format. Until then,
|
|
||||||
* simply run this script manually and fix as needed.
|
|
||||||
*/
|
|
||||||
|
|
||||||
// XXX: Process LedgerFormats.(h|cpp) as well.
|
|
||||||
|
|
||||||
var filenameProto = __dirname + '/../src/cpp/ripple/SerializeProto.h',
|
|
||||||
filenameTxFormatsH = __dirname + '/../src/cpp/ripple/TransactionFormats.h',
|
|
||||||
filenameTxFormats = __dirname + '/../src/cpp/ripple/TransactionFormats.cpp';
|
|
||||||
|
|
||||||
var fs = require('fs');
|
|
||||||
|
|
||||||
var output = [];
|
|
||||||
|
|
||||||
// Stage 1: Get the field types and codes from SerializeProto.h
|
|
||||||
var types = {},
|
|
||||||
fields = {};
|
|
||||||
String(fs.readFileSync(filenameProto)).split('\n').forEach(function (line) {
|
|
||||||
line = line.replace(/^\s+|\s+$/g, '').replace(/\s+/g, '');
|
|
||||||
if (!line.length || line.slice(0, 2) === '//' || line.slice(-1) !== ')') return;
|
|
||||||
|
|
||||||
var tmp = line.slice(0, -1).split('('),
|
|
||||||
type = tmp[0],
|
|
||||||
opts = tmp[1].split(',');
|
|
||||||
|
|
||||||
if (type === 'TYPE') types[opts[1]] = [opts[0], +opts[2]];
|
|
||||||
else if (type === 'FIELD') fields[opts[0]] = [types[opts[1]][0], +opts[2]];
|
|
||||||
});
|
|
||||||
|
|
||||||
output.push('var ST = require("./serializedtypes");');
|
|
||||||
output.push('');
|
|
||||||
output.push('var REQUIRED = exports.REQUIRED = 0,');
|
|
||||||
output.push(' OPTIONAL = exports.OPTIONAL = 1,');
|
|
||||||
output.push(' DEFAULT = exports.DEFAULT = 2;');
|
|
||||||
output.push('');
|
|
||||||
|
|
||||||
function pad(s, n) { while (s.length < n) s += ' '; return s; }
|
|
||||||
function padl(s, n) { while (s.length < n) s = ' '+s; return s; }
|
|
||||||
|
|
||||||
Object.keys(types).forEach(function (type) {
|
|
||||||
output.push(pad('ST.'+types[type][0]+'.id', 25) + ' = '+types[type][1]+';');
|
|
||||||
});
|
|
||||||
output.push('');
|
|
||||||
|
|
||||||
// Stage 2: Get the transaction type IDs from TransactionFormats.h
|
|
||||||
var ttConsts = {};
|
|
||||||
String(fs.readFileSync(filenameTxFormatsH)).split('\n').forEach(function (line) {
|
|
||||||
var regex = /tt([A-Z_]+)\s+=\s+([0-9-]+)/;
|
|
||||||
var match = line.match(regex);
|
|
||||||
if (match) ttConsts[match[1]] = +match[2];
|
|
||||||
});
|
|
||||||
|
|
||||||
// Stage 3: Get the transaction formats from TransactionFormats.cpp
|
|
||||||
var base = [],
|
|
||||||
sections = [],
|
|
||||||
current = base;
|
|
||||||
String(fs.readFileSync(filenameTxFormats)).split('\n').forEach(function (line) {
|
|
||||||
line = line.replace(/^\s+|\s+$/g, '').replace(/\s+/g, '');
|
|
||||||
|
|
||||||
var d_regex = /DECLARE_TF\(([A-Za-z]+),tt([A-Z_]+)/;
|
|
||||||
var d_match = line.match(d_regex);
|
|
||||||
|
|
||||||
var s_regex = /SOElement\(sf([a-z]+),SOE_(REQUIRED|OPTIONAL|DEFAULT)/i;
|
|
||||||
var s_match = line.match(s_regex);
|
|
||||||
|
|
||||||
if (d_match) sections.push(current = [d_match[1], ttConsts[d_match[2]]]);
|
|
||||||
else if (s_match) current.push([s_match[1], s_match[2]]);
|
|
||||||
});
|
|
||||||
|
|
||||||
function removeFinalComma(arr) {
|
|
||||||
arr[arr.length-1] = arr[arr.length-1].slice(0, -1);
|
|
||||||
}
|
|
||||||
|
|
||||||
output.push('var base = [');
|
|
||||||
base.forEach(function (field) {
|
|
||||||
var spec = fields[field[0]];
|
|
||||||
output.push(' [ '+
|
|
||||||
pad("'"+field[0]+"'", 21)+', '+
|
|
||||||
pad(field[1], 8)+', '+
|
|
||||||
padl(""+spec[1], 2)+', '+
|
|
||||||
'ST.'+pad(spec[0], 3)+
|
|
||||||
' ],');
|
|
||||||
});
|
|
||||||
removeFinalComma(output);
|
|
||||||
output.push('];');
|
|
||||||
output.push('');
|
|
||||||
|
|
||||||
|
|
||||||
output.push('exports.tx = {');
|
|
||||||
sections.forEach(function (section) {
|
|
||||||
var name = section.shift(),
|
|
||||||
ttid = section.shift();
|
|
||||||
|
|
||||||
output.push(' '+name+': ['+ttid+'].concat(base, [');
|
|
||||||
section.forEach(function (field) {
|
|
||||||
var spec = fields[field[0]];
|
|
||||||
output.push(' [ '+
|
|
||||||
pad("'"+field[0]+"'", 21)+', '+
|
|
||||||
pad(field[1], 8)+', '+
|
|
||||||
padl(""+spec[1], 2)+', '+
|
|
||||||
'ST.'+pad(spec[0], 3)+
|
|
||||||
' ],');
|
|
||||||
});
|
|
||||||
removeFinalComma(output);
|
|
||||||
output.push(' ]),');
|
|
||||||
});
|
|
||||||
removeFinalComma(output);
|
|
||||||
output.push('};');
|
|
||||||
output.push('');
|
|
||||||
|
|
||||||
console.log(output.join('\n'));
|
|
||||||
|
|
||||||
@@ -396,8 +396,8 @@
|
|||||||
# true - enables compression
|
# true - enables compression
|
||||||
# false - disables compression [default].
|
# false - disables compression [default].
|
||||||
#
|
#
|
||||||
# The rippled server can save bandwidth by compressing its peer-to-peer communications,
|
# The rippled server can save bandwidth by compressing its peer-to-peer communications,
|
||||||
# at a cost of greater CPU usage. If you enable link compression,
|
# at a cost of greater CPU usage. If you enable link compression,
|
||||||
# the server automatically compresses communications with peer servers
|
# the server automatically compresses communications with peer servers
|
||||||
# that also have link compression enabled.
|
# that also have link compression enabled.
|
||||||
# https://xrpl.org/enable-link-compression.html
|
# https://xrpl.org/enable-link-compression.html
|
||||||
@@ -975,6 +975,47 @@
|
|||||||
# number of ledger records online. Must be greater
|
# number of ledger records online. Must be greater
|
||||||
# than or equal to ledger_history.
|
# than or equal to ledger_history.
|
||||||
#
|
#
|
||||||
|
# Optional keys for NuDB only:
|
||||||
|
#
|
||||||
|
# nudb_block_size EXPERIMENTAL: Block size in bytes for NuDB storage.
|
||||||
|
# Must be a power of 2 between 4096 and 32768. Default is 4096.
|
||||||
|
#
|
||||||
|
# This parameter controls the fundamental storage unit
|
||||||
|
# size for NuDB's internal data structures. The choice
|
||||||
|
# of block size can significantly impact performance
|
||||||
|
# depending on your storage hardware and filesystem:
|
||||||
|
#
|
||||||
|
# - 4096 bytes: Optimal for most standard SSDs and
|
||||||
|
# traditional filesystems (ext4, NTFS, HFS+).
|
||||||
|
# Provides good balance of performance and storage
|
||||||
|
# efficiency. Recommended for most deployments.
|
||||||
|
# Minimizes memory footprint and provides consistent
|
||||||
|
# low-latency access patterns across diverse hardware.
|
||||||
|
#
|
||||||
|
# - 8192-16384 bytes: May improve performance on
|
||||||
|
# high-end NVMe SSDs and copy-on-write filesystems
|
||||||
|
# like ZFS or Btrfs that benefit from larger block
|
||||||
|
# alignment. Can reduce metadata overhead for large
|
||||||
|
# databases. Offers better sequential throughput and
|
||||||
|
# reduced I/O operations at the cost of higher memory
|
||||||
|
# usage per operation.
|
||||||
|
#
|
||||||
|
# - 32768 bytes (32K): Maximum supported block size
|
||||||
|
# for high-performance scenarios with very fast
|
||||||
|
# storage. May increase memory usage and reduce
|
||||||
|
# efficiency for smaller databases. Best suited for
|
||||||
|
# enterprise environments with abundant RAM.
|
||||||
|
#
|
||||||
|
# Performance testing is recommended before deploying
|
||||||
|
# any non-default block size in production environments.
|
||||||
|
#
|
||||||
|
# Note: This setting cannot be changed after database
|
||||||
|
# creation without rebuilding the entire database.
|
||||||
|
# Choose carefully based on your hardware and expected
|
||||||
|
# database size.
|
||||||
|
#
|
||||||
|
# Example: nudb_block_size=4096
|
||||||
|
#
|
||||||
# These keys modify the behavior of online_delete, and thus are only
|
# These keys modify the behavior of online_delete, and thus are only
|
||||||
# relevant if online_delete is defined and non-zero:
|
# relevant if online_delete is defined and non-zero:
|
||||||
#
|
#
|
||||||
@@ -1011,7 +1052,7 @@
|
|||||||
# that rippled is still in sync with the network,
|
# that rippled is still in sync with the network,
|
||||||
# and that the validated ledger is less than
|
# and that the validated ledger is less than
|
||||||
# 'age_threshold_seconds' old. If not, then continue
|
# 'age_threshold_seconds' old. If not, then continue
|
||||||
# sleeping for this number of seconds and
|
# sleeping for this number of seconds and
|
||||||
# checking until healthy.
|
# checking until healthy.
|
||||||
# Default is 5.
|
# Default is 5.
|
||||||
#
|
#
|
||||||
@@ -1113,7 +1154,7 @@
|
|||||||
# page_size Valid values: integer (MUST be power of 2 between 512 and 65536)
|
# page_size Valid values: integer (MUST be power of 2 between 512 and 65536)
|
||||||
# The default is 4096 bytes. This setting determines
|
# The default is 4096 bytes. This setting determines
|
||||||
# the size of a page in the transaction.db file.
|
# the size of a page in the transaction.db file.
|
||||||
# See https://www.sqlite.org/pragma.html#pragma_page_size
|
# See https://www.sqlite.org/pragma.html#pragma_page_size
|
||||||
# for more details about the available options.
|
# for more details about the available options.
|
||||||
#
|
#
|
||||||
# journal_size_limit Valid values: integer
|
# journal_size_limit Valid values: integer
|
||||||
@@ -1471,6 +1512,7 @@ secure_gateway = 127.0.0.1
|
|||||||
[node_db]
|
[node_db]
|
||||||
type=NuDB
|
type=NuDB
|
||||||
path=/var/lib/rippled/db/nudb
|
path=/var/lib/rippled/db/nudb
|
||||||
|
nudb_block_size=4096
|
||||||
online_delete=512
|
online_delete=512
|
||||||
advisory_delete=0
|
advisory_delete=0
|
||||||
|
|
||||||
|
|||||||
@@ -101,6 +101,14 @@
|
|||||||
# 2025-05-12, Jingchen Wu
|
# 2025-05-12, Jingchen Wu
|
||||||
# - add -fprofile-update=atomic to ensure atomic profile generation
|
# - add -fprofile-update=atomic to ensure atomic profile generation
|
||||||
#
|
#
|
||||||
|
# 2025-08-28, Bronek Kozicki
|
||||||
|
# - fix "At least one COMMAND must be given" CMake warning from policy CMP0175
|
||||||
|
#
|
||||||
|
# 2025-09-03, Jingchen Wu
|
||||||
|
# - remove the unused function append_coverage_compiler_flags and append_coverage_compiler_flags_to_target
|
||||||
|
# - add a new function add_code_coverage_to_target
|
||||||
|
# - remove some unused code
|
||||||
|
#
|
||||||
# USAGE:
|
# USAGE:
|
||||||
#
|
#
|
||||||
# 1. Copy this file into your cmake modules path.
|
# 1. Copy this file into your cmake modules path.
|
||||||
@@ -109,10 +117,8 @@
|
|||||||
# using a CMake option() to enable it just optionally):
|
# using a CMake option() to enable it just optionally):
|
||||||
# include(CodeCoverage)
|
# include(CodeCoverage)
|
||||||
#
|
#
|
||||||
# 3. Append necessary compiler flags for all supported source files:
|
# 3. Append necessary compiler flags and linker flags for all supported source files:
|
||||||
# append_coverage_compiler_flags()
|
# add_code_coverage_to_target(<target> <PRIVATE|PUBLIC|INTERFACE>)
|
||||||
# Or for specific target:
|
|
||||||
# append_coverage_compiler_flags_to_target(YOUR_TARGET_NAME)
|
|
||||||
#
|
#
|
||||||
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
|
# 3.a (OPTIONAL) Set appropriate optimization flags, e.g. -O0, -O1 or -Og
|
||||||
#
|
#
|
||||||
@@ -201,67 +207,69 @@ endforeach()
|
|||||||
|
|
||||||
set(COVERAGE_COMPILER_FLAGS "-g --coverage"
|
set(COVERAGE_COMPILER_FLAGS "-g --coverage"
|
||||||
CACHE INTERNAL "")
|
CACHE INTERNAL "")
|
||||||
|
|
||||||
|
set(COVERAGE_CXX_COMPILER_FLAGS "")
|
||||||
|
set(COVERAGE_C_COMPILER_FLAGS "")
|
||||||
|
set(COVERAGE_CXX_LINKER_FLAGS "")
|
||||||
|
set(COVERAGE_C_LINKER_FLAGS "")
|
||||||
|
|
||||||
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
|
if(CMAKE_CXX_COMPILER_ID MATCHES "(GNU|Clang)")
|
||||||
include(CheckCXXCompilerFlag)
|
include(CheckCXXCompilerFlag)
|
||||||
include(CheckCCompilerFlag)
|
include(CheckCCompilerFlag)
|
||||||
|
include(CheckLinkerFlag)
|
||||||
|
|
||||||
|
set(COVERAGE_CXX_COMPILER_FLAGS ${COVERAGE_COMPILER_FLAGS})
|
||||||
|
set(COVERAGE_C_COMPILER_FLAGS ${COVERAGE_COMPILER_FLAGS})
|
||||||
|
set(COVERAGE_CXX_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})
|
||||||
|
set(COVERAGE_C_LINKER_FLAGS ${COVERAGE_COMPILER_FLAGS})
|
||||||
|
|
||||||
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
|
check_cxx_compiler_flag(-fprofile-abs-path HAVE_cxx_fprofile_abs_path)
|
||||||
if(HAVE_cxx_fprofile_abs_path)
|
if(HAVE_cxx_fprofile_abs_path)
|
||||||
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
|
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-abs-path")
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
|
check_c_compiler_flag(-fprofile-abs-path HAVE_c_fprofile_abs_path)
|
||||||
if(HAVE_c_fprofile_abs_path)
|
if(HAVE_c_fprofile_abs_path)
|
||||||
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-abs-path")
|
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-abs-path")
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
check_cxx_compiler_flag(-fprofile-update HAVE_cxx_fprofile_update)
|
check_linker_flag(CXX -fprofile-abs-path HAVE_cxx_linker_fprofile_abs_path)
|
||||||
|
if(HAVE_cxx_linker_fprofile_abs_path)
|
||||||
|
set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-abs-path")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
check_linker_flag(C -fprofile-abs-path HAVE_c_linker_fprofile_abs_path)
|
||||||
|
if(HAVE_c_linker_fprofile_abs_path)
|
||||||
|
set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-abs-path")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
check_cxx_compiler_flag(-fprofile-update=atomic HAVE_cxx_fprofile_update)
|
||||||
if(HAVE_cxx_fprofile_update)
|
if(HAVE_cxx_fprofile_update)
|
||||||
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
|
set(COVERAGE_CXX_COMPILER_FLAGS "${COVERAGE_CXX_COMPILER_FLAGS} -fprofile-update=atomic")
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
check_c_compiler_flag(-fprofile-update HAVE_c_fprofile_update)
|
check_c_compiler_flag(-fprofile-update=atomic HAVE_c_fprofile_update)
|
||||||
if(HAVE_c_fprofile_update)
|
if(HAVE_c_fprofile_update)
|
||||||
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_COMPILER_FLAGS} -fprofile-update=atomic")
|
set(COVERAGE_C_COMPILER_FLAGS "${COVERAGE_C_COMPILER_FLAGS} -fprofile-update=atomic")
|
||||||
endif()
|
endif()
|
||||||
endif()
|
|
||||||
|
|
||||||
set(CMAKE_Fortran_FLAGS_COVERAGE
|
check_linker_flag(CXX -fprofile-update=atomic HAVE_cxx_linker_fprofile_update)
|
||||||
${COVERAGE_COMPILER_FLAGS}
|
if(HAVE_cxx_linker_fprofile_update)
|
||||||
CACHE STRING "Flags used by the Fortran compiler during coverage builds."
|
set(COVERAGE_CXX_LINKER_FLAGS "${COVERAGE_CXX_LINKER_FLAGS} -fprofile-update=atomic")
|
||||||
FORCE )
|
endif()
|
||||||
set(CMAKE_CXX_FLAGS_COVERAGE
|
|
||||||
${COVERAGE_COMPILER_FLAGS}
|
check_linker_flag(C -fprofile-update=atomic HAVE_c_linker_fprofile_update)
|
||||||
CACHE STRING "Flags used by the C++ compiler during coverage builds."
|
if(HAVE_c_linker_fprofile_update)
|
||||||
FORCE )
|
set(COVERAGE_C_LINKER_FLAGS "${COVERAGE_C_LINKER_FLAGS} -fprofile-update=atomic")
|
||||||
set(CMAKE_C_FLAGS_COVERAGE
|
endif()
|
||||||
${COVERAGE_COMPILER_FLAGS}
|
|
||||||
CACHE STRING "Flags used by the C compiler during coverage builds."
|
endif()
|
||||||
FORCE )
|
|
||||||
set(CMAKE_EXE_LINKER_FLAGS_COVERAGE
|
|
||||||
""
|
|
||||||
CACHE STRING "Flags used for linking binaries during coverage builds."
|
|
||||||
FORCE )
|
|
||||||
set(CMAKE_SHARED_LINKER_FLAGS_COVERAGE
|
|
||||||
""
|
|
||||||
CACHE STRING "Flags used by the shared libraries linker during coverage builds."
|
|
||||||
FORCE )
|
|
||||||
mark_as_advanced(
|
|
||||||
CMAKE_Fortran_FLAGS_COVERAGE
|
|
||||||
CMAKE_CXX_FLAGS_COVERAGE
|
|
||||||
CMAKE_C_FLAGS_COVERAGE
|
|
||||||
CMAKE_EXE_LINKER_FLAGS_COVERAGE
|
|
||||||
CMAKE_SHARED_LINKER_FLAGS_COVERAGE )
|
|
||||||
|
|
||||||
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
|
get_property(GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
|
||||||
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
|
if(NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG))
|
||||||
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
|
message(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
|
||||||
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
|
endif() # NOT (CMAKE_BUILD_TYPE STREQUAL "Debug" OR GENERATOR_IS_MULTI_CONFIG)
|
||||||
|
|
||||||
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
|
|
||||||
link_libraries(gcov)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
# Defines a target for running and collection code coverage information
|
# Defines a target for running and collection code coverage information
|
||||||
# Builds dependencies, runs the given executable and outputs reports.
|
# Builds dependencies, runs the given executable and outputs reports.
|
||||||
# NOTE! The executable should always have a ZERO as exit code otherwise
|
# NOTE! The executable should always have a ZERO as exit code otherwise
|
||||||
@@ -446,23 +454,24 @@ function(setup_target_for_coverage_gcovr)
|
|||||||
|
|
||||||
# Show info where to find the report
|
# Show info where to find the report
|
||||||
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
|
add_custom_command(TARGET ${Coverage_NAME} POST_BUILD
|
||||||
COMMAND ;
|
COMMAND echo
|
||||||
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
|
COMMENT "Code coverage report saved in ${GCOVR_OUTPUT_FILE} formatted as ${Coverage_FORMAT}"
|
||||||
)
|
)
|
||||||
endfunction() # setup_target_for_coverage_gcovr
|
endfunction() # setup_target_for_coverage_gcovr
|
||||||
|
|
||||||
function(append_coverage_compiler_flags)
|
function(add_code_coverage_to_target name scope)
|
||||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
separate_arguments(COVERAGE_CXX_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_COMPILER_FLAGS}")
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
separate_arguments(COVERAGE_C_COMPILER_FLAGS NATIVE_COMMAND "${COVERAGE_C_COMPILER_FLAGS}")
|
||||||
set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${COVERAGE_COMPILER_FLAGS}" PARENT_SCOPE)
|
separate_arguments(COVERAGE_CXX_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_CXX_LINKER_FLAGS}")
|
||||||
message(STATUS "Appending code coverage compiler flags: ${COVERAGE_COMPILER_FLAGS}")
|
separate_arguments(COVERAGE_C_LINKER_FLAGS NATIVE_COMMAND "${COVERAGE_C_LINKER_FLAGS}")
|
||||||
endfunction() # append_coverage_compiler_flags
|
|
||||||
|
|
||||||
# Setup coverage for specific library
|
# Add compiler options to the target
|
||||||
function(append_coverage_compiler_flags_to_target name)
|
target_compile_options(${name} ${scope}
|
||||||
separate_arguments(_flag_list NATIVE_COMMAND "${COVERAGE_COMPILER_FLAGS}")
|
$<$<COMPILE_LANGUAGE:CXX>:${COVERAGE_CXX_COMPILER_FLAGS}>
|
||||||
target_compile_options(${name} PRIVATE ${_flag_list})
|
$<$<COMPILE_LANGUAGE:C>:${COVERAGE_C_COMPILER_FLAGS}>)
|
||||||
if(CMAKE_C_COMPILER_ID STREQUAL "GNU" OR CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
|
|
||||||
target_link_libraries(${name} PRIVATE gcov)
|
target_link_libraries (${name} ${scope}
|
||||||
endif()
|
$<$<LINK_LANGUAGE:CXX>:${COVERAGE_CXX_LINKER_FLAGS} gcov>
|
||||||
endfunction()
|
$<$<LINK_LANGUAGE:C>:${COVERAGE_C_LINKER_FLAGS} gcov>
|
||||||
|
)
|
||||||
|
endfunction() # add_code_coverage_to_target
|
||||||
|
|||||||
@@ -1,22 +0,0 @@
|
|||||||
option (validator_keys "Enables building of validator-keys-tool as a separate target (imported via FetchContent)" OFF)
|
|
||||||
|
|
||||||
if (validator_keys)
|
|
||||||
git_branch (current_branch)
|
|
||||||
# default to tracking VK master branch unless we are on release
|
|
||||||
if (NOT (current_branch STREQUAL "release"))
|
|
||||||
set (current_branch "master")
|
|
||||||
endif ()
|
|
||||||
message (STATUS "tracking ValidatorKeys branch: ${current_branch}")
|
|
||||||
|
|
||||||
FetchContent_Declare (
|
|
||||||
validator_keys_src
|
|
||||||
GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
|
|
||||||
GIT_TAG "${current_branch}"
|
|
||||||
)
|
|
||||||
FetchContent_GetProperties (validator_keys_src)
|
|
||||||
if (NOT validator_keys_src_POPULATED)
|
|
||||||
message (STATUS "Pausing to download ValidatorKeys...")
|
|
||||||
FetchContent_Populate (validator_keys_src)
|
|
||||||
endif ()
|
|
||||||
add_subdirectory (${validator_keys_src_SOURCE_DIR} ${CMAKE_BINARY_DIR}/validator-keys)
|
|
||||||
endif ()
|
|
||||||
41
cmake/XrplAddTest.cmake
Normal file
41
cmake/XrplAddTest.cmake
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
include(isolate_headers)
|
||||||
|
|
||||||
|
function(xrpl_add_test name)
|
||||||
|
set(target ${PROJECT_NAME}.test.${name})
|
||||||
|
|
||||||
|
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
|
||||||
|
"${CMAKE_CURRENT_SOURCE_DIR}/${name}/*.cpp"
|
||||||
|
"${CMAKE_CURRENT_SOURCE_DIR}/${name}.cpp"
|
||||||
|
)
|
||||||
|
add_executable(${target} ${ARGN} ${sources})
|
||||||
|
|
||||||
|
isolate_headers(
|
||||||
|
${target}
|
||||||
|
"${CMAKE_SOURCE_DIR}"
|
||||||
|
"${CMAKE_SOURCE_DIR}/tests/${name}"
|
||||||
|
PRIVATE
|
||||||
|
)
|
||||||
|
|
||||||
|
# Make sure the test isn't optimized away in unity builds
|
||||||
|
set_target_properties(${target} PROPERTIES
|
||||||
|
UNITY_BUILD_MODE GROUP
|
||||||
|
UNITY_BUILD_BATCH_SIZE 0) # Adjust as needed
|
||||||
|
|
||||||
|
add_test(NAME ${target} COMMAND ${target})
|
||||||
|
set_tests_properties(
|
||||||
|
${target} PROPERTIES
|
||||||
|
FIXTURES_REQUIRED ${target}_fixture
|
||||||
|
)
|
||||||
|
|
||||||
|
add_test(
|
||||||
|
NAME ${target}.build
|
||||||
|
COMMAND
|
||||||
|
${CMAKE_COMMAND}
|
||||||
|
--build ${CMAKE_BINARY_DIR}
|
||||||
|
--config $<CONFIG>
|
||||||
|
--target ${target}
|
||||||
|
)
|
||||||
|
set_tests_properties(${target}.build PROPERTIES
|
||||||
|
FIXTURES_SETUP ${target}_fixture
|
||||||
|
)
|
||||||
|
endfunction()
|
||||||
@@ -7,22 +7,25 @@
|
|||||||
toolchain file, especially the ABI-impacting ones
|
toolchain file, especially the ABI-impacting ones
|
||||||
#]=========================================================]
|
#]=========================================================]
|
||||||
add_library (common INTERFACE)
|
add_library (common INTERFACE)
|
||||||
add_library (Ripple::common ALIAS common)
|
add_library (Xrpl::common ALIAS common)
|
||||||
# add a single global dependency on this interface lib
|
# add a single global dependency on this interface lib
|
||||||
link_libraries (Ripple::common)
|
link_libraries (Xrpl::common)
|
||||||
set_target_properties (common
|
set_target_properties (common
|
||||||
PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ON)
|
PROPERTIES INTERFACE_POSITION_INDEPENDENT_CODE ON)
|
||||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||||
target_compile_definitions (common
|
target_compile_definitions (common
|
||||||
INTERFACE
|
INTERFACE
|
||||||
$<$<CONFIG:Debug>:DEBUG _DEBUG>
|
$<$<CONFIG:Debug>:DEBUG _DEBUG>
|
||||||
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>)
|
#[===[
|
||||||
# ^^^^ NOTE: CMAKE release builds already have NDEBUG
|
NOTE: CMAKE release builds already have NDEBUG defined, so no need to add it
|
||||||
# defined, so no need to add it explicitly except for
|
explicitly except for the special case of (profile ON) and (assert OFF).
|
||||||
# this special case of (profile ON) and (assert OFF)
|
Presumably this is because we don't want profile builds asserting unless
|
||||||
# -- presumably this is because we don't want profile
|
asserts were specifically requested.
|
||||||
# builds asserting unless asserts were specifically
|
]===]
|
||||||
# requested
|
$<$<AND:$<BOOL:${profile}>,$<NOT:$<BOOL:${assert}>>>:NDEBUG>
|
||||||
|
# TODO: Remove once we have migrated functions from OpenSSL 1.x to 3.x.
|
||||||
|
OPENSSL_SUPPRESS_DEPRECATED
|
||||||
|
)
|
||||||
|
|
||||||
if (MSVC)
|
if (MSVC)
|
||||||
# remove existing exception flag since we set it to -EHa
|
# remove existing exception flag since we set it to -EHa
|
||||||
@@ -90,28 +93,16 @@ if (MSVC)
|
|||||||
-errorreport:none
|
-errorreport:none
|
||||||
-machine:X64)
|
-machine:X64)
|
||||||
else ()
|
else ()
|
||||||
# HACK : because these need to come first, before any warning demotion
|
|
||||||
string (APPEND CMAKE_CXX_FLAGS " -Wall -Wdeprecated")
|
|
||||||
if (wextra)
|
|
||||||
string (APPEND CMAKE_CXX_FLAGS " -Wextra -Wno-unused-parameter")
|
|
||||||
endif ()
|
|
||||||
# not MSVC
|
|
||||||
target_compile_options (common
|
target_compile_options (common
|
||||||
INTERFACE
|
INTERFACE
|
||||||
|
-Wall
|
||||||
|
-Wdeprecated
|
||||||
|
$<$<BOOL:${is_clang}>:-Wno-deprecated-declarations>
|
||||||
|
$<$<BOOL:${wextra}>:-Wextra -Wno-unused-parameter>
|
||||||
$<$<BOOL:${werr}>:-Werror>
|
$<$<BOOL:${werr}>:-Werror>
|
||||||
$<$<COMPILE_LANGUAGE:CXX>:
|
|
||||||
-frtti
|
|
||||||
-Wnon-virtual-dtor
|
|
||||||
>
|
|
||||||
-Wno-sign-compare
|
|
||||||
-Wno-char-subscripts
|
|
||||||
-Wno-format
|
|
||||||
-Wno-unused-local-typedefs
|
|
||||||
-fstack-protector
|
-fstack-protector
|
||||||
$<$<BOOL:${is_gcc}>:
|
-Wno-sign-compare
|
||||||
-Wno-unused-but-set-variable
|
-Wno-unused-but-set-variable
|
||||||
-Wno-deprecated
|
|
||||||
>
|
|
||||||
$<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
|
$<$<NOT:$<CONFIG:Debug>>:-fno-strict-aliasing>
|
||||||
# tweak gcc optimization for debug
|
# tweak gcc optimization for debug
|
||||||
$<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>
|
$<$<AND:$<BOOL:${is_gcc}>,$<CONFIG:Debug>>:-O0>
|
||||||
@@ -45,7 +45,7 @@ if (static OR APPLE OR MSVC)
|
|||||||
set (OPENSSL_USE_STATIC_LIBS ON)
|
set (OPENSSL_USE_STATIC_LIBS ON)
|
||||||
endif ()
|
endif ()
|
||||||
set (OPENSSL_MSVC_STATIC_RT ON)
|
set (OPENSSL_MSVC_STATIC_RT ON)
|
||||||
find_dependency (OpenSSL 1.1.1 REQUIRED)
|
find_dependency (OpenSSL REQUIRED)
|
||||||
find_dependency (ZLIB)
|
find_dependency (ZLIB)
|
||||||
find_dependency (date)
|
find_dependency (date)
|
||||||
if (TARGET ZLIB::ZLIB)
|
if (TARGET ZLIB::ZLIB)
|
||||||
@@ -53,4 +53,4 @@ if (TARGET ZLIB::ZLIB)
|
|||||||
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
|
INTERFACE_LINK_LIBRARIES ZLIB::ZLIB)
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
include ("${CMAKE_CURRENT_LIST_DIR}/RippleTargets.cmake")
|
include ("${CMAKE_CURRENT_LIST_DIR}/XrplTargets.cmake")
|
||||||
@@ -13,7 +13,7 @@ set_target_properties(xrpl.libpb PROPERTIES UNITY_BUILD OFF)
|
|||||||
target_protobuf_sources(xrpl.libpb xrpl/proto
|
target_protobuf_sources(xrpl.libpb xrpl/proto
|
||||||
LANGUAGE cpp
|
LANGUAGE cpp
|
||||||
IMPORT_DIRS include/xrpl/proto
|
IMPORT_DIRS include/xrpl/proto
|
||||||
PROTOS include/xrpl/proto/ripple.proto
|
PROTOS include/xrpl/proto/xrpl.proto
|
||||||
)
|
)
|
||||||
|
|
||||||
file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
|
file(GLOB_RECURSE protos "include/xrpl/proto/org/*.proto")
|
||||||
@@ -53,14 +53,15 @@ add_library(xrpl.imports.main INTERFACE)
|
|||||||
|
|
||||||
target_link_libraries(xrpl.imports.main
|
target_link_libraries(xrpl.imports.main
|
||||||
INTERFACE
|
INTERFACE
|
||||||
LibArchive::LibArchive
|
|
||||||
OpenSSL::Crypto
|
|
||||||
Ripple::boost
|
|
||||||
Ripple::opts
|
|
||||||
Ripple::syslibs
|
|
||||||
absl::random_random
|
absl::random_random
|
||||||
date::date
|
date::date
|
||||||
ed25519::ed25519
|
ed25519::ed25519
|
||||||
|
LibArchive::LibArchive
|
||||||
|
OpenSSL::Crypto
|
||||||
|
Xrpl::boost
|
||||||
|
Xrpl::libs
|
||||||
|
Xrpl::opts
|
||||||
|
Xrpl::syslibs
|
||||||
secp256k1::secp256k1
|
secp256k1::secp256k1
|
||||||
xrpl.libpb
|
xrpl.libpb
|
||||||
xxHash::xxhash
|
xxHash::xxhash
|
||||||
@@ -99,9 +100,39 @@ target_link_libraries(xrpl.libxrpl.protocol PUBLIC
|
|||||||
add_module(xrpl resource)
|
add_module(xrpl resource)
|
||||||
target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)
|
target_link_libraries(xrpl.libxrpl.resource PUBLIC xrpl.libxrpl.protocol)
|
||||||
|
|
||||||
|
# Level 06
|
||||||
|
add_module(xrpl net)
|
||||||
|
target_link_libraries(xrpl.libxrpl.net PUBLIC
|
||||||
|
xrpl.libxrpl.basics
|
||||||
|
xrpl.libxrpl.json
|
||||||
|
xrpl.libxrpl.protocol
|
||||||
|
xrpl.libxrpl.resource
|
||||||
|
)
|
||||||
|
|
||||||
add_module(xrpl server)
|
add_module(xrpl server)
|
||||||
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)
|
target_link_libraries(xrpl.libxrpl.server PUBLIC xrpl.libxrpl.protocol)
|
||||||
|
|
||||||
|
add_module(xrpl nodestore)
|
||||||
|
target_link_libraries(xrpl.libxrpl.nodestore PUBLIC
|
||||||
|
xrpl.libxrpl.basics
|
||||||
|
xrpl.libxrpl.json
|
||||||
|
xrpl.libxrpl.protocol
|
||||||
|
)
|
||||||
|
|
||||||
|
add_module(xrpl shamap)
|
||||||
|
target_link_libraries(xrpl.libxrpl.shamap PUBLIC
|
||||||
|
xrpl.libxrpl.basics
|
||||||
|
xrpl.libxrpl.crypto
|
||||||
|
xrpl.libxrpl.protocol
|
||||||
|
xrpl.libxrpl.nodestore
|
||||||
|
)
|
||||||
|
|
||||||
|
add_module(xrpl ledger)
|
||||||
|
target_link_libraries(xrpl.libxrpl.ledger PUBLIC
|
||||||
|
xrpl.libxrpl.basics
|
||||||
|
xrpl.libxrpl.json
|
||||||
|
xrpl.libxrpl.protocol
|
||||||
|
)
|
||||||
|
|
||||||
add_library(xrpl.libxrpl)
|
add_library(xrpl.libxrpl)
|
||||||
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)
|
set_target_properties(xrpl.libxrpl PROPERTIES OUTPUT_NAME xrpl)
|
||||||
@@ -121,6 +152,10 @@ target_link_modules(xrpl PUBLIC
|
|||||||
protocol
|
protocol
|
||||||
resource
|
resource
|
||||||
server
|
server
|
||||||
|
nodestore
|
||||||
|
shamap
|
||||||
|
net
|
||||||
|
ledger
|
||||||
)
|
)
|
||||||
|
|
||||||
# All headers in libxrpl are in modules.
|
# All headers in libxrpl are in modules.
|
||||||
@@ -133,14 +168,14 @@ target_link_modules(xrpl PUBLIC
|
|||||||
# $<INSTALL_INTERFACE:include>)
|
# $<INSTALL_INTERFACE:include>)
|
||||||
|
|
||||||
if(xrpld)
|
if(xrpld)
|
||||||
add_executable(rippled)
|
add_executable(xrpld)
|
||||||
if(tests)
|
if(tests)
|
||||||
target_compile_definitions(rippled PUBLIC ENABLE_TESTS)
|
target_compile_definitions(xrpld PUBLIC ENABLE_TESTS)
|
||||||
target_compile_definitions(rippled PRIVATE
|
target_compile_definitions(xrpld PRIVATE
|
||||||
UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}
|
UNIT_TEST_REFERENCE_FEE=${UNIT_TEST_REFERENCE_FEE}
|
||||||
)
|
)
|
||||||
endif()
|
endif()
|
||||||
target_include_directories(rippled
|
target_include_directories(xrpld
|
||||||
PRIVATE
|
PRIVATE
|
||||||
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
|
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
|
||||||
)
|
)
|
||||||
@@ -148,36 +183,36 @@ if(xrpld)
|
|||||||
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
|
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
|
||||||
"${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp"
|
"${CMAKE_CURRENT_SOURCE_DIR}/src/xrpld/*.cpp"
|
||||||
)
|
)
|
||||||
target_sources(rippled PRIVATE ${sources})
|
target_sources(xrpld PRIVATE ${sources})
|
||||||
|
|
||||||
if(tests)
|
if(tests)
|
||||||
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
|
file(GLOB_RECURSE sources CONFIGURE_DEPENDS
|
||||||
"${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp"
|
"${CMAKE_CURRENT_SOURCE_DIR}/src/test/*.cpp"
|
||||||
)
|
)
|
||||||
target_sources(rippled PRIVATE ${sources})
|
target_sources(xrpld PRIVATE ${sources})
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
target_link_libraries(rippled
|
target_link_libraries(xrpld
|
||||||
Ripple::boost
|
Xrpl::boost
|
||||||
Ripple::opts
|
Xrpl::opts
|
||||||
Ripple::libs
|
Xrpl::libs
|
||||||
xrpl.libxrpl
|
xrpl.libxrpl
|
||||||
)
|
)
|
||||||
exclude_if_included(rippled)
|
exclude_if_included(xrpld)
|
||||||
# define a macro for tests that might need to
|
# define a macro for tests that might need to
|
||||||
# be exluded or run differently in CI environment
|
# be exluded or run differently in CI environment
|
||||||
if(is_ci)
|
if(is_ci)
|
||||||
target_compile_definitions(rippled PRIVATE RIPPLED_RUNNING_IN_CI)
|
target_compile_definitions(xrpld PRIVATE XRPL_RUNNING_IN_CI)
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if(voidstar)
|
if(voidstar)
|
||||||
target_compile_options(rippled
|
target_compile_options(xrpld
|
||||||
PRIVATE
|
PRIVATE
|
||||||
-fsanitize-coverage=trace-pc-guard
|
-fsanitize-coverage=trace-pc-guard
|
||||||
)
|
)
|
||||||
# rippled requires access to antithesis-sdk-cpp implementation file
|
# xrpld requires access to antithesis-sdk-cpp implementation file
|
||||||
# antithesis_instrumentation.h, which is not exported as INTERFACE
|
# antithesis_instrumentation.h, which is not exported as INTERFACE
|
||||||
target_include_directories(rippled
|
target_include_directories(xrpld
|
||||||
PRIVATE
|
PRIVATE
|
||||||
${CMAKE_SOURCE_DIR}/external/antithesis-sdk
|
${CMAKE_SOURCE_DIR}/external/antithesis-sdk
|
||||||
)
|
)
|
||||||
@@ -191,4 +226,6 @@ if(xrpld)
|
|||||||
src/test/ledger/Invariants_test.cpp
|
src/test/ledger/Invariants_test.cpp
|
||||||
PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE)
|
PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE)
|
||||||
endif()
|
endif()
|
||||||
|
# Create a symlink named "rippled" for backward compatibility.
|
||||||
|
add_custom_command(TARGET xrpld POST_BUILD COMMAND ${CMAKE_COMMAND} -E create_symlink "xrpld" "rippled")
|
||||||
endif()
|
endif()
|
||||||
@@ -31,8 +31,10 @@ list(APPEND GCOVR_ADDITIONAL_ARGS
|
|||||||
setup_target_for_coverage_gcovr(
|
setup_target_for_coverage_gcovr(
|
||||||
NAME coverage
|
NAME coverage
|
||||||
FORMAT ${coverage_format}
|
FORMAT ${coverage_format}
|
||||||
EXECUTABLE rippled
|
EXECUTABLE xrpld
|
||||||
EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
|
EXECUTABLE_ARGS --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --unittest-jobs ${coverage_test_parallelism} --quiet --unittest-log
|
||||||
EXCLUDE "src/test" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
|
EXCLUDE "src/test" "src/tests" "include/xrpl/beast/test" "include/xrpl/beast/unit_test" "${CMAKE_BINARY_DIR}/pb-xrpl.libpb"
|
||||||
DEPENDENCIES rippled
|
DEPENDENCIES xrpld
|
||||||
)
|
)
|
||||||
|
|
||||||
|
add_code_coverage_to_target(opts INTERFACE)
|
||||||
@@ -8,20 +8,25 @@ install (
|
|||||||
TARGETS
|
TARGETS
|
||||||
common
|
common
|
||||||
opts
|
opts
|
||||||
ripple_syslibs
|
xrpl_boost
|
||||||
ripple_boost
|
xrpl_libs
|
||||||
|
xrpl_syslibs
|
||||||
xrpl.imports.main
|
xrpl.imports.main
|
||||||
xrpl.libpb
|
xrpl.libpb
|
||||||
|
xrpl.libxrpl
|
||||||
xrpl.libxrpl.basics
|
xrpl.libxrpl.basics
|
||||||
xrpl.libxrpl.beast
|
xrpl.libxrpl.beast
|
||||||
xrpl.libxrpl.crypto
|
xrpl.libxrpl.crypto
|
||||||
xrpl.libxrpl.json
|
xrpl.libxrpl.json
|
||||||
|
xrpl.libxrpl.ledger
|
||||||
|
xrpl.libxrpl.net
|
||||||
|
xrpl.libxrpl.nodestore
|
||||||
xrpl.libxrpl.protocol
|
xrpl.libxrpl.protocol
|
||||||
xrpl.libxrpl.resource
|
xrpl.libxrpl.resource
|
||||||
xrpl.libxrpl.server
|
xrpl.libxrpl.server
|
||||||
xrpl.libxrpl
|
xrpl.libxrpl.shamap
|
||||||
antithesis-sdk-cpp
|
antithesis-sdk-cpp
|
||||||
EXPORT RippleExports
|
EXPORT XrplExports
|
||||||
LIBRARY DESTINATION lib
|
LIBRARY DESTINATION lib
|
||||||
ARCHIVE DESTINATION lib
|
ARCHIVE DESTINATION lib
|
||||||
RUNTIME DESTINATION bin
|
RUNTIME DESTINATION bin
|
||||||
@@ -36,22 +41,22 @@ install(CODE "
|
|||||||
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
|
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
|
||||||
include(create_symbolic_link)
|
include(create_symbolic_link)
|
||||||
create_symbolic_link(xrpl \
|
create_symbolic_link(xrpl \
|
||||||
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/ripple)
|
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR}/xrpl)
|
||||||
")
|
")
|
||||||
|
|
||||||
install (EXPORT RippleExports
|
install (EXPORT XrplExports
|
||||||
FILE RippleTargets.cmake
|
FILE XrplTargets.cmake
|
||||||
NAMESPACE Ripple::
|
NAMESPACE Xrpl::
|
||||||
DESTINATION lib/cmake/ripple)
|
DESTINATION lib/cmake/xrpl)
|
||||||
include (CMakePackageConfigHelpers)
|
include (CMakePackageConfigHelpers)
|
||||||
write_basic_package_version_file (
|
write_basic_package_version_file (
|
||||||
RippleConfigVersion.cmake
|
XrplConfigVersion.cmake
|
||||||
VERSION ${rippled_version}
|
VERSION ${xrpld_version}
|
||||||
COMPATIBILITY SameMajorVersion)
|
COMPATIBILITY SameMajorVersion)
|
||||||
|
|
||||||
if (is_root_project AND TARGET rippled)
|
if (is_root_project AND TARGET xrpld)
|
||||||
install (TARGETS rippled RUNTIME DESTINATION bin)
|
install (TARGETS xrpld RUNTIME DESTINATION bin)
|
||||||
set_target_properties(rippled PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
|
set_target_properties(xrpld PROPERTIES INSTALL_RPATH_USE_LINK_PATH ON)
|
||||||
# sample configs should not overwrite existing files
|
# sample configs should not overwrite existing files
|
||||||
# install if-not-exists workaround as suggested by
|
# install if-not-exists workaround as suggested by
|
||||||
# https://cmake.org/Bug/view.php?id=12646
|
# https://cmake.org/Bug/view.php?id=12646
|
||||||
@@ -69,13 +74,13 @@ if (is_root_project AND TARGET rippled)
|
|||||||
install(CODE "
|
install(CODE "
|
||||||
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
|
set(CMAKE_MODULE_PATH \"${CMAKE_MODULE_PATH}\")
|
||||||
include(create_symbolic_link)
|
include(create_symbolic_link)
|
||||||
create_symbolic_link(rippled${suffix} \
|
create_symbolic_link(xrpld${suffix} \
|
||||||
\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
|
\$ENV{DESTDIR}\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/xrpld${suffix})
|
||||||
")
|
")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
install (
|
install (
|
||||||
FILES
|
FILES
|
||||||
${CMAKE_CURRENT_SOURCE_DIR}/cmake/RippleConfig.cmake
|
${CMAKE_CURRENT_SOURCE_DIR}/cmake/XrplConfig.cmake
|
||||||
${CMAKE_CURRENT_BINARY_DIR}/RippleConfigVersion.cmake
|
${CMAKE_CURRENT_BINARY_DIR}/XrplConfigVersion.cmake
|
||||||
DESTINATION lib/cmake/ripple)
|
DESTINATION lib/cmake/xrpl)
|
||||||
@@ -1,9 +1,9 @@
|
|||||||
#[===================================================================[
|
#[===================================================================[
|
||||||
rippled compile options/settings via an interface library
|
xrpld compile options/settings via an interface library
|
||||||
#]===================================================================]
|
#]===================================================================]
|
||||||
|
|
||||||
add_library (opts INTERFACE)
|
add_library (opts INTERFACE)
|
||||||
add_library (Ripple::opts ALIAS opts)
|
add_library (Xrpl::opts ALIAS opts)
|
||||||
target_compile_definitions (opts
|
target_compile_definitions (opts
|
||||||
INTERFACE
|
INTERFACE
|
||||||
BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
|
BOOST_ASIO_DISABLE_HANDLER_TYPE_REQUIREMENTS
|
||||||
@@ -21,22 +21,18 @@ target_compile_definitions (opts
|
|||||||
>
|
>
|
||||||
$<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
|
$<$<BOOL:${beast_no_unit_test_inline}>:BEAST_NO_UNIT_TEST_INLINE=1>
|
||||||
$<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
|
$<$<BOOL:${beast_disable_autolink}>:BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES=1>
|
||||||
$<$<BOOL:${single_io_service_thread}>:RIPPLE_SINGLE_IO_SERVICE_THREAD=1>
|
$<$<BOOL:${single_io_service_thread}>:XRPL_SINGLE_IO_SERVICE_THREAD=1>
|
||||||
$<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
|
$<$<BOOL:${voidstar}>:ENABLE_VOIDSTAR>)
|
||||||
target_compile_options (opts
|
target_compile_options (opts
|
||||||
INTERFACE
|
INTERFACE
|
||||||
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
|
$<$<AND:$<BOOL:${is_gcc}>,$<COMPILE_LANGUAGE:CXX>>:-Wsuggest-override>
|
||||||
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
|
$<$<BOOL:${is_gcc}>:-Wno-maybe-uninitialized>
|
||||||
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
|
$<$<BOOL:${perf}>:-fno-omit-frame-pointer>
|
||||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
|
|
||||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
|
|
||||||
$<$<BOOL:${profile}>:-pg>
|
$<$<BOOL:${profile}>:-pg>
|
||||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
||||||
|
|
||||||
target_link_libraries (opts
|
target_link_libraries (opts
|
||||||
INTERFACE
|
INTERFACE
|
||||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${coverage}>>:-g --coverage -fprofile-abs-path>
|
|
||||||
$<$<AND:$<BOOL:${is_clang}>,$<BOOL:${coverage}>>:-g --coverage>
|
|
||||||
$<$<BOOL:${profile}>:-pg>
|
$<$<BOOL:${profile}>:-pg>
|
||||||
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
$<$<AND:$<BOOL:${is_gcc}>,$<BOOL:${profile}>>:-p>)
|
||||||
|
|
||||||
@@ -63,12 +59,12 @@ if (san)
|
|||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
#[===================================================================[
|
#[===================================================================[
|
||||||
rippled transitive library deps via an interface library
|
xrpld transitive library deps via an interface library
|
||||||
#]===================================================================]
|
#]===================================================================]
|
||||||
|
|
||||||
add_library (ripple_syslibs INTERFACE)
|
add_library (xrpl_syslibs INTERFACE)
|
||||||
add_library (Ripple::syslibs ALIAS ripple_syslibs)
|
add_library (Xrpl::syslibs ALIAS xrpl_syslibs)
|
||||||
target_link_libraries (ripple_syslibs
|
target_link_libraries (xrpl_syslibs
|
||||||
INTERFACE
|
INTERFACE
|
||||||
$<$<BOOL:${MSVC}>:
|
$<$<BOOL:${MSVC}>:
|
||||||
legacy_stdio_definitions.lib
|
legacy_stdio_definitions.lib
|
||||||
@@ -93,9 +89,9 @@ target_link_libraries (ripple_syslibs
|
|||||||
if (NOT MSVC)
|
if (NOT MSVC)
|
||||||
set (THREADS_PREFER_PTHREAD_FLAG ON)
|
set (THREADS_PREFER_PTHREAD_FLAG ON)
|
||||||
find_package (Threads)
|
find_package (Threads)
|
||||||
target_link_libraries (ripple_syslibs INTERFACE Threads::Threads)
|
target_link_libraries (xrpl_syslibs INTERFACE Threads::Threads)
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
add_library (ripple_libs INTERFACE)
|
add_library (xrpl_libs INTERFACE)
|
||||||
add_library (Ripple::libs ALIAS ripple_libs)
|
add_library (Xrpl::libs ALIAS xrpl_libs)
|
||||||
target_link_libraries (ripple_libs INTERFACE Ripple::syslibs)
|
target_link_libraries (xrpl_libs INTERFACE Xrpl::syslibs)
|
||||||
@@ -2,16 +2,6 @@
|
|||||||
convenience variables and sanity checks
|
convenience variables and sanity checks
|
||||||
#]===================================================================]
|
#]===================================================================]
|
||||||
|
|
||||||
include(ProcessorCount)
|
|
||||||
|
|
||||||
if (NOT ep_procs)
|
|
||||||
ProcessorCount(ep_procs)
|
|
||||||
if (ep_procs GREATER 1)
|
|
||||||
# never use more than half of cores for EP builds
|
|
||||||
math (EXPR ep_procs "${ep_procs} / 2")
|
|
||||||
message (STATUS "Using ${ep_procs} cores for ExternalProject builds.")
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
|
get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
|
||||||
|
|
||||||
set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)
|
set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE)
|
||||||
@@ -71,8 +61,8 @@ if (MSVC AND CMAKE_GENERATOR_PLATFORM STREQUAL "Win32")
|
|||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
|
if (NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
|
||||||
message (FATAL_ERROR "Rippled requires a 64 bit target architecture.\n"
|
message (FATAL_ERROR "Xrpld requires a 64 bit target architecture.\n"
|
||||||
"The most likely cause of this warning is trying to build rippled with a 32-bit OS.")
|
"The most likely cause of this warning is trying to build xrpld with a 32-bit OS.")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (APPLE AND NOT HOMEBREW)
|
if (APPLE AND NOT HOMEBREW)
|
||||||
@@ -18,7 +18,7 @@ if(tests)
|
|||||||
endif()
|
endif()
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
option(unity "Creates a build using UNITY support in cmake. This is the default" ON)
|
option(unity "Creates a build using UNITY support in cmake." OFF)
|
||||||
if(unity)
|
if(unity)
|
||||||
if(NOT is_ci)
|
if(NOT is_ci)
|
||||||
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
|
set(CMAKE_UNITY_BUILD_BATCH_SIZE 15 CACHE STRING "")
|
||||||
@@ -49,7 +49,7 @@ else()
|
|||||||
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
|
set(wextra OFF CACHE BOOL "gcc/clang only" FORCE)
|
||||||
endif()
|
endif()
|
||||||
if(is_linux)
|
if(is_linux)
|
||||||
option(BUILD_SHARED_LIBS "build shared ripple libraries" OFF)
|
option(BUILD_SHARED_LIBS "build shared xrpl libraries" OFF)
|
||||||
option(static "link protobuf, openssl, libc++, and boost statically" ON)
|
option(static "link protobuf, openssl, libc++, and boost statically" ON)
|
||||||
option(perf "Enables flags that assist with perf recording" OFF)
|
option(perf "Enables flags that assist with perf recording" OFF)
|
||||||
option(use_gold "enables detection of gold (binutils) linker" ON)
|
option(use_gold "enables detection of gold (binutils) linker" ON)
|
||||||
@@ -58,7 +58,7 @@ else()
|
|||||||
# we are not ready to allow shared-libs on windows because it would require
|
# we are not ready to allow shared-libs on windows because it would require
|
||||||
# export declarations. On macos it's more feasible, but static openssl
|
# export declarations. On macos it's more feasible, but static openssl
|
||||||
# produces odd linker errors, thus we disable shared lib builds for now.
|
# produces odd linker errors, thus we disable shared lib builds for now.
|
||||||
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared ripple libraries - OFF for win/macos" FORCE)
|
set(BUILD_SHARED_LIBS OFF CACHE BOOL "build shared xrpl libraries - OFF for win/macos" FORCE)
|
||||||
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
|
set(static ON CACHE BOOL "static link, linux only. ON for WIN/macos" FORCE)
|
||||||
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
|
set(perf OFF CACHE BOOL "perf flags, linux only" FORCE)
|
||||||
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
|
set(use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
|
||||||
@@ -118,7 +118,7 @@ option(beast_no_unit_test_inline
|
|||||||
"Prevents unit test definitions from being inserted into global table"
|
"Prevents unit test definitions from being inserted into global table"
|
||||||
OFF)
|
OFF)
|
||||||
option(single_io_service_thread
|
option(single_io_service_thread
|
||||||
"Restricts the number of threads calling io_service::run to one. \
|
"Restricts the number of threads calling io_context::run to one. \
|
||||||
This can be useful when debugging."
|
This can be useful when debugging."
|
||||||
OFF)
|
OFF)
|
||||||
option(boost_show_deprecated
|
option(boost_show_deprecated
|
||||||
20
cmake/XrplValidatorKeys.cmake
Normal file
20
cmake/XrplValidatorKeys.cmake
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
option (validator_keys "Enables building of validator-keys tool as a separate target (imported via FetchContent)" OFF)
|
||||||
|
|
||||||
|
if (validator_keys)
|
||||||
|
git_branch (current_branch)
|
||||||
|
# default to tracking VK master branch unless we are on release
|
||||||
|
if (NOT (current_branch STREQUAL "release"))
|
||||||
|
set (current_branch "master")
|
||||||
|
endif ()
|
||||||
|
message (STATUS "Tracking ValidatorKeys branch: ${current_branch}")
|
||||||
|
|
||||||
|
FetchContent_Declare (
|
||||||
|
validator_keys
|
||||||
|
GIT_REPOSITORY https://github.com/ripple/validator-keys-tool.git
|
||||||
|
GIT_TAG "${current_branch}"
|
||||||
|
)
|
||||||
|
FetchContent_MakeAvailable(validator_keys)
|
||||||
|
set_target_properties(validator-keys PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}")
|
||||||
|
install(TARGETS validator-keys RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
|
||||||
|
|
||||||
|
endif ()
|
||||||
@@ -5,11 +5,11 @@
|
|||||||
file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO)
|
file(STRINGS src/libxrpl/protocol/BuildInfo.cpp BUILD_INFO)
|
||||||
foreach(line_ ${BUILD_INFO})
|
foreach(line_ ${BUILD_INFO})
|
||||||
if(line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
|
if(line_ MATCHES "versionString[ ]*=[ ]*\"(.+)\"")
|
||||||
set(rippled_version ${CMAKE_MATCH_1})
|
set(xrpld_version ${CMAKE_MATCH_1})
|
||||||
endif()
|
endif()
|
||||||
endforeach()
|
endforeach()
|
||||||
if(rippled_version)
|
if(xrpld_version)
|
||||||
message(STATUS "rippled version: ${rippled_version}")
|
message(STATUS "xrpld version: ${xrpld_version}")
|
||||||
else()
|
else()
|
||||||
message(FATAL_ERROR "unable to determine rippled version")
|
message(FATAL_ERROR "unable to determine xrpld version")
|
||||||
endif()
|
endif()
|
||||||
@@ -2,7 +2,6 @@ find_package(Boost 1.82 REQUIRED
|
|||||||
COMPONENTS
|
COMPONENTS
|
||||||
chrono
|
chrono
|
||||||
container
|
container
|
||||||
context
|
|
||||||
coroutine
|
coroutine
|
||||||
date_time
|
date_time
|
||||||
filesystem
|
filesystem
|
||||||
@@ -13,30 +12,25 @@ find_package(Boost 1.82 REQUIRED
|
|||||||
thread
|
thread
|
||||||
)
|
)
|
||||||
|
|
||||||
add_library(ripple_boost INTERFACE)
|
add_library(xrpl_boost INTERFACE)
|
||||||
add_library(Ripple::boost ALIAS ripple_boost)
|
add_library(Xrpl::boost ALIAS xrpl_boost)
|
||||||
if(XCODE)
|
|
||||||
target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
|
|
||||||
target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/")
|
|
||||||
else()
|
|
||||||
target_include_directories(ripple_boost SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIRS})
|
|
||||||
endif()
|
|
||||||
|
|
||||||
target_link_libraries(ripple_boost
|
target_link_libraries(xrpl_boost
|
||||||
INTERFACE
|
INTERFACE
|
||||||
Boost::boost
|
Boost::headers
|
||||||
Boost::chrono
|
Boost::chrono
|
||||||
Boost::container
|
Boost::container
|
||||||
Boost::coroutine
|
Boost::coroutine
|
||||||
Boost::date_time
|
Boost::date_time
|
||||||
Boost::filesystem
|
Boost::filesystem
|
||||||
Boost::json
|
Boost::json
|
||||||
|
Boost::process
|
||||||
Boost::program_options
|
Boost::program_options
|
||||||
Boost::regex
|
Boost::regex
|
||||||
Boost::system
|
Boost::system
|
||||||
Boost::thread)
|
Boost::thread)
|
||||||
if(Boost_COMPILER)
|
if(Boost_COMPILER)
|
||||||
target_link_libraries(ripple_boost INTERFACE Boost::disable_autolinking)
|
target_link_libraries(xrpl_boost INTERFACE Boost::disable_autolinking)
|
||||||
endif()
|
endif()
|
||||||
if(san AND is_clang)
|
if(san AND is_clang)
|
||||||
# TODO: gcc does not support -fsanitize-blacklist...can we do something else
|
# TODO: gcc does not support -fsanitize-blacklist...can we do something else
|
||||||
|
|||||||
56
conan.lock
Normal file
56
conan.lock
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
{
|
||||||
|
"version": "0.5",
|
||||||
|
"requires": [
|
||||||
|
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
|
||||||
|
"xxhash/0.8.3#681d36a0a6111fc56e5e45ea182c19cc%1756234289.683",
|
||||||
|
"sqlite3/3.49.1#8631739a4c9b93bd3d6b753bac548a63%1756234266.869",
|
||||||
|
"soci/4.0.3#a9f8d773cd33e356b5879a4b0564f287%1756234262.318",
|
||||||
|
"snappy/1.1.10#968fef506ff261592ec30c574d4a7809%1756234314.246",
|
||||||
|
"rocksdb/10.0.1#85537f46e538974d67da0c3977de48ac%1756234304.347",
|
||||||
|
"re2/20230301#dfd6e2bf050eb90ddd8729cfb4c844a4%1756234257.976",
|
||||||
|
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
|
||||||
|
"openssl/3.5.4#a1d5835cc6ed5c5b8f3cd5b9b5d24205%1759746684.671",
|
||||||
|
"nudb/2.0.9#c62cfd501e57055a7e0d8ee3d5e5427d%1756234237.107",
|
||||||
|
"lz4/1.10.0#59fc63cac7f10fbe8e05c7e62c2f3504%1756234228.999",
|
||||||
|
"libiconv/1.17#1e65319e945f2d31941a9d28cc13c058%1756223727.64",
|
||||||
|
"libbacktrace/cci.20210118#a7691bfccd8caaf66309df196790a5a1%1756230911.03",
|
||||||
|
"libarchive/3.8.1#5cf685686322e906cb42706ab7e099a8%1756234256.696",
|
||||||
|
"jemalloc/5.3.0#e951da9cf599e956cebc117880d2d9f8%1729241615.244",
|
||||||
|
"grpc/1.50.1#02291451d1e17200293a409410d1c4e1%1756234248.958",
|
||||||
|
"doctest/2.4.11#a4211dfc329a16ba9f280f9574025659%1756234220.819",
|
||||||
|
"date/3.0.4#f74bbba5a08fa388256688743136cb6f%1756234217.493",
|
||||||
|
"c-ares/1.34.5#b78b91e7cfb1f11ce777a285bbf169c6%1756234217.915",
|
||||||
|
"bzip2/1.0.8#00b4a4658791c1f06914e087f0e792f5%1756234261.716",
|
||||||
|
"boost/1.88.0#8852c0b72ce8271fb8ff7c53456d4983%1756223752.326",
|
||||||
|
"abseil/20230802.1#f0f91485b111dc9837a68972cb19ca7b%1756234220.907"
|
||||||
|
],
|
||||||
|
"build_requires": [
|
||||||
|
"zlib/1.3.1#b8bc2603263cf7eccbd6e17e66b0ed76%1756234269.497",
|
||||||
|
"strawberryperl/5.32.1.1#707032463aa0620fa17ec0d887f5fe41%1756234281.733",
|
||||||
|
"protobuf/3.21.12#d927114e28de9f4691a6bbcdd9a529d1%1756234251.614",
|
||||||
|
"nasm/2.16.01#31e26f2ee3c4346ecd347911bd126904%1756234232.901",
|
||||||
|
"msys2/cci.latest#5b73b10144f73cc5bfe0572ed9be39e1%1751977009.857",
|
||||||
|
"m4/1.4.19#b38ced39a01e31fef5435bc634461fd2%1700758725.451",
|
||||||
|
"cmake/3.31.8#dde3bde00bb843687e55aea5afa0e220%1756234232.89",
|
||||||
|
"b2/5.3.3#107c15377719889654eb9a162a673975%1756234226.28",
|
||||||
|
"automake/1.16.5#b91b7c384c3deaa9d535be02da14d04f%1755524470.56",
|
||||||
|
"autoconf/2.71#51077f068e61700d65bb05541ea1e4b0%1731054366.86"
|
||||||
|
],
|
||||||
|
"python_requires": [],
|
||||||
|
"overrides": {
|
||||||
|
"protobuf/3.21.12": [
|
||||||
|
null,
|
||||||
|
"protobuf/3.21.12"
|
||||||
|
],
|
||||||
|
"lz4/1.9.4": [
|
||||||
|
"lz4/1.10.0"
|
||||||
|
],
|
||||||
|
"boost/1.83.0": [
|
||||||
|
"boost/1.88.0"
|
||||||
|
],
|
||||||
|
"sqlite3/3.44.2": [
|
||||||
|
"sqlite3/3.49.1"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"config_requires": []
|
||||||
|
}
|
||||||
5
conan/global.conf
Normal file
5
conan/global.conf
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Global configuration for Conan. This is used to set the number of parallel
|
||||||
|
# downloads and uploads.
|
||||||
|
core:non_interactive=True
|
||||||
|
core.download:parallel={{ os.cpu_count() }}
|
||||||
|
core.upload:parallel={{ os.cpu_count() }}
|
||||||
31
conan/profiles/default
Normal file
31
conan/profiles/default
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
{% set os = detect_api.detect_os() %}
|
||||||
|
{% set arch = detect_api.detect_arch() %}
|
||||||
|
{% set compiler, version, compiler_exe = detect_api.detect_default_compiler() %}
|
||||||
|
{% set compiler_version = version %}
|
||||||
|
{% if os == "Linux" %}
|
||||||
|
{% set compiler_version = detect_api.default_compiler_version(compiler, version) %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
[settings]
|
||||||
|
os={{ os }}
|
||||||
|
arch={{ arch }}
|
||||||
|
build_type=Debug
|
||||||
|
compiler={{compiler}}
|
||||||
|
compiler.version={{ compiler_version }}
|
||||||
|
compiler.cppstd=20
|
||||||
|
{% if os == "Windows" %}
|
||||||
|
compiler.runtime=static
|
||||||
|
{% else %}
|
||||||
|
compiler.libcxx={{detect_api.detect_libcxx(compiler, version, compiler_exe)}}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
[conf]
|
||||||
|
{% if compiler == "clang" and compiler_version >= 19 %}
|
||||||
|
grpc/1.50.1:tools.build:cxxflags+=['-Wno-missing-template-arg-list-after-template-kw']
|
||||||
|
{% endif %}
|
||||||
|
{% if compiler == "apple-clang" and compiler_version >= 17 %}
|
||||||
|
grpc/1.50.1:tools.build:cxxflags+=['-Wno-missing-template-arg-list-after-template-kw']
|
||||||
|
{% endif %}
|
||||||
|
{% if compiler == "gcc" and compiler_version < 13 %}
|
||||||
|
tools.build:cxxflags+=['-Wno-restrict']
|
||||||
|
{% endif %}
|
||||||
53
conanfile.py
53
conanfile.py
@@ -25,15 +25,19 @@ class Xrpl(ConanFile):
|
|||||||
|
|
||||||
requires = [
|
requires = [
|
||||||
'grpc/1.50.1',
|
'grpc/1.50.1',
|
||||||
'libarchive/3.7.6',
|
'libarchive/3.8.1',
|
||||||
'nudb/2.0.8',
|
'nudb/2.0.9',
|
||||||
'openssl/1.1.1v',
|
'openssl/3.5.4',
|
||||||
'soci/4.0.3',
|
'soci/4.0.3',
|
||||||
'zlib/1.3.1',
|
'zlib/1.3.1',
|
||||||
]
|
]
|
||||||
|
|
||||||
|
test_requires = [
|
||||||
|
'doctest/2.4.11',
|
||||||
|
]
|
||||||
|
|
||||||
tool_requires = [
|
tool_requires = [
|
||||||
'protobuf/3.21.9',
|
'protobuf/3.21.12',
|
||||||
]
|
]
|
||||||
|
|
||||||
default_options = {
|
default_options = {
|
||||||
@@ -85,34 +89,36 @@ class Xrpl(ConanFile):
|
|||||||
}
|
}
|
||||||
|
|
||||||
def set_version(self):
|
def set_version(self):
|
||||||
path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
|
if self.version is None:
|
||||||
regex = r'versionString\s?=\s?\"(.*)\"'
|
path = f'{self.recipe_folder}/src/libxrpl/protocol/BuildInfo.cpp'
|
||||||
with open(path, 'r') as file:
|
regex = r'versionString\s?=\s?\"(.*)\"'
|
||||||
matches = (re.search(regex, line) for line in file)
|
with open(path, encoding='utf-8') as file:
|
||||||
match = next(m for m in matches if m)
|
matches = (re.search(regex, line) for line in file)
|
||||||
self.version = match.group(1)
|
match = next(m for m in matches if m)
|
||||||
|
self.version = match.group(1)
|
||||||
|
|
||||||
def configure(self):
|
def configure(self):
|
||||||
if self.settings.compiler == 'apple-clang':
|
if self.settings.compiler == 'apple-clang':
|
||||||
self.options['boost'].visibility = 'global'
|
self.options['boost'].visibility = 'global'
|
||||||
|
if self.settings.compiler in ['clang', 'gcc']:
|
||||||
|
self.options['boost'].without_cobalt = True
|
||||||
|
|
||||||
def requirements(self):
|
def requirements(self):
|
||||||
# Conan 2 requires transitive headers to be specified
|
# Conan 2 requires transitive headers to be specified
|
||||||
transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
|
transitive_headers_opt = {'transitive_headers': True} if conan_version.split('.')[0] == '2' else {}
|
||||||
self.requires('boost/1.83.0', force=True, **transitive_headers_opt)
|
self.requires('boost/1.88.0', force=True, **transitive_headers_opt)
|
||||||
self.requires('date/3.0.3', **transitive_headers_opt)
|
self.requires('date/3.0.4', **transitive_headers_opt)
|
||||||
self.requires('lz4/1.10.0', force=True)
|
self.requires('lz4/1.10.0', force=True)
|
||||||
self.requires('protobuf/3.21.9', force=True)
|
self.requires('protobuf/3.21.12', force=True)
|
||||||
self.requires('sqlite3/3.47.0', force=True)
|
self.requires('sqlite3/3.49.1', force=True)
|
||||||
if self.options.jemalloc:
|
if self.options.jemalloc:
|
||||||
self.requires('jemalloc/5.3.0')
|
self.requires('jemalloc/5.3.0')
|
||||||
if self.options.rocksdb:
|
if self.options.rocksdb:
|
||||||
self.requires('rocksdb/9.7.3')
|
self.requires('rocksdb/10.0.1')
|
||||||
self.requires('xxhash/0.8.2', **transitive_headers_opt)
|
self.requires('xxhash/0.8.3', **transitive_headers_opt)
|
||||||
|
|
||||||
exports_sources = (
|
exports_sources = (
|
||||||
'CMakeLists.txt',
|
'CMakeLists.txt',
|
||||||
'bin/getRippledInfo',
|
|
||||||
'cfg/*',
|
'cfg/*',
|
||||||
'cmake/*',
|
'cmake/*',
|
||||||
'external/*',
|
'external/*',
|
||||||
@@ -163,7 +169,18 @@ class Xrpl(ConanFile):
|
|||||||
# `include/`, not `include/ripple/proto/`.
|
# `include/`, not `include/ripple/proto/`.
|
||||||
libxrpl.includedirs = ['include', 'include/ripple/proto']
|
libxrpl.includedirs = ['include', 'include/ripple/proto']
|
||||||
libxrpl.requires = [
|
libxrpl.requires = [
|
||||||
'boost::boost',
|
'boost::headers',
|
||||||
|
'boost::chrono',
|
||||||
|
'boost::container',
|
||||||
|
'boost::coroutine',
|
||||||
|
'boost::date_time',
|
||||||
|
'boost::filesystem',
|
||||||
|
'boost::json',
|
||||||
|
'boost::program_options',
|
||||||
|
'boost::process',
|
||||||
|
'boost::regex',
|
||||||
|
'boost::system',
|
||||||
|
'boost::thread',
|
||||||
'date::date',
|
'date::date',
|
||||||
'grpc::grpc++',
|
'grpc::grpc++',
|
||||||
'libarchive::libarchive',
|
'libarchive::libarchive',
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ the ledger (so the entire network has the same view). This will help the network
|
|||||||
see which validators are **currently** unreliable, and adjust their quorum
|
see which validators are **currently** unreliable, and adjust their quorum
|
||||||
calculation accordingly.
|
calculation accordingly.
|
||||||
|
|
||||||
*Improving the liveness of the network is the main motivation for the negative UNL.*
|
_Improving the liveness of the network is the main motivation for the negative UNL._
|
||||||
|
|
||||||
### Targeted Faults
|
### Targeted Faults
|
||||||
|
|
||||||
@@ -53,16 +53,17 @@ even if the number of remaining validators gets to 60%. Say we have a network
|
|||||||
with 10 validators on the UNL and everything is operating correctly. The quorum
|
with 10 validators on the UNL and everything is operating correctly. The quorum
|
||||||
required for this network would be 8 (80% of 10). When validators fail, the
|
required for this network would be 8 (80% of 10). When validators fail, the
|
||||||
quorum required would be as low as 6 (60% of 10), which is the absolute
|
quorum required would be as low as 6 (60% of 10), which is the absolute
|
||||||
***minimum quorum***. We need the absolute minimum quorum to be strictly greater
|
**_minimum quorum_**. We need the absolute minimum quorum to be strictly greater
|
||||||
than 50% of the original UNL so that there cannot be two partitions of
|
than 50% of the original UNL so that there cannot be two partitions of
|
||||||
well-behaved nodes headed in different directions. We arbitrarily choose 60% as
|
well-behaved nodes headed in different directions. We arbitrarily choose 60% as
|
||||||
the minimum quorum to give a margin of safety.
|
the minimum quorum to give a margin of safety.
|
||||||
|
|
||||||
Consider these events in the absence of negative UNL:
|
Consider these events in the absence of negative UNL:
|
||||||
|
|
||||||
1. 1:00pm - validator1 fails, votes vs. quorum: 9 >= 8, we have quorum
|
1. 1:00pm - validator1 fails, votes vs. quorum: 9 >= 8, we have quorum
|
||||||
1. 3:00pm - validator2 fails, votes vs. quorum: 8 >= 8, we have quorum
|
1. 3:00pm - validator2 fails, votes vs. quorum: 8 >= 8, we have quorum
|
||||||
1. 5:00pm - validator3 fails, votes vs. quorum: 7 < 8, we don’t have quorum
|
1. 5:00pm - validator3 fails, votes vs. quorum: 7 < 8, we don’t have quorum
|
||||||
* **network cannot validate new ledgers with 3 failed validators**
|
- **network cannot validate new ledgers with 3 failed validators**
|
||||||
|
|
||||||
We're below 80% agreement, so new ledgers cannot be validated. This is how the
|
We're below 80% agreement, so new ledgers cannot be validated. This is how the
|
||||||
XRP Ledger operates today, but if the negative UNL was enabled, the events would
|
XRP Ledger operates today, but if the negative UNL was enabled, the events would
|
||||||
@@ -70,18 +71,20 @@ happen as follows. (Please note that the events below are from a simplified
|
|||||||
version of our protocol.)
|
version of our protocol.)
|
||||||
|
|
||||||
1. 1:00pm - validator1 fails, votes vs. quorum: 9 >= 8, we have quorum
|
1. 1:00pm - validator1 fails, votes vs. quorum: 9 >= 8, we have quorum
|
||||||
1. 1:40pm - network adds validator1 to negative UNL, quorum changes to ceil(9 * 0.8), or 8
|
1. 1:40pm - network adds validator1 to negative UNL, quorum changes to ceil(9 \* 0.8), or 8
|
||||||
1. 3:00pm - validator2 fails, votes vs. quorum: 8 >= 8, we have quorum
|
1. 3:00pm - validator2 fails, votes vs. quorum: 8 >= 8, we have quorum
|
||||||
1. 3:40pm - network adds validator2 to negative UNL, quorum changes to ceil(8 * 0.8), or 7
|
1. 3:40pm - network adds validator2 to negative UNL, quorum changes to ceil(8 \* 0.8), or 7
|
||||||
1. 5:00pm - validator3 fails, votes vs. quorum: 7 >= 7, we have quorum
|
1. 5:00pm - validator3 fails, votes vs. quorum: 7 >= 7, we have quorum
|
||||||
1. 5:40pm - network adds validator3 to negative UNL, quorum changes to ceil(7 * 0.8), or 6
|
1. 5:40pm - network adds validator3 to negative UNL, quorum changes to ceil(7 \* 0.8), or 6
|
||||||
1. 7:00pm - validator4 fails, votes vs. quorum: 6 >= 6, we have quorum
|
1. 7:00pm - validator4 fails, votes vs. quorum: 6 >= 6, we have quorum
|
||||||
* **network can still validate new ledgers with 4 failed validators**
|
- **network can still validate new ledgers with 4 failed validators**
|
||||||
|
|
||||||
## External Interactions
|
## External Interactions
|
||||||
|
|
||||||
### Message Format Changes
|
### Message Format Changes
|
||||||
|
|
||||||
This proposal will:
|
This proposal will:
|
||||||
|
|
||||||
1. add a new pseudo-transaction type
|
1. add a new pseudo-transaction type
|
||||||
1. add the negative UNL to the ledger data structure.
|
1. add the negative UNL to the ledger data structure.
|
||||||
|
|
||||||
@@ -89,19 +92,20 @@ Any tools or systems that rely on the format of this data will have to be
|
|||||||
updated.
|
updated.
|
||||||
|
|
||||||
### Amendment
|
### Amendment
|
||||||
|
|
||||||
This feature **will** need an amendment to activate.
|
This feature **will** need an amendment to activate.
|
||||||
|
|
||||||
## Design
|
## Design
|
||||||
|
|
||||||
This section discusses the following topics about the Negative UNL design:
|
This section discusses the following topics about the Negative UNL design:
|
||||||
|
|
||||||
* [Negative UNL protocol overview](#Negative-UNL-Protocol-Overview)
|
- [Negative UNL protocol overview](#Negative-UNL-Protocol-Overview)
|
||||||
* [Validator reliability measurement](#Validator-Reliability-Measurement)
|
- [Validator reliability measurement](#Validator-Reliability-Measurement)
|
||||||
* [Format Changes](#Format-Changes)
|
- [Format Changes](#Format-Changes)
|
||||||
* [Negative UNL maintenance](#Negative-UNL-Maintenance)
|
- [Negative UNL maintenance](#Negative-UNL-Maintenance)
|
||||||
* [Quorum size calculation](#Quorum-Size-Calculation)
|
- [Quorum size calculation](#Quorum-Size-Calculation)
|
||||||
* [Filter validation messages](#Filter-Validation-Messages)
|
- [Filter validation messages](#Filter-Validation-Messages)
|
||||||
* [High level sequence diagram of code
|
- [High level sequence diagram of code
|
||||||
changes](#High-Level-Sequence-Diagram-of-Code-Changes)
|
changes](#High-Level-Sequence-Diagram-of-Code-Changes)
|
||||||
|
|
||||||
### Negative UNL Protocol Overview
|
### Negative UNL Protocol Overview
|
||||||
@@ -114,9 +118,9 @@ with V in their UNL adjust the quorum and V’s validation message is not counte
|
|||||||
when verifying if a ledger is fully validated. V’s flow of messages and network
|
when verifying if a ledger is fully validated. V’s flow of messages and network
|
||||||
interactions, however, will remain the same.
|
interactions, however, will remain the same.
|
||||||
|
|
||||||
We define the ***effective UNL** = original UNL - negative UNL*, and the
|
We define the **\*effective UNL** = original UNL - negative UNL\*, and the
|
||||||
***effective quorum*** as the quorum of the *effective UNL*. And we set
|
**_effective quorum_** as the quorum of the _effective UNL_. And we set
|
||||||
*effective quorum = Ceiling(80% * effective UNL)*.
|
_effective quorum = Ceiling(80% _ effective UNL)\*.
|
||||||
|
|
||||||
### Validator Reliability Measurement
|
### Validator Reliability Measurement
|
||||||
|
|
||||||
@@ -126,16 +130,16 @@ measure about its validators, but we have chosen ledger validation messages.
|
|||||||
This is because every validator shall send one and only one signed validation
|
This is because every validator shall send one and only one signed validation
|
||||||
message per ledger. This keeps the measurement simple and removes
|
message per ledger. This keeps the measurement simple and removes
|
||||||
timing/clock-sync issues. A node will measure the percentage of agreeing
|
timing/clock-sync issues. A node will measure the percentage of agreeing
|
||||||
validation messages (*PAV*) received from each validator on the node's UNL. Note
|
validation messages (_PAV_) received from each validator on the node's UNL. Note
|
||||||
that the node will only count the validation messages that agree with its own
|
that the node will only count the validation messages that agree with its own
|
||||||
validations.
|
validations.
|
||||||
|
|
||||||
We define the **PAV** as the **P**ercentage of **A**greed **V**alidation
|
We define the **PAV** as the **P**ercentage of **A**greed **V**alidation
|
||||||
messages received for the last N ledgers, where N = 256 by default.
|
messages received for the last N ledgers, where N = 256 by default.
|
||||||
|
|
||||||
When the PAV drops below the ***low-water mark***, the validator is considered
|
When the PAV drops below the **_low-water mark_**, the validator is considered
|
||||||
unreliable, and is a candidate to be disabled by being added to the negative
|
unreliable, and is a candidate to be disabled by being added to the negative
|
||||||
UNL. A validator must have a PAV higher than the ***high-water mark*** to be
|
UNL. A validator must have a PAV higher than the **_high-water mark_** to be
|
||||||
re-enabled. The validator is re-enabled by removing it from the negative UNL. In
|
re-enabled. The validator is re-enabled by removing it from the negative UNL. In
|
||||||
the implementation, we plan to set the low-water mark as 50% and the high-water
|
the implementation, we plan to set the low-water mark as 50% and the high-water
|
||||||
mark as 80%.
|
mark as 80%.
|
||||||
@@ -143,22 +147,24 @@ mark as 80%.
|
|||||||
### Format Changes
|
### Format Changes
|
||||||
|
|
||||||
The negative UNL component in a ledger contains three fields.
|
The negative UNL component in a ledger contains three fields.
|
||||||
* ***NegativeUNL***: The current negative UNL, a list of unreliable validators.
|
|
||||||
* ***ToDisable***: The validator to be added to the negative UNL on the next
|
- **_NegativeUNL_**: The current negative UNL, a list of unreliable validators.
|
||||||
|
- **_ToDisable_**: The validator to be added to the negative UNL on the next
|
||||||
flag ledger.
|
flag ledger.
|
||||||
* ***ToReEnable***: The validator to be removed from the negative UNL on the
|
- **_ToReEnable_**: The validator to be removed from the negative UNL on the
|
||||||
next flag ledger.
|
next flag ledger.
|
||||||
|
|
||||||
All three fields are optional. When the *ToReEnable* field exists, the
|
All three fields are optional. When the _ToReEnable_ field exists, the
|
||||||
*NegativeUNL* field cannot be empty.
|
_NegativeUNL_ field cannot be empty.
|
||||||
|
|
||||||
A new pseudo-transaction, ***UNLModify***, is added. It has three fields
|
A new pseudo-transaction, **_UNLModify_**, is added. It has three fields
|
||||||
* ***Disabling***: A flag indicating whether the modification is to disable or
|
|
||||||
|
- **_Disabling_**: A flag indicating whether the modification is to disable or
|
||||||
to re-enable a validator.
|
to re-enable a validator.
|
||||||
* ***Seq***: The ledger sequence number.
|
- **_Seq_**: The ledger sequence number.
|
||||||
* ***Validator***: The validator to be disabled or re-enabled.
|
- **_Validator_**: The validator to be disabled or re-enabled.
|
||||||
|
|
||||||
There would be at most one *disable* `UNLModify` and one *re-enable* `UNLModify`
|
There would be at most one _disable_ `UNLModify` and one _re-enable_ `UNLModify`
|
||||||
transaction per flag ledger. The full machinery is described further on.
|
transaction per flag ledger. The full machinery is described further on.
|
||||||
|
|
||||||
### Negative UNL Maintenance
|
### Negative UNL Maintenance
|
||||||
@@ -167,19 +173,19 @@ The negative UNL can only be modified on the flag ledgers. If a validator's
|
|||||||
reliability status changes, it takes two flag ledgers to modify the negative
|
reliability status changes, it takes two flag ledgers to modify the negative
|
||||||
UNL. Let's see an example of the algorithm:
|
UNL. Let's see an example of the algorithm:
|
||||||
|
|
||||||
* Ledger seq = 100: A validator V goes offline.
|
- Ledger seq = 100: A validator V goes offline.
|
||||||
* Ledger seq = 256: This is a flag ledger, and V's reliability measurement *PAV*
|
- Ledger seq = 256: This is a flag ledger, and V's reliability measurement _PAV_
|
||||||
is lower than the low-water mark. Other validators add `UNLModify`
|
is lower than the low-water mark. Other validators add `UNLModify`
|
||||||
pseudo-transactions `{true, 256, V}` to the transaction set which goes through
|
pseudo-transactions `{true, 256, V}` to the transaction set which goes through
|
||||||
the consensus. Then the pseudo-transaction is applied to the negative UNL
|
the consensus. Then the pseudo-transaction is applied to the negative UNL
|
||||||
ledger component by setting `ToDisable = V`.
|
ledger component by setting `ToDisable = V`.
|
||||||
* Ledger seq = 257 ~ 511: The negative UNL ledger component is copied from the
|
- Ledger seq = 257 ~ 511: The negative UNL ledger component is copied from the
|
||||||
parent ledger.
|
parent ledger.
|
||||||
* Ledger seq=512: This is a flag ledger, and the negative UNL is updated
|
- Ledger seq=512: This is a flag ledger, and the negative UNL is updated
|
||||||
`NegativeUNL = NegativeUNL + ToDisable`.
|
`NegativeUNL = NegativeUNL + ToDisable`.
|
||||||
|
|
||||||
The negative UNL may have up to `MaxNegativeListed = floor(original UNL * 25%)`
|
The negative UNL may have up to `MaxNegativeListed = floor(original UNL * 25%)`
|
||||||
validators. The 25% is because of 75% * 80% = 60%, where 75% = 100% - 25%, 80%
|
validators. The 25% is because of 75% \* 80% = 60%, where 75% = 100% - 25%, 80%
|
||||||
is the quorum of the effective UNL, and 60% is the absolute minimum quorum of
|
is the quorum of the effective UNL, and 60% is the absolute minimum quorum of
|
||||||
the original UNL. Adding more than 25% validators to the negative UNL does not
|
the original UNL. Adding more than 25% validators to the negative UNL does not
|
||||||
improve the liveness of the network, because adding more validators to the
|
improve the liveness of the network, because adding more validators to the
|
||||||
@@ -187,52 +193,43 @@ negative UNL cannot lower the effective quorum.
|
|||||||
|
|
||||||
The following is the detailed algorithm:
|
The following is the detailed algorithm:
|
||||||
|
|
||||||
* **If** the ledger seq = x is a flag ledger
|
- **If** the ledger seq = x is a flag ledger
|
||||||
|
1. Compute `NegativeUNL = NegativeUNL + ToDisable - ToReEnable` if they
|
||||||
|
exist in the parent ledger
|
||||||
|
|
||||||
1. Compute `NegativeUNL = NegativeUNL + ToDisable - ToReEnable` if they
|
1. Try to find a candidate to disable if `sizeof NegativeUNL < MaxNegativeListed`
|
||||||
exist in the parent ledger
|
|
||||||
|
|
||||||
1. Try to find a candidate to disable if `sizeof NegativeUNL < MaxNegativeListed`
|
1. Find a validator V that has a _PAV_ lower than the low-water
|
||||||
|
mark, but is not in `NegativeUNL`.
|
||||||
|
|
||||||
1. Find a validator V that has a *PAV* lower than the low-water
|
1. If two or more are found, their public keys are XORed with the hash
|
||||||
mark, but is not in `NegativeUNL`.
|
of the parent ledger and the one with the lowest XOR result is chosen.
|
||||||
|
1. If V is found, create a `UNLModify` pseudo-transaction
|
||||||
|
`TxDisableValidator = {true, x, V}`
|
||||||
|
1. Try to find a candidate to re-enable if `sizeof NegativeUNL > 0`:
|
||||||
|
1. Find a validator U that is in `NegativeUNL` and has a _PAV_ higher
|
||||||
|
than the high-water mark.
|
||||||
|
1. If U is not found, try to find one in `NegativeUNL` but not in the
|
||||||
|
local _UNL_.
|
||||||
|
1. If two or more are found, their public keys are XORed with the hash
|
||||||
|
of the parent ledger and the one with the lowest XOR result is chosen.
|
||||||
|
1. If U is found, create a `UNLModify` pseudo-transaction
|
||||||
|
`TxReEnableValidator = {false, x, U}`
|
||||||
|
|
||||||
1. If two or more are found, their public keys are XORed with the hash
|
1. If any `UNLModify` pseudo-transactions are created, add them to the
|
||||||
of the parent ledger and the one with the lowest XOR result is chosen.
|
transaction set. The transaction set goes through the consensus algorithm.
|
||||||
|
1. If have enough support, the `UNLModify` pseudo-transactions remain in the
|
||||||
1. If V is found, create a `UNLModify` pseudo-transaction
|
transaction set agreed by the validators. Then the pseudo-transactions are
|
||||||
`TxDisableValidator = {true, x, V}`
|
applied to the ledger:
|
||||||
|
|
||||||
1. Try to find a candidate to re-enable if `sizeof NegativeUNL > 0`:
|
|
||||||
|
|
||||||
1. Find a validator U that is in `NegativeUNL` and has a *PAV* higher
|
|
||||||
than the high-water mark.
|
|
||||||
|
|
||||||
1. If U is not found, try to find one in `NegativeUNL` but not in the
|
|
||||||
local *UNL*.
|
|
||||||
|
|
||||||
1. If two or more are found, their public keys are XORed with the hash
|
|
||||||
of the parent ledger and the one with the lowest XOR result is chosen.
|
|
||||||
|
|
||||||
1. If U is found, create a `UNLModify` pseudo-transaction
|
|
||||||
`TxReEnableValidator = {false, x, U}`
|
|
||||||
|
|
||||||
1. If any `UNLModify` pseudo-transactions are created, add them to the
|
|
||||||
transaction set. The transaction set goes through the consensus algorithm.
|
|
||||||
|
|
||||||
1. If have enough support, the `UNLModify` pseudo-transactions remain in the
|
|
||||||
transaction set agreed by the validators. Then the pseudo-transactions are
|
|
||||||
applied to the ledger:
|
|
||||||
|
|
||||||
1. If have `TxDisableValidator`, set `ToDisable=TxDisableValidator.V`.
|
|
||||||
Else clear `ToDisable`.
|
|
||||||
|
|
||||||
1. If have `TxReEnableValidator`, set
|
|
||||||
`ToReEnable=TxReEnableValidator.U`. Else clear `ToReEnable`.
|
|
||||||
|
|
||||||
* **Else** (not a flag ledger)
|
|
||||||
|
|
||||||
1. Copy the negative UNL ledger component from the parent ledger
|
1. If have `TxDisableValidator`, set `ToDisable=TxDisableValidator.V`.
|
||||||
|
Else clear `ToDisable`.
|
||||||
|
|
||||||
|
1. If have `TxReEnableValidator`, set
|
||||||
|
`ToReEnable=TxReEnableValidator.U`. Else clear `ToReEnable`.
|
||||||
|
|
||||||
|
- **Else** (not a flag ledger)
|
||||||
|
1. Copy the negative UNL ledger component from the parent ledger
|
||||||
|
|
||||||
The negative UNL is stored on each ledger because we don't know when a validator
|
The negative UNL is stored on each ledger because we don't know when a validator
|
||||||
may reconnect to the network. If the negative UNL was stored only on every flag
|
may reconnect to the network. If the negative UNL was stored only on every flag
|
||||||
@@ -273,31 +270,26 @@ not counted when checking if the ledger is fully validated.
|
|||||||
The diagram below is the sequence of one round of consensus. Classes and
|
The diagram below is the sequence of one round of consensus. Classes and
|
||||||
components with non-trivial changes are colored green.
|
components with non-trivial changes are colored green.
|
||||||
|
|
||||||
* The `ValidatorList` class is modified to compute the quorum of the effective
|
- The `ValidatorList` class is modified to compute the quorum of the effective
|
||||||
UNL.
|
UNL.
|
||||||
|
|
||||||
* The `Validations` class provides an interface for querying the validation
|
- The `Validations` class provides an interface for querying the validation
|
||||||
messages from trusted validators.
|
messages from trusted validators.
|
||||||
|
|
||||||
* The `ConsensusAdaptor` component:
|
- The `ConsensusAdaptor` component:
|
||||||
|
- The `RCLConsensus::Adaptor` class is modified for creating `UNLModify`
|
||||||
* The `RCLConsensus::Adaptor` class is modified for creating `UNLModify`
|
Pseudo-Transactions.
|
||||||
Pseudo-Transactions.
|
- The `Change` class is modified for applying `UNLModify`
|
||||||
|
Pseudo-Transactions.
|
||||||
* The `Change` class is modified for applying `UNLModify`
|
- The `Ledger` class is modified for creating and adjusting the negative UNL
|
||||||
Pseudo-Transactions.
|
ledger component.
|
||||||
|
- The `LedgerMaster` class is modified for filtering out validation messages
|
||||||
* The `Ledger` class is modified for creating and adjusting the negative UNL
|
from negative UNL validators when verifying if a ledger is fully
|
||||||
ledger component.
|
validated.
|
||||||
|
|
||||||
* The `LedgerMaster` class is modified for filtering out validation messages
|
|
||||||
from negative UNL validators when verifying if a ledger is fully
|
|
||||||
validated.
|
|
||||||
|
|
||||||

|
Changes")
|
||||||
|
|
||||||
|
|
||||||
## Roads Not Taken
|
## Roads Not Taken
|
||||||
|
|
||||||
### Use a Mechanism Like Fee Voting to Process UNLModify Pseudo-Transactions
|
### Use a Mechanism Like Fee Voting to Process UNLModify Pseudo-Transactions
|
||||||
@@ -311,7 +303,7 @@ and different quorums for the same ledger. As a result, the network's safety is
|
|||||||
impacted.
|
impacted.
|
||||||
|
|
||||||
This updated version does not impact safety though operates a bit more slowly.
|
This updated version does not impact safety though operates a bit more slowly.
|
||||||
The negative UNL modifications in the *UNLModify* pseudo-transaction approved by
|
The negative UNL modifications in the _UNLModify_ pseudo-transaction approved by
|
||||||
the consensus will take effect at the next flag ledger. The extra time of the
|
the consensus will take effect at the next flag ledger. The extra time of the
|
||||||
256 ledgers should be enough for nodes to be in sync of the negative UNL
|
256 ledgers should be enough for nodes to be in sync of the negative UNL
|
||||||
modifications.
|
modifications.
|
||||||
@@ -334,29 +326,28 @@ expiration approach cannot be simply applied.
|
|||||||
### Validator Reliability Measurement and Flag Ledger Frequency
|
### Validator Reliability Measurement and Flag Ledger Frequency
|
||||||
|
|
||||||
If the ledger time is about 4.5 seconds and the low-water mark is 50%, then in
|
If the ledger time is about 4.5 seconds and the low-water mark is 50%, then in
|
||||||
the worst case, it takes 48 minutes *((0.5 * 256 + 256 + 256) * 4.5 / 60 = 48)*
|
the worst case, it takes 48 minutes _((0.5 _ 256 + 256 + 256) _ 4.5 / 60 = 48)_
|
||||||
to put an offline validator on the negative UNL. We considered lowering the flag
|
to put an offline validator on the negative UNL. We considered lowering the flag
|
||||||
ledger frequency so that the negative UNL can be more responsive. We also
|
ledger frequency so that the negative UNL can be more responsive. We also
|
||||||
considered decoupling the reliability measurement and flag ledger frequency to
|
considered decoupling the reliability measurement and flag ledger frequency to
|
||||||
be more flexible. In practice, however, their benefits are not clear.
|
be more flexible. In practice, however, their benefits are not clear.
|
||||||
|
|
||||||
|
|
||||||
## New Attack Vectors
|
## New Attack Vectors
|
||||||
|
|
||||||
A group of malicious validators may try to frame a reliable validator and put it
|
A group of malicious validators may try to frame a reliable validator and put it
|
||||||
on the negative UNL. But they cannot succeed. Because:
|
on the negative UNL. But they cannot succeed. Because:
|
||||||
|
|
||||||
1. A reliable validator sends a signed validation message every ledger. A
|
1. A reliable validator sends a signed validation message every ledger. A
|
||||||
sufficient peer-to-peer network will propagate the validation messages to other
|
sufficient peer-to-peer network will propagate the validation messages to other
|
||||||
validators. The validators will decide if another validator is reliable or not
|
validators. The validators will decide if another validator is reliable or not
|
||||||
only by its local observation of the validation messages received. So an honest
|
only by its local observation of the validation messages received. So an honest
|
||||||
validator’s vote on another validator’s reliability is accurate.
|
validator’s vote on another validator’s reliability is accurate.
|
||||||
|
|
||||||
1. Given the votes are accurate, and one vote per validator, an honest validator
|
1. Given the votes are accurate, and one vote per validator, an honest validator
|
||||||
will not create a UNLModify transaction of a reliable validator.
|
will not create a UNLModify transaction of a reliable validator.
|
||||||
|
|
||||||
1. A validator can be added to a negative UNL only through a UNLModify
|
1. A validator can be added to a negative UNL only through a UNLModify
|
||||||
transaction.
|
transaction.
|
||||||
|
|
||||||
Assuming the group of malicious validators is less than the quorum, they cannot
|
Assuming the group of malicious validators is less than the quorum, they cannot
|
||||||
frame a reliable validator.
|
frame a reliable validator.
|
||||||
@@ -365,32 +356,32 @@ frame a reliable validator.
|
|||||||
|
|
||||||
The bullet points below briefly summarize the current proposal:
|
The bullet points below briefly summarize the current proposal:
|
||||||
|
|
||||||
* The motivation of the negative UNL is to improve the liveness of the network.
|
- The motivation of the negative UNL is to improve the liveness of the network.
|
||||||
|
|
||||||
* The targeted faults are the ones frequently observed in the production
|
- The targeted faults are the ones frequently observed in the production
|
||||||
network.
|
network.
|
||||||
|
|
||||||
* Validators propose negative UNL candidates based on their local measurements.
|
- Validators propose negative UNL candidates based on their local measurements.
|
||||||
|
|
||||||
* The absolute minimum quorum is 60% of the original UNL.
|
- The absolute minimum quorum is 60% of the original UNL.
|
||||||
|
|
||||||
* The format of the ledger is changed, and a new *UNLModify* pseudo-transaction
|
- The format of the ledger is changed, and a new _UNLModify_ pseudo-transaction
|
||||||
is added. Any tools or systems that rely on the format of these data will have
|
is added. Any tools or systems that rely on the format of these data will have
|
||||||
to be updated.
|
to be updated.
|
||||||
|
|
||||||
* The negative UNL can only be modified on the flag ledgers.
|
- The negative UNL can only be modified on the flag ledgers.
|
||||||
|
|
||||||
* At most one validator can be added to the negative UNL at a flag ledger.
|
- At most one validator can be added to the negative UNL at a flag ledger.
|
||||||
|
|
||||||
* At most one validator can be removed from the negative UNL at a flag ledger.
|
- At most one validator can be removed from the negative UNL at a flag ledger.
|
||||||
|
|
||||||
* If a validator's reliability status changes, it takes two flag ledgers to
|
- If a validator's reliability status changes, it takes two flag ledgers to
|
||||||
modify the negative UNL.
|
modify the negative UNL.
|
||||||
|
|
||||||
* The quorum is the larger of 80% of the effective UNL and 60% of the original
|
- The quorum is the larger of 80% of the effective UNL and 60% of the original
|
||||||
UNL.
|
UNL.
|
||||||
|
|
||||||
* If a validator is on the negative UNL, its validation messages are ignored
|
- If a validator is on the negative UNL, its validation messages are ignored
|
||||||
when the local node verifies if a ledger is fully validated.
|
when the local node verifies if a ledger is fully validated.
|
||||||
|
|
||||||
## FAQ
|
## FAQ
|
||||||
@@ -415,7 +406,7 @@ lower quorum size while keeping the network safe.
|
|||||||
validator removed from the negative UNL? </h3>
|
validator removed from the negative UNL? </h3>
|
||||||
|
|
||||||
A validator’s reliability is measured by other validators. If a validator
|
A validator’s reliability is measured by other validators. If a validator
|
||||||
becomes unreliable, at a flag ledger, other validators propose *UNLModify*
|
becomes unreliable, at a flag ledger, other validators propose _UNLModify_
|
||||||
pseudo-transactions which vote the validator to add to the negative UNL during
|
pseudo-transactions which vote the validator to add to the negative UNL during
|
||||||
the consensus session. If agreed, the validator is added to the negative UNL at
|
the consensus session. If agreed, the validator is added to the negative UNL at
|
||||||
the next flag ledger. The mechanism of removing a validator from the negative
|
the next flag ledger. The mechanism of removing a validator from the negative
|
||||||
@@ -423,32 +414,32 @@ UNL is the same.
|
|||||||
|
|
||||||
### Question: Given a negative UNL, what happens if the UNL changes?
|
### Question: Given a negative UNL, what happens if the UNL changes?
|
||||||
|
|
||||||
Answer: Let’s consider the cases:
|
Answer: Let’s consider the cases:
|
||||||
|
|
||||||
1. A validator is added to the UNL, and it is already in the negative UNL. This
|
1. A validator is added to the UNL, and it is already in the negative UNL. This
|
||||||
case could happen when not all the nodes have the same UNL. Note that the
|
case could happen when not all the nodes have the same UNL. Note that the
|
||||||
negative UNL on the ledger lists unreliable nodes that are not necessarily the
|
negative UNL on the ledger lists unreliable nodes that are not necessarily the
|
||||||
validators for everyone.
|
validators for everyone.
|
||||||
|
|
||||||
In this case, the liveness is affected negatively. Because the minimum
|
In this case, the liveness is affected negatively. Because the minimum
|
||||||
quorum could be larger but the usable validators are not increased.
|
quorum could be larger but the usable validators are not increased.
|
||||||
|
|
||||||
1. A validator is removed from the UNL, and it is in the negative UNL.
|
1. A validator is removed from the UNL, and it is in the negative UNL.
|
||||||
|
|
||||||
In this case, the liveness is affected positively. Because the quorum could
|
In this case, the liveness is affected positively. Because the quorum could
|
||||||
be smaller but the usable validators are not reduced.
|
be smaller but the usable validators are not reduced.
|
||||||
|
|
||||||
1. A validator is added to the UNL, and it is not in the negative UNL.
|
1. A validator is added to the UNL, and it is not in the negative UNL.
|
||||||
1. A validator is removed from the UNL, and it is not in the negative UNL.
|
1. A validator is removed from the UNL, and it is not in the negative UNL.
|
||||||
|
|
||||||
Case 3 and 4 are not affected by the negative UNL protocol.
|
Case 3 and 4 are not affected by the negative UNL protocol.
|
||||||
|
|
||||||
### Question: Can we simply lower the quorum to 60% without the negative UNL?
|
### Question: Can we simply lower the quorum to 60% without the negative UNL?
|
||||||
|
|
||||||
Answer: No, because the negative UNL approach is safer.
|
Answer: No, because the negative UNL approach is safer.
|
||||||
|
|
||||||
First let’s compare the two approaches intuitively, (1) the *negative UNL*
|
First let’s compare the two approaches intuitively, (1) the _negative UNL_
|
||||||
approach, and (2) *lower quorum*: simply lowering the quorum from 80% to 60%
|
approach, and (2) _lower quorum_: simply lowering the quorum from 80% to 60%
|
||||||
without the negative UNL. The negative UNL approach uses consensus to come up
|
without the negative UNL. The negative UNL approach uses consensus to come up
|
||||||
with a list of unreliable validators, which are then removed from the effective
|
with a list of unreliable validators, which are then removed from the effective
|
||||||
UNL temporarily. With this approach, the list of unreliable validators is agreed
|
UNL temporarily. With this approach, the list of unreliable validators is agreed
|
||||||
@@ -462,75 +453,75 @@ Next we compare the two approaches quantitatively with examples, and apply
|
|||||||
Theorem 8 of [Analysis of the XRP Ledger Consensus
|
Theorem 8 of [Analysis of the XRP Ledger Consensus
|
||||||
Protocol](https://arxiv.org/abs/1802.07242) paper:
|
Protocol](https://arxiv.org/abs/1802.07242) paper:
|
||||||
|
|
||||||
*XRP LCP guarantees fork safety if **O<sub>i,j</sub> > n<sub>j</sub> / 2 +
|
_XRP LCP guarantees fork safety if **O<sub>i,j</sub> > n<sub>j</sub> / 2 +
|
||||||
n<sub>i</sub> − q<sub>i</sub> + t<sub>i,j</sub>** for every pair of nodes
|
n<sub>i</sub> − q<sub>i</sub> + t<sub>i,j</sub>** for every pair of nodes
|
||||||
P<sub>i</sub>, P<sub>j</sub>,*
|
P<sub>i</sub>, P<sub>j</sub>,_
|
||||||
|
|
||||||
where *O<sub>i,j</sub>* is the overlapping requirement, n<sub>j</sub> and
|
where _O<sub>i,j</sub>_ is the overlapping requirement, n<sub>j</sub> and
|
||||||
n<sub>i</sub> are UNL sizes, q<sub>i</sub> is the quorum size of P<sub>i</sub>,
|
n<sub>i</sub> are UNL sizes, q<sub>i</sub> is the quorum size of P<sub>i</sub>,
|
||||||
*t<sub>i,j</sub> = min(t<sub>i</sub>, t<sub>j</sub>, O<sub>i,j</sub>)*, and
|
_t<sub>i,j</sub> = min(t<sub>i</sub>, t<sub>j</sub>, O<sub>i,j</sub>)_, and
|
||||||
t<sub>i</sub> and t<sub>j</sub> are the number of faults can be tolerated by
|
t<sub>i</sub> and t<sub>j</sub> are the number of faults can be tolerated by
|
||||||
P<sub>i</sub> and P<sub>j</sub>.
|
P<sub>i</sub> and P<sub>j</sub>.
|
||||||
|
|
||||||
We denote *UNL<sub>i</sub>* as *P<sub>i</sub>'s UNL*, and *|UNL<sub>i</sub>|* as
|
We denote _UNL<sub>i</sub>_ as _P<sub>i</sub>'s UNL_, and _|UNL<sub>i</sub>|_ as
|
||||||
the size of *P<sub>i</sub>'s UNL*.
|
the size of _P<sub>i</sub>'s UNL_.
|
||||||
|
|
||||||
Assuming *|UNL<sub>i</sub>| = |UNL<sub>j</sub>|*, let's consider the following
|
Assuming _|UNL<sub>i</sub>| = |UNL<sub>j</sub>|_, let's consider the following
|
||||||
three cases:
|
three cases:
|
||||||
|
|
||||||
1. With 80% quorum and 20% faults, *O<sub>i,j</sub> > 100% / 2 + 100% - 80% +
|
1. With 80% quorum and 20% faults, _O<sub>i,j</sub> > 100% / 2 + 100% - 80% +
|
||||||
20% = 90%*. I.e. fork safety requires > 90% UNL overlaps. This is one of the
|
20% = 90%_. I.e. fork safety requires > 90% UNL overlaps. This is one of the
|
||||||
results in the analysis paper.
|
results in the analysis paper.
|
||||||
|
|
||||||
1. If the quorum is 60%, the relationship between the overlapping requirement
|
1. If the quorum is 60%, the relationship between the overlapping requirement
|
||||||
and the faults that can be tolerated is *O<sub>i,j</sub> > 90% +
|
and the faults that can be tolerated is _O<sub>i,j</sub> > 90% +
|
||||||
t<sub>i,j</sub>*. Under the same overlapping condition (i.e. 90%), to guarantee
|
t<sub>i,j</sub>_. Under the same overlapping condition (i.e. 90%), to guarantee
|
||||||
the fork safety, the network cannot tolerate any faults. So under the same
|
the fork safety, the network cannot tolerate any faults. So under the same
|
||||||
overlapping condition, if the quorum is simply lowered, the network can tolerate
|
overlapping condition, if the quorum is simply lowered, the network can tolerate
|
||||||
fewer faults.
|
fewer faults.
|
||||||
|
|
||||||
1. With the negative UNL approach, we want to argue that the inequation
|
1. With the negative UNL approach, we want to argue that the inequation
|
||||||
*O<sub>i,j</sub> > n<sub>j</sub> / 2 + n<sub>i</sub> − q<sub>i</sub> +
|
_O<sub>i,j</sub> > n<sub>j</sub> / 2 + n<sub>i</sub> − q<sub>i</sub> +
|
||||||
t<sub>i,j</sub>* is always true to guarantee fork safety, while the negative UNL
|
t<sub>i,j</sub>_ is always true to guarantee fork safety, while the negative UNL
|
||||||
protocol runs, i.e. the effective quorum is lowered without weakening the
|
protocol runs, i.e. the effective quorum is lowered without weakening the
|
||||||
network's fault tolerance. To make the discussion easier, we rewrite the
|
network's fault tolerance. To make the discussion easier, we rewrite the
|
||||||
inequation as *O<sub>i,j</sub> > n<sub>j</sub> / 2 + (n<sub>i</sub> −
|
inequation as _O<sub>i,j</sub> > n<sub>j</sub> / 2 + (n<sub>i</sub> −
|
||||||
q<sub>i</sub>) + min(t<sub>i</sub>, t<sub>j</sub>)*, where O<sub>i,j</sub> is
|
q<sub>i</sub>) + min(t<sub>i</sub>, t<sub>j</sub>)_, where O<sub>i,j</sub> is
|
||||||
dropped from the definition of t<sub>i,j</sub> because *O<sub>i,j</sub> >
|
dropped from the definition of t<sub>i,j</sub> because _O<sub>i,j</sub> >
|
||||||
min(t<sub>i</sub>, t<sub>j</sub>)* always holds under the parameters we will
|
min(t<sub>i</sub>, t<sub>j</sub>)_ always holds under the parameters we will
|
||||||
use. Assuming a validator V is added to the negative UNL, now let's consider the
|
use. Assuming a validator V is added to the negative UNL, now let's consider the
|
||||||
4 cases:
|
4 cases:
|
||||||
|
|
||||||
1. V is not on UNL<sub>i</sub> nor UNL<sub>j</sub>
|
1. V is not on UNL<sub>i</sub> nor UNL<sub>j</sub>
|
||||||
|
|
||||||
The inequation holds because none of the variables change.
|
The inequation holds because none of the variables change.
|
||||||
|
|
||||||
1. V is on UNL<sub>i</sub> but not on UNL<sub>j</sub>
|
1. V is on UNL<sub>i</sub> but not on UNL<sub>j</sub>
|
||||||
|
|
||||||
The value of *(n<sub>i</sub> − q<sub>i</sub>)* is smaller. The value of
|
The value of *(n<sub>i</sub> − q<sub>i</sub>)* is smaller. The value of
|
||||||
*min(t<sub>i</sub>, t<sub>j</sub>)* could be smaller too. Other
|
*min(t<sub>i</sub>, t<sub>j</sub>)* could be smaller too. Other
|
||||||
variables do not change. Overall, the left side of the inequation does
|
variables do not change. Overall, the left side of the inequation does
|
||||||
not change, but the right side is smaller. So the inequation holds.
|
not change, but the right side is smaller. So the inequation holds.
|
||||||
|
|
||||||
1. V is not on UNL<sub>i</sub> but on UNL<sub>j</sub>
|
|
||||||
|
|
||||||
The value of *n<sub>j</sub> / 2* is smaller. The value of
|
1. V is not on UNL<sub>i</sub> but on UNL<sub>j</sub>
|
||||||
*min(t<sub>i</sub>, t<sub>j</sub>)* could be smaller too. Other
|
|
||||||
variables do not change. Overall, the left side of the inequation does
|
|
||||||
not change, but the right side is smaller. So the inequation holds.
|
|
||||||
|
|
||||||
1. V is on both UNL<sub>i</sub> and UNL<sub>j</sub>
|
|
||||||
|
|
||||||
The value of *O<sub>i,j</sub>* is reduced by 1. The values of
|
The value of *n<sub>j</sub> / 2* is smaller. The value of
|
||||||
*n<sub>j</sub> / 2*, *(n<sub>i</sub> − q<sub>i</sub>)*, and
|
*min(t<sub>i</sub>, t<sub>j</sub>)* could be smaller too. Other
|
||||||
*min(t<sub>i</sub>, t<sub>j</sub>)* are reduced by 0.5, 0.2, and 1
|
variables do not change. Overall, the left side of the inequation does
|
||||||
respectively. The right side is reduced by 1.7. Overall, the left side
|
not change, but the right side is smaller. So the inequation holds.
|
||||||
of the inequation is reduced by 1, and the right side is reduced by 1.7.
|
|
||||||
So the inequation holds.
|
|
||||||
|
|
||||||
The inequation holds for all the cases. So with the negative UNL approach,
|
1. V is on both UNL<sub>i</sub> and UNL<sub>j</sub>
|
||||||
the network's fork safety is preserved, while the quorum is lowered that
|
|
||||||
increases the network's liveness.
|
The value of *O<sub>i,j</sub>* is reduced by 1. The values of
|
||||||
|
*n<sub>j</sub> / 2*, *(n<sub>i</sub> − q<sub>i</sub>)*, and
|
||||||
|
*min(t<sub>i</sub>, t<sub>j</sub>)* are reduced by 0.5, 0.2, and 1
|
||||||
|
respectively. The right side is reduced by 1.7. Overall, the left side
|
||||||
|
of the inequation is reduced by 1, and the right side is reduced by 1.7.
|
||||||
|
So the inequation holds.
|
||||||
|
|
||||||
|
The inequation holds for all the cases. So with the negative UNL approach,
|
||||||
|
the network's fork safety is preserved, while the quorum is lowered that
|
||||||
|
increases the network's liveness.
|
||||||
|
|
||||||
<h3> Question: We have observed that occasionally a validator wanders off on its
|
<h3> Question: We have observed that occasionally a validator wanders off on its
|
||||||
own chain. How is this case handled by the negative UNL algorithm? </h3>
|
own chain. How is this case handled by the negative UNL algorithm? </h3>
|
||||||
@@ -565,11 +556,11 @@ will be used after that. We want to see the test cases still pass with real
|
|||||||
network delay. A test case specifies:
|
network delay. A test case specifies:
|
||||||
|
|
||||||
1. a UNL with different number of validators for different test cases,
|
1. a UNL with different number of validators for different test cases,
|
||||||
1. a network with zero or more non-validator nodes,
|
1. a network with zero or more non-validator nodes,
|
||||||
1. a sequence of validator reliability change events (by killing/restarting
|
1. a sequence of validator reliability change events (by killing/restarting
|
||||||
nodes, or by running modified rippled that does not send all validation
|
nodes, or by running modified rippled that does not send all validation
|
||||||
messages),
|
messages),
|
||||||
1. the correct outcomes.
|
1. the correct outcomes.
|
||||||
|
|
||||||
For all the test cases, the correct outcomes are verified by examining logs. We
|
For all the test cases, the correct outcomes are verified by examining logs. We
|
||||||
will grep the log to see if the correct negative UNLs are generated, and whether
|
will grep the log to see if the correct negative UNLs are generated, and whether
|
||||||
@@ -579,6 +570,7 @@ timing parameters of rippled will be changed to have faster ledger time. Most if
|
|||||||
not all test cases do not need client transactions.
|
not all test cases do not need client transactions.
|
||||||
|
|
||||||
For example, the test cases for the prototype:
|
For example, the test cases for the prototype:
|
||||||
|
|
||||||
1. A 10-validator UNL.
|
1. A 10-validator UNL.
|
||||||
1. The network does not have other nodes.
|
1. The network does not have other nodes.
|
||||||
1. The validators will be started from the genesis. Once they start to produce
|
1. The validators will be started from the genesis. Once they start to produce
|
||||||
@@ -587,11 +579,11 @@ For example, the test cases for the prototype:
|
|||||||
1. A sequence of events (or the lack of events) such as a killed validator is
|
1. A sequence of events (or the lack of events) such as a killed validator is
|
||||||
added to the negative UNL.
|
added to the negative UNL.
|
||||||
|
|
||||||
#### Roads Not Taken: Test with Extended CSF
|
#### Roads Not Taken: Test with Extended CSF
|
||||||
|
|
||||||
We considered testing with the current unit test framework, specifically the
|
We considered testing with the current unit test framework, specifically the
|
||||||
[Consensus Simulation
|
[Consensus Simulation
|
||||||
Framework](https://github.com/ripple/rippled/blob/develop/src/test/csf/README.md)
|
Framework](https://github.com/ripple/rippled/blob/develop/src/test/csf/README.md)
|
||||||
(CSF). However, the CSF currently can only test the generic consensus algorithm
|
(CSF). However, the CSF currently can only test the generic consensus algorithm
|
||||||
as in the paper: [Analysis of the XRP Ledger Consensus
|
as in the paper: [Analysis of the XRP Ledger Consensus
|
||||||
Protocol](https://arxiv.org/abs/1802.07242).
|
Protocol](https://arxiv.org/abs/1802.07242).
|
||||||
|
|||||||
@@ -5,8 +5,8 @@ skinparam roundcorner 20
|
|||||||
skinparam maxmessagesize 160
|
skinparam maxmessagesize 160
|
||||||
|
|
||||||
actor "Rippled Start" as RS
|
actor "Rippled Start" as RS
|
||||||
participant "Timer" as T
|
participant "Timer" as T
|
||||||
participant "NetworkOPs" as NOP
|
participant "NetworkOPs" as NOP
|
||||||
participant "ValidatorList" as VL #lightgreen
|
participant "ValidatorList" as VL #lightgreen
|
||||||
participant "Consensus" as GC
|
participant "Consensus" as GC
|
||||||
participant "ConsensusAdaptor" as CA #lightgreen
|
participant "ConsensusAdaptor" as CA #lightgreen
|
||||||
@@ -20,7 +20,7 @@ VL -> NOP
|
|||||||
NOP -> VL: update trusted validators
|
NOP -> VL: update trusted validators
|
||||||
activate VL
|
activate VL
|
||||||
VL -> VL: re-calculate quorum
|
VL -> VL: re-calculate quorum
|
||||||
hnote over VL#lightgreen: ignore negative listed validators\nwhen calculating quorum
|
hnote over VL#lightgreen: ignore negative listed validators\nwhen calculating quorum
|
||||||
VL -> NOP
|
VL -> NOP
|
||||||
deactivate VL
|
deactivate VL
|
||||||
NOP -> GC: start round
|
NOP -> GC: start round
|
||||||
@@ -36,14 +36,14 @@ activate GC
|
|||||||
end
|
end
|
||||||
|
|
||||||
alt phase == OPEN
|
alt phase == OPEN
|
||||||
alt should close ledger
|
alt should close ledger
|
||||||
GC -> GC: phase = ESTABLISH
|
GC -> GC: phase = ESTABLISH
|
||||||
GC -> CA: onClose
|
GC -> CA: onClose
|
||||||
activate CA
|
activate CA
|
||||||
alt sqn%256==0
|
alt sqn%256==0
|
||||||
CA -[#green]> RM: <font color=green>getValidations
|
CA -[#green]> RM: <font color=green>getValidations
|
||||||
CA -[#green]> CA: <font color=green>create UNLModify Tx
|
CA -[#green]> CA: <font color=green>create UNLModify Tx
|
||||||
hnote over CA#lightgreen: use validations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
|
hnote over CA#lightgreen: use validations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.
|
||||||
end
|
end
|
||||||
CA -> GC
|
CA -> GC
|
||||||
GC -> CA: propose
|
GC -> CA: propose
|
||||||
@@ -61,14 +61,14 @@ else phase == ESTABLISH
|
|||||||
CA -> CA : build LCL
|
CA -> CA : build LCL
|
||||||
hnote over CA #lightgreen: copy negative UNL from parent ledger
|
hnote over CA #lightgreen: copy negative UNL from parent ledger
|
||||||
alt sqn%256==0
|
alt sqn%256==0
|
||||||
CA -[#green]> CA: <font color=green>Adjust negative UNL
|
CA -[#green]> CA: <font color=green>Adjust negative UNL
|
||||||
CA -[#green]> CA: <font color=green>apply UNLModify Tx
|
CA -[#green]> CA: <font color=green>apply UNLModify Tx
|
||||||
end
|
end
|
||||||
CA -> CA : validate and send validation message
|
CA -> CA : validate and send validation message
|
||||||
activate NOP
|
activate NOP
|
||||||
CA -> NOP : end consensus and\n<b>begin next consensus round
|
CA -> NOP : end consensus and\n<b>begin next consensus round
|
||||||
deactivate NOP
|
deactivate NOP
|
||||||
deactivate CA
|
deactivate CA
|
||||||
hnote over RM: receive validations
|
hnote over RM: receive validations
|
||||||
end
|
end
|
||||||
else phase == ACCEPTED
|
else phase == ACCEPTED
|
||||||
@@ -76,4 +76,4 @@ else phase == ACCEPTED
|
|||||||
end
|
end
|
||||||
deactivate GC
|
deactivate GC
|
||||||
|
|
||||||
@enduml
|
@enduml
|
||||||
|
|||||||
@@ -82,7 +82,9 @@ pattern and the way coroutines are implemented, where every yield saves the spot
|
|||||||
in the code where it left off and every resume jumps back to that spot.
|
in the code where it left off and every resume jumps back to that spot.
|
||||||
|
|
||||||
### Sequence Diagram
|
### Sequence Diagram
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
### Class Diagram
|
### Class Diagram
|
||||||
|
|
||||||

|

|
||||||
|
|||||||
@@ -4,7 +4,7 @@ class TimeoutCounter {
|
|||||||
#app_ : Application&
|
#app_ : Application&
|
||||||
}
|
}
|
||||||
|
|
||||||
TimeoutCounter o-- "1" Application
|
TimeoutCounter o-- "1" Application
|
||||||
': app_
|
': app_
|
||||||
|
|
||||||
Stoppable <.. Application
|
Stoppable <.. Application
|
||||||
@@ -14,13 +14,13 @@ class Application {
|
|||||||
-m_inboundLedgers : uptr<InboundLedgers>
|
-m_inboundLedgers : uptr<InboundLedgers>
|
||||||
}
|
}
|
||||||
|
|
||||||
Application *-- "1" LedgerReplayer
|
Application *-- "1" LedgerReplayer
|
||||||
': m_ledgerReplayer
|
': m_ledgerReplayer
|
||||||
Application *-- "1" InboundLedgers
|
Application *-- "1" InboundLedgers
|
||||||
': m_inboundLedgers
|
': m_inboundLedgers
|
||||||
|
|
||||||
Stoppable <.. InboundLedgers
|
Stoppable <.. InboundLedgers
|
||||||
Application "1" --o InboundLedgers
|
Application "1" --o InboundLedgers
|
||||||
': app_
|
': app_
|
||||||
|
|
||||||
class InboundLedgers {
|
class InboundLedgers {
|
||||||
@@ -28,9 +28,9 @@ class InboundLedgers {
|
|||||||
}
|
}
|
||||||
|
|
||||||
Stoppable <.. LedgerReplayer
|
Stoppable <.. LedgerReplayer
|
||||||
InboundLedgers "1" --o LedgerReplayer
|
InboundLedgers "1" --o LedgerReplayer
|
||||||
': inboundLedgers_
|
': inboundLedgers_
|
||||||
Application "1" --o LedgerReplayer
|
Application "1" --o LedgerReplayer
|
||||||
': app_
|
': app_
|
||||||
|
|
||||||
class LedgerReplayer {
|
class LedgerReplayer {
|
||||||
@@ -42,17 +42,17 @@ class LedgerReplayer {
|
|||||||
-skipLists_ : hash_map<u256, wptr<SkipListAcquire>>
|
-skipLists_ : hash_map<u256, wptr<SkipListAcquire>>
|
||||||
}
|
}
|
||||||
|
|
||||||
LedgerReplayer *-- LedgerReplayTask
|
LedgerReplayer *-- LedgerReplayTask
|
||||||
': tasks_
|
': tasks_
|
||||||
LedgerReplayer o-- LedgerDeltaAcquire
|
LedgerReplayer o-- LedgerDeltaAcquire
|
||||||
': deltas_
|
': deltas_
|
||||||
LedgerReplayer o-- SkipListAcquire
|
LedgerReplayer o-- SkipListAcquire
|
||||||
': skipLists_
|
': skipLists_
|
||||||
|
|
||||||
TimeoutCounter <.. LedgerReplayTask
|
TimeoutCounter <.. LedgerReplayTask
|
||||||
InboundLedgers "1" --o LedgerReplayTask
|
InboundLedgers "1" --o LedgerReplayTask
|
||||||
': inboundLedgers_
|
': inboundLedgers_
|
||||||
LedgerReplayer "1" --o LedgerReplayTask
|
LedgerReplayer "1" --o LedgerReplayTask
|
||||||
': replayer_
|
': replayer_
|
||||||
|
|
||||||
class LedgerReplayTask {
|
class LedgerReplayTask {
|
||||||
@@ -63,15 +63,15 @@ class LedgerReplayTask {
|
|||||||
+addDelta(sptr<LedgerDeltaAcquire>)
|
+addDelta(sptr<LedgerDeltaAcquire>)
|
||||||
}
|
}
|
||||||
|
|
||||||
LedgerReplayTask *-- "1" SkipListAcquire
|
LedgerReplayTask *-- "1" SkipListAcquire
|
||||||
': skipListAcquirer_
|
': skipListAcquirer_
|
||||||
LedgerReplayTask *-- LedgerDeltaAcquire
|
LedgerReplayTask *-- LedgerDeltaAcquire
|
||||||
': deltas_
|
': deltas_
|
||||||
|
|
||||||
TimeoutCounter <.. SkipListAcquire
|
TimeoutCounter <.. SkipListAcquire
|
||||||
InboundLedgers "1" --o SkipListAcquire
|
InboundLedgers "1" --o SkipListAcquire
|
||||||
': inboundLedgers_
|
': inboundLedgers_
|
||||||
LedgerReplayer "1" --o SkipListAcquire
|
LedgerReplayer "1" --o SkipListAcquire
|
||||||
': replayer_
|
': replayer_
|
||||||
LedgerReplayTask --o SkipListAcquire : implicit via callback
|
LedgerReplayTask --o SkipListAcquire : implicit via callback
|
||||||
|
|
||||||
@@ -83,9 +83,9 @@ class SkipListAcquire {
|
|||||||
}
|
}
|
||||||
|
|
||||||
TimeoutCounter <.. LedgerDeltaAcquire
|
TimeoutCounter <.. LedgerDeltaAcquire
|
||||||
InboundLedgers "1" --o LedgerDeltaAcquire
|
InboundLedgers "1" --o LedgerDeltaAcquire
|
||||||
': inboundLedgers_
|
': inboundLedgers_
|
||||||
LedgerReplayer "1" --o LedgerDeltaAcquire
|
LedgerReplayer "1" --o LedgerDeltaAcquire
|
||||||
': replayer_
|
': replayer_
|
||||||
LedgerReplayTask --o LedgerDeltaAcquire : implicit via callback
|
LedgerReplayTask --o LedgerDeltaAcquire : implicit via callback
|
||||||
|
|
||||||
@@ -95,4 +95,4 @@ class LedgerDeltaAcquire {
|
|||||||
-replayer_ : LedgerReplayer&
|
-replayer_ : LedgerReplayer&
|
||||||
-dataReadyCallbacks_ : vector<callback>
|
-dataReadyCallbacks_ : vector<callback>
|
||||||
}
|
}
|
||||||
@enduml
|
@enduml
|
||||||
|
|||||||
@@ -38,7 +38,7 @@ deactivate lr
|
|||||||
loop
|
loop
|
||||||
lr -> lda : make_shared(ledgerId, ledgerSeq)
|
lr -> lda : make_shared(ledgerId, ledgerSeq)
|
||||||
return delta
|
return delta
|
||||||
lr -> lrt : addDelta(delta)
|
lr -> lrt : addDelta(delta)
|
||||||
lrt -> lda : addDataCallback(callback)
|
lrt -> lda : addDataCallback(callback)
|
||||||
return
|
return
|
||||||
return
|
return
|
||||||
@@ -62,7 +62,7 @@ deactivate peer
|
|||||||
lr -> lda : processData(ledgerHeader, txns)
|
lr -> lda : processData(ledgerHeader, txns)
|
||||||
lda -> lda : notify()
|
lda -> lda : notify()
|
||||||
note over lda: call the callbacks added by\naddDataCallback(callback).
|
note over lda: call the callbacks added by\naddDataCallback(callback).
|
||||||
lda -> lrt : callback(ledgerId)
|
lda -> lrt : callback(ledgerId)
|
||||||
lrt -> lrt : deltaReady(ledgerId)
|
lrt -> lrt : deltaReady(ledgerId)
|
||||||
lrt -> lrt : tryAdvance()
|
lrt -> lrt : tryAdvance()
|
||||||
loop as long as child can be built
|
loop as long as child can be built
|
||||||
@@ -82,4 +82,4 @@ deactivate peer
|
|||||||
deactivate peer
|
deactivate peer
|
||||||
|
|
||||||
|
|
||||||
@enduml
|
@enduml
|
||||||
|
|||||||
@@ -16,5 +16,5 @@
|
|||||||
## Function
|
## Function
|
||||||
|
|
||||||
- Minimize external dependencies
|
- Minimize external dependencies
|
||||||
* Pass options in the ctor instead of using theConfig
|
- Pass options in the ctor instead of using theConfig
|
||||||
* Use as few other classes as possible
|
- Use as few other classes as possible
|
||||||
|
|||||||
@@ -1,18 +1,18 @@
|
|||||||
# Coding Standards
|
# Coding Standards
|
||||||
|
|
||||||
Coding standards used here gradually evolve and propagate through
|
Coding standards used here gradually evolve and propagate through
|
||||||
code reviews. Some aspects are enforced more strictly than others.
|
code reviews. Some aspects are enforced more strictly than others.
|
||||||
|
|
||||||
## Rules
|
## Rules
|
||||||
|
|
||||||
These rules only apply to our own code. We can't enforce any sort of
|
These rules only apply to our own code. We can't enforce any sort of
|
||||||
style on the external repositories and libraries we include. The best
|
style on the external repositories and libraries we include. The best
|
||||||
guideline is to maintain the standards that are used in those libraries.
|
guideline is to maintain the standards that are used in those libraries.
|
||||||
|
|
||||||
* Tab inserts 4 spaces. No tab characters.
|
- Tab inserts 4 spaces. No tab characters.
|
||||||
* Braces are indented in the [Allman style][1].
|
- Braces are indented in the [Allman style][1].
|
||||||
* Modern C++ principles. No naked ```new``` or ```delete```.
|
- Modern C++ principles. No naked `new` or `delete`.
|
||||||
* Line lengths limited to 80 characters. Exceptions limited to data and tables.
|
- Line lengths limited to 80 characters. Exceptions limited to data and tables.
|
||||||
|
|
||||||
## Guidelines
|
## Guidelines
|
||||||
|
|
||||||
@@ -21,17 +21,17 @@ why you're doing it. Think, use common sense, and consider that this
|
|||||||
your changes will probably need to be maintained long after you've
|
your changes will probably need to be maintained long after you've
|
||||||
moved on to other projects.
|
moved on to other projects.
|
||||||
|
|
||||||
* Use white space and blank lines to guide the eye and keep your intent clear.
|
- Use white space and blank lines to guide the eye and keep your intent clear.
|
||||||
* Put private data members at the top of a class, and the 6 public special
|
- Put private data members at the top of a class, and the 6 public special
|
||||||
members immediately after, in the following order:
|
members immediately after, in the following order:
|
||||||
* Destructor
|
- Destructor
|
||||||
* Default constructor
|
- Default constructor
|
||||||
* Copy constructor
|
- Copy constructor
|
||||||
* Copy assignment
|
- Copy assignment
|
||||||
* Move constructor
|
- Move constructor
|
||||||
* Move assignment
|
- Move assignment
|
||||||
* Don't over-inline by defining large functions within the class
|
- Don't over-inline by defining large functions within the class
|
||||||
declaration, not even for template classes.
|
declaration, not even for template classes.
|
||||||
|
|
||||||
## Formatting
|
## Formatting
|
||||||
|
|
||||||
@@ -39,44 +39,44 @@ The goal of source code formatting should always be to make things as easy to
|
|||||||
read as possible. White space is used to guide the eye so that details are not
|
read as possible. White space is used to guide the eye so that details are not
|
||||||
overlooked. Blank lines are used to separate code into "paragraphs."
|
overlooked. Blank lines are used to separate code into "paragraphs."
|
||||||
|
|
||||||
* Always place a space before and after all binary operators,
|
- Always place a space before and after all binary operators,
|
||||||
especially assignments (`operator=`).
|
especially assignments (`operator=`).
|
||||||
* The `!` operator should be preceded by a space, but not followed by one.
|
- The `!` operator should be preceded by a space, but not followed by one.
|
||||||
* The `~` operator should be preceded by a space, but not followed by one.
|
- The `~` operator should be preceded by a space, but not followed by one.
|
||||||
* The `++` and `--` operators should have no spaces between the operator and
|
- The `++` and `--` operators should have no spaces between the operator and
|
||||||
the operand.
|
the operand.
|
||||||
* A space never appears before a comma, and always appears after a comma.
|
- A space never appears before a comma, and always appears after a comma.
|
||||||
* Don't put spaces after a parenthesis. A typical member function call might
|
- Don't put spaces after a parenthesis. A typical member function call might
|
||||||
look like this: `foobar (1, 2, 3);`
|
look like this: `foobar (1, 2, 3);`
|
||||||
* In general, leave a blank line before an `if` statement.
|
- In general, leave a blank line before an `if` statement.
|
||||||
* In general, leave a blank line after a closing brace `}`.
|
- In general, leave a blank line after a closing brace `}`.
|
||||||
* Do not place code on the same line as any opening or
|
- Do not place code on the same line as any opening or
|
||||||
closing brace.
|
closing brace.
|
||||||
* Do not write `if` statements all-on-one-line. The exception to this is when
|
- Do not write `if` statements all-on-one-line. The exception to this is when
|
||||||
you've got a sequence of similar `if` statements, and are aligning them all
|
you've got a sequence of similar `if` statements, and are aligning them all
|
||||||
vertically to highlight their similarities.
|
vertically to highlight their similarities.
|
||||||
* In an `if-else` statement, if you surround one half of the statement with
|
- In an `if-else` statement, if you surround one half of the statement with
|
||||||
braces, you also need to put braces around the other half, to match.
|
braces, you also need to put braces around the other half, to match.
|
||||||
* When writing a pointer type, use this spacing: `SomeObject* myObject`.
|
- When writing a pointer type, use this spacing: `SomeObject* myObject`.
|
||||||
Technically, a more correct spacing would be `SomeObject *myObject`, but
|
Technically, a more correct spacing would be `SomeObject *myObject`, but
|
||||||
it makes more sense for the asterisk to be grouped with the type name,
|
it makes more sense for the asterisk to be grouped with the type name,
|
||||||
since being a pointer is part of the type, not the variable name. The only
|
since being a pointer is part of the type, not the variable name. The only
|
||||||
time that this can lead to any problems is when you're declaring multiple
|
time that this can lead to any problems is when you're declaring multiple
|
||||||
pointers of the same type in the same statement - which leads on to the next
|
pointers of the same type in the same statement - which leads on to the next
|
||||||
rule:
|
rule:
|
||||||
* When declaring multiple pointers, never do so in a single statement, e.g.
|
- When declaring multiple pointers, never do so in a single statement, e.g.
|
||||||
`SomeObject* p1, *p2;` - instead, always split them out onto separate lines
|
`SomeObject* p1, *p2;` - instead, always split them out onto separate lines
|
||||||
and write the type name again, to make it quite clear what's going on, and
|
and write the type name again, to make it quite clear what's going on, and
|
||||||
avoid the danger of missing out any vital asterisks.
|
avoid the danger of missing out any vital asterisks.
|
||||||
* The previous point also applies to references, so always put the `&` next to
|
- The previous point also applies to references, so always put the `&` next to
|
||||||
the type rather than the variable, e.g. `void foo (Thing const& thing)`. And
|
the type rather than the variable, e.g. `void foo (Thing const& thing)`. And
|
||||||
don't put a space on both sides of the `*` or `&` - always put a space after
|
don't put a space on both sides of the `*` or `&` - always put a space after
|
||||||
it, but never before it.
|
it, but never before it.
|
||||||
* The word `const` should be placed to the right of the thing that it modifies,
|
- The word `const` should be placed to the right of the thing that it modifies,
|
||||||
for consistency. For example `int const` refers to an int which is const.
|
for consistency. For example `int const` refers to an int which is const.
|
||||||
`int const*` is a pointer to an int which is const. `int *const` is a const
|
`int const*` is a pointer to an int which is const. `int *const` is a const
|
||||||
pointer to an int.
|
pointer to an int.
|
||||||
* Always place a space in between the template angle brackets and the type
|
- Always place a space in between the template angle brackets and the type
|
||||||
name. Template code is already hard enough to read!
|
name. Template code is already hard enough to read!
|
||||||
|
|
||||||
[1]: http://en.wikipedia.org/wiki/Indent_style#Allman_style
|
[1]: http://en.wikipedia.org/wiki/Indent_style#Allman_style
|
||||||
|
|||||||
@@ -31,7 +31,7 @@ and header under /opt/local/include:
|
|||||||
|
|
||||||
$ scons clang profile-jemalloc=/opt/local
|
$ scons clang profile-jemalloc=/opt/local
|
||||||
|
|
||||||
----------------------
|
---
|
||||||
|
|
||||||
## Using the jemalloc library from within the code
|
## Using the jemalloc library from within the code
|
||||||
|
|
||||||
@@ -60,4 +60,3 @@ Linking against the jemalloc library will override
|
|||||||
the system's default `malloc()` and related functions with jemalloc's
|
the system's default `malloc()` and related functions with jemalloc's
|
||||||
implementation. This is the case even if the code is not instrumented
|
implementation. This is the case even if the code is not instrumented
|
||||||
to use jemalloc's specific API.
|
to use jemalloc's specific API.
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user