Compare commits

2 commits: tapanito/f ... 0.50.3

| Author | SHA1       | Date |
| ------ | ---------- | ---- |
|        | 82de944b30 |      |
|        | fb31380abd |      |
.clang-format (103 lines removed)
@@ -1,103 +0,0 @@

```yaml
---
BreakBeforeBraces: Custom
BraceWrapping:
  AfterClass: true
  AfterControlStatement: true
  AfterEnum: false
  AfterFunction: true
  AfterNamespace: false
  AfterObjCDeclaration: true
  AfterStruct: true
  AfterUnion: true
  BeforeCatch: true
  BeforeElse: true
  IndentBraces: false
KeepEmptyLinesAtTheStartOfBlocks: false
MaxEmptyLinesToKeep: 1
---
Language: Cpp
AccessModifierOffset: -4
AlignAfterOpenBracket: AlwaysBreak
AlignConsecutiveAssignments: false
AlignConsecutiveDeclarations: false
AlignEscapedNewlinesLeft: true
AlignOperands: false
AlignTrailingComments: true
AllowAllParametersOfDeclarationOnNextLine: false
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: false
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterReturnType: All
AlwaysBreakBeforeMultilineStrings: true
AlwaysBreakTemplateDeclarations: true
BinPackArguments: false
BinPackParameters: false
BreakBeforeBinaryOperators: false
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: true
ColumnLimit: 80
CommentPragmas: "^ IWYU pragma:"
ConstructorInitializerAllOnOneLineOrOnePerLine: true
ConstructorInitializerIndentWidth: 4
ContinuationIndentWidth: 4
Cpp11BracedListStyle: true
DerivePointerAlignment: false
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
ForEachMacros: [Q_FOREACH, BOOST_FOREACH]
IncludeBlocks: Regroup
IncludeCategories:
  - Regex: "^<(test)/"
    Priority: 0
  - Regex: "^<(xrpld)/"
    Priority: 1
  - Regex: "^<(xrpl)/"
    Priority: 2
  - Regex: "^<(boost)/"
    Priority: 3
  - Regex: "^.*/"
    Priority: 4
  - Regex: '^.*\.h'
    Priority: 5
  - Regex: ".*"
    Priority: 6
IncludeIsMainRegex: "$"
IndentCaseLabels: true
IndentFunctionDeclarationAfterType: false
IndentRequiresClause: true
IndentWidth: 4
IndentWrappedFunctionNames: false
NamespaceIndentation: None
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: false
PenaltyBreakBeforeFirstCallParameter: 1
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 200
PointerAlignment: Left
ReflowComments: true
RequiresClausePosition: OwnLine
SortIncludes: true
SpaceAfterCStyleCast: false
SpaceBeforeAssignmentOperators: true
SpaceBeforeParens: ControlStatements
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: false
SpacesInContainerLiterals: true
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Cpp11
TabWidth: 8
UseTab: Never
QualifierAlignment: Right
---
Language: Proto
BasedOnStyle: Google
ColumnLimit: 0
IndentWidth: 2
```
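To see the effect of this style on a source file, one can run clang-format locally. A minimal sketch, assuming clang-format is installed and the command is run from the repository root so `--style=file` picks up the `.clang-format` above (the file path is illustrative):

```bash
# Reformat one file in place according to the repo's .clang-format.
clang-format -i --style=file src/xrpld/app/main/Main.cpp

# Or just check formatting without modifying the file.
clang-format --dry-run --Werror --style=file src/xrpld/app/main/Main.cpp
```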
.codecov.yml (37 lines removed)
@@ -1,37 +0,0 @@

```yaml
codecov:
  require_ci_to_pass: true

comment:
  behavior: default
  layout: reach,diff,flags,tree,reach
  show_carryforward_flags: false

coverage:
  range: "70..85"
  precision: 1
  round: nearest
  status:
    project:
      default:
        target: 75%
        threshold: 2%
    patch:
      default:
        target: auto
        threshold: 2%
    changes: false

github_checks:
  annotations: true

parsers:
  cobertura:
    partials_as_hits: true
    handle_missing_conditions: true

slack_app: false

ignore:
  - "src/test/"
  - "include/xrpl/beast/test/"
  - "include/xrpl/beast/unit_test/"
```
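The `coverage.xml` report that Codecov ingests is produced by the CI coverage job described later in this diff (the strategy-matrix generator and the build-test action). A rough local sketch of the same build, assuming Conan dependencies have already been installed into `.build` as the build-deps action does:

```bash
# Configure with the coverage options used by CI, then build the coverage target.
cd .build
cmake -G Ninja \
  -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
  -DCMAKE_BUILD_TYPE=Debug \
  -Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON \
  -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 \
  -Dtests=ON -Dxrpld=ON ..
cmake --build . --target coverage --parallel "$(nproc)"  # writes coverage.xml
```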
.git-blame-ignore-revs (16 lines removed)
@@ -1,16 +0,0 @@

```text
# This feature requires Git >= 2.24
# To use it by default in git blame:
# git config blame.ignoreRevsFile .git-blame-ignore-revs
50760c693510894ca368e90369b0cc2dabfd07f3
e2384885f5f630c8f0ffe4bf21a169b433a16858
241b9ddde9e11beb7480600fd5ed90e1ef109b21
760f16f56835663d9286bd29294d074de26a7ba6
0eebe6a5f4246fced516d52b83ec4e7f47373edd
2189cc950c0cebb89e4e2fa3b2d8817205bf7cef
b9d007813378ad0ff45660dc07285b823c7e9855
fe9a5365b8a52d4acc42eb27369247e6f238a4f9
9a93577314e6a8d4b4a8368cc9d2b15a5d8303e8
552377c76f55b403a1c876df873a23d780fcc81c
97f0747e103f13e26e45b731731059b32f7679ac
b13370ac0d207217354f1fc1c29aef87769fb8a1
896b8c3b54a22b0497cb0d1ce95e1095f9a227ce
```
.github/CODEOWNERS (8 lines removed)
@@ -1,8 +0,0 @@

```text
# Allow anyone to review any change by default.
*

# Require the rpc-reviewers team to review changes to the rpc code.
include/xrpl/protocol/ @xrplf/rpc-reviewers
src/libxrpl/protocol/ @xrplf/rpc-reviewers
src/xrpld/rpc/ @xrplf/rpc-reviewers
src/xrpld/app/misc/ @xrplf/rpc-reviewers
```
.github/ISSUE_TEMPLATE/bug_report.md (36 lines removed)
@@ -1,36 +0,0 @@

```markdown
---
name: Bug Report
about: Create a report to help us improve rippled
title: "[Title with short description] (Version: [rippled version])"
labels: ""
assignees: ""
---

<!-- Please search existing issues to avoid creating duplicates.-->

## Issue Description

<!--Provide a summary for your issue/bug.-->

## Steps to Reproduce

<!--List in detail the exact steps to reproduce the unexpected behavior of the software.-->

## Expected Result

<!--Explain in detail what behavior you expected to happen.-->

## Actual Result

<!--Explain in detail what behavior actually happened.-->

## Environment

<!--Please describe your environment setup (such as Ubuntu 18.04 with Boost 1.70).-->
<!-- If you are using a formal release, please use the version returned by './rippled --version' as the version number-->
<!-- If you are working off of develop, please add the git hash via 'git rev-parse HEAD'-->

## Supporting Files

<!--If you have supporting files such as a log, feel free to post a link here using Github Gist.-->
<!--Consider adding configuration files with private information removed via Github Gist. -->
```
.github/ISSUE_TEMPLATE/config.yml (8 lines removed)
@@ -1,8 +0,0 @@

```yaml
blank_issues_enabled: false
contact_links:
  - name: XRP Ledger Documentation
    url: https://xrpl.org/
    about: All things about XRPL
  - name: Security bug bounty program
    url: https://ripple.com/bug-bounty/
    about: Please report security-relevant bugs in our software here.
```
.github/ISSUE_TEMPLATE/feature_request.md (25 lines removed)
@@ -1,25 +0,0 @@

```markdown
---
name: Feature Request
about: Suggest a new feature for the rippled project
title: "[Title with short description] (Version: [rippled version])"
labels: Feature Request
assignees: ""
---

<!-- Please search existing issues to avoid creating duplicates.-->

## Summary

<!-- Provide a summary to the feature request-->

## Motivation

<!-- Why do we need this feature?-->

## Solution

<!-- What is the solution?-->

## Paths Not Taken

<!-- What other alternatives have been considered?-->
```
.github/actions/build-deps/action.yml (62 lines removed)
@@ -1,62 +0,0 @@

```yaml
# This action installs and optionally uploads Conan dependencies to a remote
# repository. The dependencies will only be uploaded if the credentials are
# provided.
name: Build Conan dependencies

# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
  build_dir:
    description: "The directory where to build."
    required: true
  build_type:
    description: 'The build type to use ("Debug", "Release").'
    required: true
  conan_remote_name:
    description: "The name of the Conan remote to use."
    required: true
  conan_remote_url:
    description: "The URL of the Conan endpoint to use."
    required: true
  conan_remote_username:
    description: "The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
    required: false
    default: ""
  conan_remote_password:
    description: "The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
    required: false
    default: ""
  force_build:
    description: 'Force building of all dependencies ("true", "false").'
    required: false
    default: "false"
  force_upload:
    description: 'Force uploading of all dependencies ("true", "false").'
    required: false
    default: "false"

runs:
  using: composite
  steps:
    - name: Install Conan dependencies
      shell: bash
      run: |
        echo 'Installing dependencies.'
        mkdir -p ${{ inputs.build_dir }}
        cd ${{ inputs.build_dir }}
        conan install \
          --output-folder . \
          --build ${{ inputs.force_build == 'true' && '"*"' || 'missing' }} \
          --options:host '&:tests=True' \
          --options:host '&:xrpld=True' \
          --settings:all build_type=${{ inputs.build_type }} \
          --format=json ..
    - name: Upload Conan dependencies
      if: ${{ inputs.conan_remote_username != '' && inputs.conan_remote_password != '' }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo "Logging into Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
        conan remote login ${{ inputs.conan_remote_name }} "${{ inputs.conan_remote_username }}" --password "${{ inputs.conan_remote_password }}"
        echo 'Uploading dependencies.'
        conan upload '*' --confirm --check ${{ inputs.force_upload == 'true' && '--force' || '' }} --remote=${{ inputs.conan_remote_name }}
```
.github/actions/build-test/action.yml (95 lines removed)
@@ -1,95 +0,0 @@

```yaml
# This action build and tests the binary. The Conan dependencies must have
# already been installed (see the build-deps action).
name: Build and Test

# Note that actions do not support 'type' and all inputs are strings, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/metadata-syntax#inputs.
inputs:
  build_dir:
    description: "The directory where to build."
    required: true
  build_only:
    description: 'Whether to only build or to build and test the code ("true", "false").'
    required: false
    default: "false"
  build_type:
    description: 'The build type to use ("Debug", "Release").'
    required: true
  cmake_args:
    description: "Additional arguments to pass to CMake."
    required: false
    default: ""
  cmake_target:
    description: "The CMake target to build."
    required: true
  codecov_token:
    description: "The Codecov token to use for uploading coverage reports."
    required: false
    default: ""
  os:
    description: 'The operating system to use for the build ("linux", "macos", "windows").'
    required: true

runs:
  using: composite
  steps:
    - name: Configure CMake
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Configuring CMake.'
        cmake \
          -G '${{ inputs.os == 'windows' && 'Visual Studio 17 2022' || 'Ninja' }}' \
          -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
          -DCMAKE_BUILD_TYPE=${{ inputs.build_type }} \
          ${{ inputs.cmake_args }} \
          ..
    - name: Build the binary
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Building binary.'
        cmake \
          --build . \
          --config ${{ inputs.build_type }} \
          --parallel $(nproc) \
          --target ${{ inputs.cmake_target }}
    - name: Check linking
      if: ${{ inputs.os == 'linux' }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Checking linking.'
        ldd ./rippled
        if [ "$(ldd ./rippled | grep -E '(libstdc\+\+|libgcc)' | wc -l)" -eq 0 ]; then
          echo 'The binary is statically linked.'
        else
          echo 'The binary is dynamically linked.'
          exit 1
        fi
    - name: Verify voidstar
      if: ${{ contains(inputs.cmake_args, '-Dvoidstar=ON') }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}
      run: |
        echo 'Verifying presence of instrumentation.'
        ./rippled --version | grep libvoidstar
    - name: Test the binary
      if: ${{ inputs.build_only == 'false' }}
      shell: bash
      working-directory: ${{ inputs.build_dir }}/${{ inputs.os == 'windows' && inputs.build_type || '' }}
      run: |
        echo 'Testing binary.'
        ./rippled --unittest --unittest-jobs $(nproc)
        ctest -j $(nproc) --output-on-failure
    - name: Upload coverage report
      if: ${{ inputs.cmake_target == 'coverage' }}
      uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
      with:
        disable_search: true
        disable_telem: true
        fail_ci_if_error: true
        files: ${{ inputs.build_dir }}/coverage.xml
        plugins: noop
        token: ${{ inputs.codecov_token }}
        verbose: true
```
.github/pull_request_template.md (85 lines removed)
@@ -1,85 +0,0 @@

```markdown
<!--
This PR template helps you to write a good pull request description.
Please feel free to include additional useful information even beyond what is requested below.

If your branch is on a personal fork and has a name that allows it to
run CI build/test jobs (e.g. "ci/foo"), remember to rename it BEFORE
opening the PR. This avoids unnecessary redundant test runs. Renaming
the branch after opening the PR will close the PR.
https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/managing-branches-in-your-repository/renaming-a-branch
-->

## High Level Overview of Change

<!--
Please include a summary of the changes.
This may be a direct input to the release notes.
If too broad, please consider splitting into multiple PRs.
If a relevant task or issue, please link it here.
-->

### Context of Change

<!--
Please include the context of a change.
If a bug fix, when was the bug introduced? What was the behavior?
If a new feature, why was this architecture chosen? What were the alternatives?
If a refactor, how is this better than the previous implementation?

If there is a spec or design document for this feature, please link it here.
-->

### Type of Change

<!--
Please check [x] relevant options, delete irrelevant ones.
-->

- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Refactor (non-breaking change that only restructures code)
- [ ] Performance (increase or change in throughput and/or latency)
- [ ] Tests (you added tests for code that already exists, or your new feature included in this PR)
- [ ] Documentation update
- [ ] Chore (no impact to binary, e.g. `.gitignore`, formatting, dropping support for older tooling)
- [ ] Release

### API Impact

<!--
Please check [x] relevant options, delete irrelevant ones.

* If there is any impact to the public API methods (HTTP / WebSocket), please update https://github.com/xrplf/rippled/blob/develop/API-CHANGELOG.md
  * Update API-CHANGELOG.md and add the change directly in this PR by pushing to your PR branch.
* libxrpl: See https://github.com/XRPLF/rippled/blob/develop/docs/build/depend.md
* Peer Protocol: See https://xrpl.org/peer-protocol.html
-->

- [ ] Public API: New feature (new methods and/or new fields)
- [ ] Public API: Breaking change (in general, breaking changes should only impact the next api_version)
- [ ] `libxrpl` change (any change that may affect `libxrpl` or dependents of `libxrpl`)
- [ ] Peer protocol change (must be backward compatible or bump the peer protocol version)

<!--
## Before / After
If relevant, use this section for an English description of the change at a technical level.
If this change affects an API, examples should be included here.

For performance-impacting changes, please provide these details:
1. Is this a new feature, bug fix, or improvement to existing functionality?
2. What behavior/functionality does the change impact?
3. In what processing can the impact be measured? Be as specific as possible - e.g. RPC client call, payment transaction that involves LOB, AMM, caching, DB operations, etc.
4. Does this change affect concurrent processing - e.g. does it involve acquiring locks, multi-threaded processing, or async processing?
-->

<!--
## Test Plan
If helpful, please describe the tests that you ran to verify your changes and provide instructions so that others can reproduce.
This section may not be needed if your change includes thoroughly commented unit tests.
-->

<!--
## Future Tasks
For future tasks related to PR.
-->
```
.github/scripts/levelization/README.md (114 lines removed)
@@ -1,114 +0,0 @@

```markdown
# Levelization

Levelization is the term used to describe efforts to prevent rippled from
having or creating cyclic dependencies.

rippled code is organized into directories under `src/rippled` (and
`src/test`) representing modules. The modules are intended to be
organized into "tiers" or "levels" such that a module from one level can
only include code from lower levels. Additionally, a module
in one level should never include code in an `impl` folder of any level
other than it's own.

Unfortunately, over time, enforcement of levelization has been
inconsistent, so the current state of the code doesn't necessarily
reflect these rules. Whenever possible, developers should refactor any
levelization violations they find (by moving files or individual
classes). At the very least, don't make things worse.

The table below summarizes the _desired_ division of modules, based on the
state of the rippled code when it was created. The levels are numbered from
the bottom up with the lower level, lower numbered, more independent
modules listed first, and the higher level, higher numbered modules with
more dependencies listed later.

**tl;dr:** The modules listed first are more independent than the modules
listed later.

| Level / Tier | Module(s) |
| ------------ | --------- |
| 01 | ripple/beast ripple/unity |
| 02 | ripple/basics |
| 03 | ripple/json ripple/crypto |
| 04 | ripple/protocol |
| 05 | ripple/core ripple/conditions ripple/consensus ripple/resource ripple/server |
| 06 | ripple/peerfinder ripple/ledger ripple/nodestore ripple/net |
| 07 | ripple/shamap ripple/overlay |
| 08 | ripple/app |
| 09 | ripple/rpc |
| 10 | ripple/perflog |
| 11 | test/jtx test/beast test/csf |
| 12 | test/unit_test |
| 13 | test/crypto test/conditions test/json test/resource test/shamap test/peerfinder test/basics test/overlay |
| 14 | test |
| 15 | test/net test/protocol test/ledger test/consensus test/core test/server test/nodestore |
| 16 | test/rpc test/app |

(Note that `test` levelization is _much_ less important and _much_ less
strictly enforced than `ripple` levelization, other than the requirement
that `test` code should _never_ be included in `ripple` code.)

## Validation

The [levelization](generate.sh) script takes no parameters,
reads no environment variables, and can be run from any directory,
as long as it is in the expected location in the rippled repo.
It can be run at any time from within a checked out repo, and will
do an analysis of all the `#include`s in
the rippled source. The only caveat is that it runs much slower
under Windows than in Linux. It hasn't yet been tested under MacOS.
It generates many files of [results](results):

- `rawincludes.txt`: The raw dump of the `#includes`
- `paths.txt`: A second dump grouping the source module
  to the destination module, deduped, and with frequency counts.
- `includes/`: A directory where each file represents a module and
  contains a list of modules and counts that the module _includes_.
- `includedby/`: Similar to `includes/`, but the other way around. Each
  file represents a module and contains a list of modules and counts
  that _include_ the module.
- [`loops.txt`](results/loops.txt): A list of direct loops detected
  between modules as they actually exist, as opposed to how they are
  desired as described above. In a perfect repo, this file will be
  empty.
  This file is committed to the repo, and is used by the [levelization
  Github workflow](../../workflows/check-levelization.yml) to validate
  that nothing changed.
- [`ordering.txt`](results/ordering.txt): A list showing relationships
  between modules where there are no loops as they actually exist, as
  opposed to how they are desired as described above.
  This file is committed to the repo, and is used by the [levelization
  Github workflow](../../workflows/check-levelization.yml) to validate
  that nothing changed.
- [`levelization.yml`](../../workflows/check-levelization.yml)
  Github Actions workflow to test that levelization loops haven't
  changed. Unfortunately, if changes are detected, it can't tell if
  they are improvements or not, so if you have resolved any issues or
  done anything else to improve levelization, run `levelization.sh`,
  and commit the updated results.

The `loops.txt` and `ordering.txt` files relate the modules
using comparison signs, which indicate the number of times each
module is included in the other.

- `A > B` means that A should probably be at a higher level than B,
  because B is included in A significantly more than A is included in B.
  These results can be included in both `loops.txt` and `ordering.txt`.
  Because `ordering.txt`only includes relationships where B is not
  included in A at all, it will only include these types of results.
- `A ~= B` means that A and B are included in each other a different
  number of times, but the values are so close that the script can't
  definitively say that one should be above the other. These results
  will only be included in `loops.txt`.
- `A == B` means that A and B include each other the same number of
  times, so the script has no clue which should be higher. These results
  will only be included in `loops.txt`.

The committed files hide the detailed values intentionally, to
prevent false alarms and merging issues, and because it's easy to
get those details locally.

1. Run `levelization.sh`
2. Grep the modules in `paths.txt`.
   - For example, if a cycle is found `A ~= B`, simply `grep -w
     A .github/scripts/levelization/results/paths.txt | grep -w B`
```
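As a concrete illustration of the procedure described in that README, investigating the `xrpld.app ~= xrpld.peerfinder` loop recorded in `loops.txt` below might look like this (module names taken from the committed results in this diff):

```bash
# Regenerate the levelization results, then look at the raw include counts
# behind the reported loop between xrpld.app and xrpld.peerfinder.
cd .github/scripts/levelization
./generate.sh
grep -w xrpld.app results/paths.txt | grep -w xrpld.peerfinder
```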
.github/scripts/levelization/generate.sh (130 lines removed)
@@ -1,130 +0,0 @@

```bash
#!/bin/bash

# Usage: generate.sh
# This script takes no parameters, reads no environment variables,
# and can be run from any directory, as long as it is in the expected
# location in the repo.

pushd $( dirname $0 )

if [ -v PS1 ]
then
  # if the shell is interactive, clean up any flotsam before analyzing
  git clean -ix
fi

# Ensure all sorting is ASCII-order consistently across platforms.
export LANG=C

rm -rfv results
mkdir results
includes="$( pwd )/results/rawincludes.txt"
pushd ../../..
echo Raw includes:
grep -r '^[ ]*#include.*/.*\.h' include src | \
  grep -v boost | tee ${includes}
popd
pushd results

oldifs=${IFS}
IFS=:
mkdir includes
mkdir includedby
echo Build levelization paths
exec 3< ${includes} # open rawincludes.txt for input
while read -r -u 3 file include
do
  level=$( echo ${file} | cut -d/ -f 2,3 )
  # If the "level" indicates a file, cut off the filename
  if [[ "${level##*.}" != "${level}" ]]
  then
    # Use the "toplevel" label as a workaround for `sort`
    # inconsistencies between different utility versions
    level="$( dirname ${level} )/toplevel"
  fi
  level=$( echo ${level} | tr '/' '.' )

  includelevel=$( echo ${include} | sed 's/.*["<]//; s/[">].*//' | \
    cut -d/ -f 1,2 )
  if [[ "${includelevel##*.}" != "${includelevel}" ]]
  then
    # Use the "toplevel" label as a workaround for `sort`
    # inconsistencies between different utility versions
    includelevel="$( dirname ${includelevel} )/toplevel"
  fi
  includelevel=$( echo ${includelevel} | tr '/' '.' )

  if [[ "$level" != "$includelevel" ]]
  then
    echo $level $includelevel | tee -a paths.txt
  fi
done
echo Sort and dedup paths
sort -ds paths.txt | uniq -c | tee sortedpaths.txt
mv sortedpaths.txt paths.txt
exec 3>&- #close fd 3
IFS=${oldifs}
unset oldifs

echo Split into flat-file database
exec 4<paths.txt # open paths.txt for input
while read -r -u 4 count level include
do
  echo ${include} ${count} | tee -a includes/${level}
  echo ${level} ${count} | tee -a includedby/${include}
done
exec 4>&- #close fd 4

loops="$( pwd )/loops.txt"
ordering="$( pwd )/ordering.txt"
pushd includes
echo Search for loops
# Redirect stdout to a file
exec 4>&1
exec 1>"${loops}"
for source in *
do
  if [[ -f "$source" ]]
  then
    exec 5<"${source}" # open for input
    while read -r -u 5 include includefreq
    do
      if [[ -f $include ]]
      then
        if grep -q -w $source $include
        then
          if grep -q -w "Loop: $include $source" "${loops}"
          then
            continue
          fi
          sourcefreq=$( grep -w $source $include | cut -d\ -f2 )
          echo "Loop: $source $include"
          # If the counts are close, indicate that the two modules are
          # on the same level, though they shouldn't be
          if [[ $(( $includefreq - $sourcefreq )) -gt 3 ]]
          then
            echo -e "  $source > $include\n"
          elif [[ $(( $sourcefreq - $includefreq )) -gt 3 ]]
          then
            echo -e "  $include > $source\n"
          elif [[ $sourcefreq -eq $includefreq ]]
          then
            echo -e "  $include == $source\n"
          else
            echo -e "  $include ~= $source\n"
          fi
        else
          echo "$source > $include" >> "${ordering}"
        fi
      fi
    done
    exec 5>&- #close fd 5
  fi
done
exec 1>&4 #close fd 1
exec 4>&- #close fd 4
cat "${ordering}"
cat "${loops}"
popd
popd
popd
```
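For illustration, after a run of the script above the flat-file database it builds under `results/` can be queried directly; a small sketch using module names that appear in the committed results below:

```bash
cd .github/scripts/levelization/results
# Which modules does xrpld.rpc include, and how often?
cat includes/xrpld.rpc
# Which modules include xrpl.basics?
cat includedby/xrpl.basics
```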
.github/scripts/levelization/results/loops.txt (33 lines removed)
@@ -1,33 +0,0 @@

```text
Loop: test.jtx test.toplevel
  test.toplevel > test.jtx

Loop: test.jtx test.unit_test
  test.unit_test == test.jtx

Loop: xrpld.app xrpld.core
  xrpld.app > xrpld.core

Loop: xrpld.app xrpld.ledger
  xrpld.app > xrpld.ledger

Loop: xrpld.app xrpld.overlay
  xrpld.overlay > xrpld.app

Loop: xrpld.app xrpld.peerfinder
  xrpld.peerfinder ~= xrpld.app

Loop: xrpld.app xrpld.rpc
  xrpld.rpc > xrpld.app

Loop: xrpld.app xrpld.shamap
  xrpld.app > xrpld.shamap

Loop: xrpld.core xrpld.perflog
  xrpld.perflog == xrpld.core

Loop: xrpld.overlay xrpld.rpc
  xrpld.rpc ~= xrpld.overlay

Loop: xrpld.perflog xrpld.rpc
  xrpld.rpc ~= xrpld.perflog
```
.github/scripts/levelization/results/ordering.txt (198 lines removed)
@@ -1,198 +0,0 @@

```text
libxrpl.basics > xrpl.basics
libxrpl.crypto > xrpl.basics
libxrpl.json > xrpl.basics
libxrpl.json > xrpl.json
libxrpl.net > xrpl.basics
libxrpl.net > xrpl.net
libxrpl.protocol > xrpl.basics
libxrpl.protocol > xrpl.json
libxrpl.protocol > xrpl.protocol
libxrpl.resource > xrpl.basics
libxrpl.resource > xrpl.json
libxrpl.resource > xrpl.resource
libxrpl.server > xrpl.basics
libxrpl.server > xrpl.json
libxrpl.server > xrpl.protocol
libxrpl.server > xrpl.server
test.app > test.jtx
test.app > test.rpc
test.app > test.toplevel
test.app > test.unit_test
test.app > xrpl.basics
test.app > xrpld.app
test.app > xrpld.core
test.app > xrpld.ledger
test.app > xrpld.nodestore
test.app > xrpld.overlay
test.app > xrpld.rpc
test.app > xrpl.json
test.app > xrpl.protocol
test.app > xrpl.resource
test.basics > test.jtx
test.basics > test.unit_test
test.basics > xrpl.basics
test.basics > xrpld.perflog
test.basics > xrpld.rpc
test.basics > xrpl.json
test.basics > xrpl.protocol
test.beast > xrpl.basics
test.conditions > xrpl.basics
test.conditions > xrpld.conditions
test.consensus > test.csf
test.consensus > test.toplevel
test.consensus > test.unit_test
test.consensus > xrpl.basics
test.consensus > xrpld.app
test.consensus > xrpld.consensus
test.consensus > xrpld.ledger
test.consensus > xrpl.json
test.core > test.jtx
test.core > test.toplevel
test.core > test.unit_test
test.core > xrpl.basics
test.core > xrpld.core
test.core > xrpld.perflog
test.core > xrpl.json
test.core > xrpl.server
test.csf > xrpl.basics
test.csf > xrpld.consensus
test.csf > xrpl.json
test.csf > xrpl.protocol
test.json > test.jtx
test.json > xrpl.json
test.jtx > xrpl.basics
test.jtx > xrpld.app
test.jtx > xrpld.core
test.jtx > xrpld.ledger
test.jtx > xrpld.rpc
test.jtx > xrpl.json
test.jtx > xrpl.net
test.jtx > xrpl.protocol
test.jtx > xrpl.resource
test.jtx > xrpl.server
test.ledger > test.jtx
test.ledger > test.toplevel
test.ledger > xrpl.basics
test.ledger > xrpld.app
test.ledger > xrpld.core
test.ledger > xrpld.ledger
test.ledger > xrpl.protocol
test.nodestore > test.jtx
test.nodestore > test.toplevel
test.nodestore > test.unit_test
test.nodestore > xrpl.basics
test.nodestore > xrpld.core
test.nodestore > xrpld.nodestore
test.nodestore > xrpld.unity
test.overlay > test.jtx
test.overlay > test.toplevel
test.overlay > test.unit_test
test.overlay > xrpl.basics
test.overlay > xrpld.app
test.overlay > xrpld.overlay
test.overlay > xrpld.peerfinder
test.overlay > xrpld.shamap
test.overlay > xrpl.protocol
test.peerfinder > test.beast
test.peerfinder > test.unit_test
test.peerfinder > xrpl.basics
test.peerfinder > xrpld.core
test.peerfinder > xrpld.peerfinder
test.peerfinder > xrpl.protocol
test.protocol > test.toplevel
test.protocol > xrpl.basics
test.protocol > xrpl.json
test.protocol > xrpl.protocol
test.resource > test.unit_test
test.resource > xrpl.basics
test.resource > xrpl.resource
test.rpc > test.jtx
test.rpc > test.toplevel
test.rpc > xrpl.basics
test.rpc > xrpld.app
test.rpc > xrpld.core
test.rpc > xrpld.overlay
test.rpc > xrpld.rpc
test.rpc > xrpl.json
test.rpc > xrpl.protocol
test.rpc > xrpl.resource
test.server > test.jtx
test.server > test.toplevel
test.server > test.unit_test
test.server > xrpl.basics
test.server > xrpld.app
test.server > xrpld.core
test.server > xrpld.rpc
test.server > xrpl.json
test.server > xrpl.server
test.shamap > test.unit_test
test.shamap > xrpl.basics
test.shamap > xrpld.nodestore
test.shamap > xrpld.shamap
test.shamap > xrpl.protocol
test.toplevel > test.csf
test.toplevel > xrpl.json
test.unit_test > xrpl.basics
tests.libxrpl > xrpl.basics
xrpl.json > xrpl.basics
xrpl.net > xrpl.basics
xrpl.protocol > xrpl.basics
xrpl.protocol > xrpl.json
xrpl.resource > xrpl.basics
xrpl.resource > xrpl.json
xrpl.resource > xrpl.protocol
xrpl.server > xrpl.basics
xrpl.server > xrpl.json
xrpl.server > xrpl.protocol
xrpld.app > test.unit_test
xrpld.app > xrpl.basics
xrpld.app > xrpld.conditions
xrpld.app > xrpld.consensus
xrpld.app > xrpld.nodestore
xrpld.app > xrpld.perflog
xrpld.app > xrpl.json
xrpld.app > xrpl.net
xrpld.app > xrpl.protocol
xrpld.app > xrpl.resource
xrpld.conditions > xrpl.basics
xrpld.conditions > xrpl.protocol
xrpld.consensus > xrpl.basics
xrpld.consensus > xrpl.json
xrpld.consensus > xrpl.protocol
xrpld.core > xrpl.basics
xrpld.core > xrpl.json
xrpld.core > xrpl.net
xrpld.core > xrpl.protocol
xrpld.ledger > xrpl.basics
xrpld.ledger > xrpl.json
xrpld.ledger > xrpl.protocol
xrpld.nodestore > xrpl.basics
xrpld.nodestore > xrpld.core
xrpld.nodestore > xrpld.unity
xrpld.nodestore > xrpl.json
xrpld.nodestore > xrpl.protocol
xrpld.overlay > xrpl.basics
xrpld.overlay > xrpld.core
xrpld.overlay > xrpld.peerfinder
xrpld.overlay > xrpld.perflog
xrpld.overlay > xrpl.json
xrpld.overlay > xrpl.protocol
xrpld.overlay > xrpl.resource
xrpld.overlay > xrpl.server
xrpld.peerfinder > xrpl.basics
xrpld.peerfinder > xrpld.core
xrpld.peerfinder > xrpl.protocol
xrpld.perflog > xrpl.basics
xrpld.perflog > xrpl.json
xrpld.rpc > xrpl.basics
xrpld.rpc > xrpld.core
xrpld.rpc > xrpld.ledger
xrpld.rpc > xrpld.nodestore
xrpld.rpc > xrpl.json
xrpld.rpc > xrpl.net
xrpld.rpc > xrpl.protocol
xrpld.rpc > xrpl.resource
xrpld.rpc > xrpl.server
xrpld.shamap > xrpl.basics
xrpld.shamap > xrpld.nodestore
xrpld.shamap > xrpl.protocol
```
.github/scripts/strategy-matrix/generate.py (178 lines removed)
@@ -1,178 +0,0 @@

```python
#!/usr/bin/env python3
import argparse
import itertools
import json
import re

'''
Generate a strategy matrix for GitHub Actions CI.

On each PR commit we will build a selection of Debian, RHEL, Ubuntu, MacOS, and
Windows configurations, while upon merge into the develop, release, or master
branches, we will build all configurations, and test most of them.

We will further set additional CMake arguments as follows:
- All builds will have the `tests`, `werr`, and `xrpld` options.
- All builds will have the `wextra` option except for GCC 12 and Clang 16.
- All release builds will have the `assert` option.
- Certain Debian Bookworm configurations will change the reference fee, enable
  codecov, and enable voidstar in PRs.
'''
def generate_strategy_matrix(all: bool, architecture: list[dict], os: list[dict], build_type: list[str], cmake_args: list[str]) -> dict:
    configurations = []
    for architecture, os, build_type, cmake_args in itertools.product(architecture, os, build_type, cmake_args):
        # The default CMake target is 'all' for Linux and MacOS and 'install'
        # for Windows, but it can get overridden for certain configurations.
        cmake_target = 'install' if os["distro_name"] == 'windows' else 'all'

        # We build and test all configurations by default, except for Windows in
        # Debug, because it is too slow, as well as when code coverage is
        # enabled as that mode already runs the tests.
        build_only = False
        if os['distro_name'] == 'windows' and build_type == 'Debug':
            build_only = True

        # Only generate a subset of configurations in PRs.
        if not all:
            # Debian:
            # - Bookworm using GCC 13: Release and Unity on linux/arm64, set
            #   the reference fee to 500.
            # - Bookworm using GCC 15: Debug and no Unity on linux/amd64, enable
            #   code coverage (which will be done below).
            # - Bookworm using Clang 16: Debug and no Unity on linux/arm64,
            #   enable voidstar.
            # - Bookworm using Clang 17: Release and no Unity on linux/amd64,
            #   set the reference fee to 1000.
            # - Bookworm using Clang 20: Debug and Unity on linux/amd64.
            if os['distro_name'] == 'debian':
                skip = True
                if os['distro_version'] == 'bookworm':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-13' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/arm64':
                        cmake_args = f'-DUNIT_TEST_REFERENCE_FEE=500 {cmake_args}'
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-16' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/arm64':
                        cmake_args = f'-Dvoidstar=ON {cmake_args}'
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-17' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        cmake_args = f'-DUNIT_TEST_REFERENCE_FEE=1000 {cmake_args}'
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-20' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                if skip:
                    continue

            # RHEL:
            # - 9.4 using GCC 12: Debug and Unity on linux/amd64.
            # - 9.6 using Clang: Release and no Unity on linux/amd64.
            if os['distro_name'] == 'rhel':
                skip = True
                if os['distro_version'] == '9.4':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                elif os['distro_version'] == '9.6':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-any' and build_type == 'Release' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                if skip:
                    continue

            # Ubuntu:
            # - Jammy using GCC 12: Debug and no Unity on linux/arm64.
            # - Noble using GCC 14: Release and Unity on linux/amd64.
            # - Noble using Clang 18: Debug and no Unity on linux/amd64.
            # - Noble using Clang 19: Release and Unity on linux/arm64.
            if os['distro_name'] == 'ubuntu':
                skip = True
                if os['distro_version'] == 'jammy':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-12' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/arm64':
                        skip = False
                elif os['distro_version'] == 'noble':
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-14' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-18' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
                        skip = False
                    if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-19' and build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'linux/arm64':
                        skip = False
                if skip:
                    continue

            # MacOS:
            # - Debug and no Unity on macos/arm64.
            if os['distro_name'] == 'macos' and not (build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'macos/arm64'):
                continue

            # Windows:
            # - Release and Unity on windows/amd64.
            if os['distro_name'] == 'windows' and not (build_type == 'Release' and '-Dunity=ON' in cmake_args and architecture['platform'] == 'windows/amd64'):
                continue

        # Additional CMake arguments.
        cmake_args = f'{cmake_args} -Dtests=ON -Dwerr=ON -Dxrpld=ON'
        if not f'{os['compiler_name']}-{os['compiler_version']}' in ['gcc-12', 'clang-16']:
            cmake_args = f'{cmake_args} -Dwextra=ON'
        if build_type == 'Release':
            cmake_args = f'{cmake_args} -Dassert=ON'

        # We skip all RHEL on arm64 due to a build failure that needs further
        # investigation.
        if os['distro_name'] == 'rhel' and architecture['platform'] == 'linux/arm64':
            continue

        # We skip all clang-20 on arm64 due to boost 1.86 build error
        if f'{os['compiler_name']}-{os['compiler_version']}' == 'clang-20' and architecture['platform'] == 'linux/arm64':
            continue

        # Enable code coverage for Debian Bookworm using GCC 15 in Debug and no
        # Unity on linux/amd64
        if f'{os['compiler_name']}-{os['compiler_version']}' == 'gcc-15' and build_type == 'Debug' and '-Dunity=OFF' in cmake_args and architecture['platform'] == 'linux/amd64':
            cmake_args = f'-Dcoverage=ON -Dcoverage_format=xml -DCODE_COVERAGE_VERBOSE=ON -DCMAKE_C_FLAGS=-O0 -DCMAKE_CXX_FLAGS=-O0 {cmake_args}'
            cmake_target = 'coverage'
            build_only = True

        # Generate a unique name for the configuration, e.g. macos-arm64-debug
        # or debian-bookworm-gcc-12-amd64-release-unity.
        config_name = os['distro_name']
        if (n := os['distro_version']) != '':
            config_name += f'-{n}'
        if (n := os['compiler_name']) != '':
            config_name += f'-{n}'
        if (n := os['compiler_version']) != '':
            config_name += f'-{n}'
        config_name += f'-{architecture['platform'][architecture['platform'].find('/')+1:]}'
        config_name += f'-{build_type.lower()}'
        if '-Dunity=ON' in cmake_args:
            config_name += '-unity'

        # Add the configuration to the list, with the most unique fields first,
        # so that they are easier to identify in the GitHub Actions UI, as long
        # names get truncated.
        configurations.append({
            'config_name': config_name,
            'cmake_args': cmake_args,
            'cmake_target': cmake_target,
            'build_only': 'true' if build_only else 'false',
            'build_type': build_type,
            'os': os,
            'architecture': architecture,
        })

    return {'include': configurations}


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-a', '--all', help='Set to generate all configurations (generally used when merging a PR) or leave unset to generate a subset of configurations (generally used when committing to a PR).', action="store_true")
    parser.add_argument('-c', '--config', help='Path to the JSON file containing the strategy matrix configurations.', required=True, type=str)
    args = parser.parse_args()

    # Load the JSON configuration file.
    config = None
    with open(args.config, 'r') as f:
        config = json.load(f)
    if config['architecture'] is None or config['os'] is None or config['build_type'] is None or config['cmake_args'] is None:
        raise Exception('Invalid configuration file.')

    # Generate the strategy matrix.
    print(f'matrix={json.dumps(generate_strategy_matrix(args.all, config['architecture'], config['os'], config['build_type'], config['cmake_args']))}')
```
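This generator is invoked by the build-test workflow later in this diff, but it can also be run locally to inspect the matrix it produces; a minimal sketch (the `jq` step is optional and only an assumption about available tooling):

```bash
cd .github/scripts/strategy-matrix
# Subset of configurations, as used on PR commits:
python generate.py --config=linux.json
# All configurations, as used on merges; strip the 'matrix=' prefix and list config names:
python generate.py --all --config=linux.json | sed 's/^matrix=//' | jq '.include[].config_name'
```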
.github/scripts/strategy-matrix/linux.json (154 lines removed; entries compacted to one per line here for readability)
@@ -1,154 +0,0 @@

```json
{
  "architecture": [
    { "platform": "linux/amd64", "runner": ["self-hosted", "Linux", "X64", "heavy"] },
    { "platform": "linux/arm64", "runner": ["self-hosted", "Linux", "ARM64", "heavy-arm64"] }
  ],
  "os": [
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "gcc", "compiler_version": "12" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "gcc", "compiler_version": "13" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "gcc", "compiler_version": "14" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "gcc", "compiler_version": "15" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "clang", "compiler_version": "16" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "clang", "compiler_version": "17" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "clang", "compiler_version": "18" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "clang", "compiler_version": "19" },
    { "distro_name": "debian", "distro_version": "bookworm", "compiler_name": "clang", "compiler_version": "20" },
    { "distro_name": "rhel", "distro_version": "9.4", "compiler_name": "gcc", "compiler_version": "12" },
    { "distro_name": "rhel", "distro_version": "9.4", "compiler_name": "gcc", "compiler_version": "13" },
    { "distro_name": "rhel", "distro_version": "9.4", "compiler_name": "gcc", "compiler_version": "14" },
    { "distro_name": "rhel", "distro_version": "9.6", "compiler_name": "gcc", "compiler_version": "13" },
    { "distro_name": "rhel", "distro_version": "9.6", "compiler_name": "gcc", "compiler_version": "14" },
    { "distro_name": "rhel", "distro_version": "9.4", "compiler_name": "clang", "compiler_version": "any" },
    { "distro_name": "rhel", "distro_version": "9.6", "compiler_name": "clang", "compiler_version": "any" },
    { "distro_name": "ubuntu", "distro_version": "jammy", "compiler_name": "gcc", "compiler_version": "12" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "gcc", "compiler_version": "13" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "gcc", "compiler_version": "14" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "clang", "compiler_version": "16" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "clang", "compiler_version": "17" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "clang", "compiler_version": "18" },
    { "distro_name": "ubuntu", "distro_version": "noble", "compiler_name": "clang", "compiler_version": "19" }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
}
```
.github/scripts/strategy-matrix/macos.json (21 lines removed; compacted here for readability)
@@ -1,21 +0,0 @@

```json
{
  "architecture": [
    { "platform": "macos/arm64", "runner": ["self-hosted", "macOS", "ARM64", "mac-runner-m1"] }
  ],
  "os": [
    { "distro_name": "macos", "distro_version": "", "compiler_name": "", "compiler_version": "" }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": [
    "-Dunity=OFF -DCMAKE_POLICY_VERSION_MINIMUM=3.5",
    "-Dunity=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5"
  ]
}
```
.github/scripts/strategy-matrix/windows.json (18 lines removed; compacted here for readability)
@@ -1,18 +0,0 @@

```json
{
  "architecture": [
    { "platform": "windows/amd64", "runner": ["windows-latest"] }
  ],
  "os": [
    { "distro_name": "windows", "distro_version": "", "compiler_name": "", "compiler_version": "" }
  ],
  "build_type": ["Debug", "Release"],
  "cmake_args": ["-Dunity=OFF", "-Dunity=ON"]
}
```
.github/workflows/build-test.yml (199 lines removed)
@@ -1,199 +0,0 @@

```yaml
# This workflow builds and tests the binary for various configurations.
name: Build and test

# This workflow can only be triggered by other workflows. Note that the
# workflow_call event does not support the 'choice' input type, see
# https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#onworkflow_callinputsinput_idtype,
# so we use 'string' instead.
on:
  workflow_call:
    inputs:
      build_dir:
        description: "The directory where to build."
        required: false
        type: string
        default: ".build"
      conan_remote_name:
        description: "The name of the Conan remote to use."
        required: true
        type: string
      conan_remote_url:
        description: "The URL of the Conan endpoint to use."
        required: true
        type: string
      dependencies_force_build:
        description: "Force building of all dependencies."
        required: false
        type: boolean
        default: false
      dependencies_force_upload:
        description: "Force uploading of all dependencies."
        required: false
        type: boolean
        default: false
      os:
        description: 'The operating system to use for the build ("linux", "macos", "windows").'
        required: true
        type: string
      strategy_matrix:
        # TODO: Support additional strategies, e.g. "ubuntu" for generating all Ubuntu configurations.
        description: 'The strategy matrix to use for generating the configurations ("minimal", "all").'
        required: false
        type: string
        default: "minimal"
    secrets:
      codecov_token:
        description: "The Codecov token to use for uploading coverage reports."
        required: false
      conan_remote_username:
        description: "The username for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
        required: false
      conan_remote_password:
        description: "The password for logging into the Conan remote. If not provided, the dependencies will not be uploaded."
        required: false

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.os }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  # Generate the strategy matrix to be used by the following job.
  generate-matrix:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Set up Python
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: 3.13
      - name: Generate strategy matrix
        working-directory: .github/scripts/strategy-matrix
        id: generate
        run: python generate.py ${{ inputs.strategy_matrix == 'all' && '--all' || '' }} --config=${{ inputs.os }}.json >> "${GITHUB_OUTPUT}"
    outputs:
      matrix: ${{ steps.generate.outputs.matrix }}

  # Build and test the binary.
  build-test:
    needs:
      - generate-matrix
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
    runs-on: ${{ matrix.architecture.runner }}
    container: ${{ inputs.os == 'linux' && format('ghcr.io/xrplf/ci/{0}-{1}:{2}-{3}', matrix.os.distro_name, matrix.os.distro_version, matrix.os.compiler_name, matrix.os.compiler_version) || null }}
    steps:
      - name: Check strategy matrix
        run: |
          echo 'Operating system distro name: ${{ matrix.os.distro_name }}'
          echo 'Operating system distro version: ${{ matrix.os.distro_version }}'
          echo 'Operating system compiler name: ${{ matrix.os.compiler_name }}'
          echo 'Operating system compiler version: ${{ matrix.os.compiler_version }}'
          echo 'Architecture platform: ${{ matrix.architecture.platform }}'
          echo 'Architecture runner: ${{ toJson(matrix.architecture.runner) }}'
          echo 'Build type: ${{ matrix.build_type }}'
          echo 'Build only: ${{ matrix.build_only }}'
          echo 'CMake arguments: ${{ matrix.cmake_args }}'
          echo 'CMake target: ${{ matrix.cmake_target }}'
          echo 'Config name: ${{ matrix.config_name }}'

      - name: Clean workspace (MacOS)
        if: ${{ inputs.os == 'macos' }}
        run: |
          WORKSPACE=${{ github.workspace }}
          echo "Cleaning workspace '${WORKSPACE}'."
          if [ -z "${WORKSPACE}" ] || [ "${WORKSPACE}" = "/" ]; then
            echo "Invalid working directory '${WORKSPACE}'."
            exit 1
          fi
          find "${WORKSPACE}" -depth 1 | xargs rm -rfv

      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Prepare runner
        uses: XRPLF/actions/.github/actions/prepare-runner@638e0dc11ea230f91bd26622fb542116bb5254d5

      - name: Check configuration (Windows)
        if: ${{ inputs.os == 'windows' }}
        run: |
          echo 'Checking environment variables.'
          set

          echo 'Checking CMake version.'
          cmake --version

          echo 'Checking Conan version.'
          conan --version
      - name: Check configuration (Linux and MacOS)
        if: ${{ inputs.os == 'linux' || inputs.os == 'macos' }}
        run: |
          echo 'Checking path.'
          echo ${PATH} | tr ':' '\n'

          echo 'Checking environment variables.'
          env | sort

          echo 'Checking CMake version.'
          cmake --version

          echo 'Checking compiler version.'
          ${{ inputs.os == 'linux' && '${CC}' || 'clang' }} --version

          echo 'Checking Conan version.'
          conan --version

          echo 'Checking Ninja version.'
          ninja --version

          echo 'Checking nproc version.'
          nproc --version

      - name: Set up Conan configuration
        run: |
          echo 'Installing configuration.'
          cat conan/global.conf ${{ inputs.os == 'linux' && '>>' || '>' }} $(conan config home)/global.conf

          echo 'Conan configuration:'
          conan config show '*'
      - name: Set up Conan profile
        run: |
          echo 'Installing profile.'
          conan config install conan/profiles/default -tf $(conan config home)/profiles/

          echo 'Conan profile:'
          conan profile show
      - name: Set up Conan remote
        shell: bash
        run: |
          echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
          conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}

          echo 'Listing Conan remotes.'
          conan remote list

      - name: Build dependencies
        uses: ./.github/actions/build-deps
        with:
          build_dir: ${{ inputs.build_dir }}
          build_type: ${{ matrix.build_type }}
          conan_remote_name: ${{ inputs.conan_remote_name }}
          conan_remote_url: ${{ inputs.conan_remote_url }}
          conan_remote_username: ${{ secrets.conan_remote_username }}
          conan_remote_password: ${{ secrets.conan_remote_password }}
          force_build: ${{ inputs.dependencies_force_build }}
          force_upload: ${{ inputs.dependencies_force_upload }}
      - name: Build and test binary
        uses: ./.github/actions/build-test
        with:
          build_dir: ${{ inputs.build_dir }}
          build_only: ${{ matrix.build_only }}
          build_type: ${{ matrix.build_type }}
          cmake_args: ${{ matrix.cmake_args }}
          cmake_target: ${{ matrix.cmake_target }}
          codecov_token: ${{ secrets.codecov_token }}
          os: ${{ inputs.os }}
```
75
.github/workflows/check-format.yml
vendored
@@ -1,75 +0,0 @@
# This workflow checks if the code is properly formatted.
name: Check format

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-format
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  pre-commit:
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/tools-rippled-pre-commit
    steps:
      # The $GITHUB_WORKSPACE and ${{ github.workspace }} might not point to the
      # same directory for jobs running in containers. The actions/checkout step
      # is *supposed* to checkout into $GITHUB_WORKSPACE and then add it to
      # safe.directory (see instructions at https://github.com/actions/checkout)
      # but that is apparently not happening for some container images. We
      # therefore preemptively add both directories to safe.directory. See also
      # https://github.com/actions/runner/issues/2058 for more details.
      - name: Configure git safe.directory
        run: |
          git config --global --add safe.directory $GITHUB_WORKSPACE
          git config --global --add safe.directory ${{ github.workspace }}
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Check configuration
        run: |
          echo 'Checking path.'
          echo ${PATH} | tr ':' '\n'

          echo 'Checking environment variables.'
          env | sort

          echo 'Checking pre-commit version.'
          pre-commit --version

          echo 'Checking clang-format version.'
          clang-format --version

          echo 'Checking NPM version.'
          npm --version

          echo 'Checking Node.js version.'
          node --version

          echo 'Checking prettier version.'
          prettier --version
      - name: Format code
        run: pre-commit run --show-diff-on-failure --color=always --all-files
      - name: Check for differences
        env:
          MESSAGE: |
            One or more files did not conform to the formatting. Maybe you did
            not run 'pre-commit' before committing, or your version of
            'clang-format' or 'prettier' has an incompatibility with the ones
            used here (see the "Check configuration" step above).

            Run 'pre-commit run --all-files' in your repo, and then commit and
            push the changes.
        run: |
          DIFF=$(git status --porcelain)
          if [ -n "${DIFF}" ]; then
            # Print the files that changed to give the contributor a hint about
            # what to expect when running pre-commit on their own machine.
            git status
            echo "${MESSAGE}"
            exit 1
          fi
46
.github/workflows/check-levelization.yml
vendored
@@ -1,46 +0,0 @@
# This workflow checks if the dependencies between the modules are correctly
# indexed.
name: Check levelization

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-levelization
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  levelization:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Check levelization
        run: .github/scripts/levelization/generate.sh
      - name: Check for differences
        env:
          MESSAGE: |

            The dependency relationships between the modules in rippled have
            changed, which may be an improvement or a regression.

            A rule of thumb is that if your changes caused something to be
            removed from loops.txt, it's probably an improvement, while if
            something was added, it's probably a regression.

            Run '.github/scripts/levelization/generate.sh' in your repo, commit
            and push the changes. See .github/scripts/levelization/README.md for
            more info.
        run: |
          DIFF=$(git status --porcelain)
          if [ -n "${DIFF}" ]; then
            # Print the differences to give the contributor a hint about what to
            # expect when running levelization on their own machine.
            git diff
            echo "${MESSAGE}"
            exit 1
          fi
62
.github/workflows/check-missing-commits.yml
vendored
@@ -1,62 +0,0 @@
# This workflow checks that all commits in the "master" branch are also in the
# "release" and "develop" branches, and that all commits in the "release" branch
# are also in the "develop" branch.
name: Check for missing commits

# This workflow can only be triggered by other workflows.
on: workflow_call

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-missing-commits
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
        with:
          fetch-depth: 0
      - name: Check for missing commits
        env:
          MESSAGE: |

            If you are reading this, then the commits indicated above are missing
            from the "develop" and/or "release" branch. Do a reverse-merge as soon
            as possible. See CONTRIBUTING.md for instructions.
        run: |
          set -o pipefail
          # Branches are ordered by how "canonical" they are. Every commit in one
          # branch should be in all the branches behind it.
          order=(master release develop)
          branches=()
          for branch in "${order[@]}"; do
            # Check that the branches exist so that this job will work on forked
            # repos, which don't necessarily have master and release branches.
            echo "Checking if ${branch} exists."
            if git ls-remote --exit-code --heads origin \
              refs/heads/${branch} > /dev/null; then
              branches+=(origin/${branch})
            fi
          done

          prior=()
          for branch in "${branches[@]}"; do
            if [[ ${#prior[@]} -ne 0 ]]; then
              echo "Checking ${prior[@]} for commits missing from ${branch}."
              git log --oneline --no-merges "${prior[@]}" \
                ^$branch | tee -a "missing-commits.txt"
              echo
            fi
            prior+=("${branch}")
          done

          if [[ $(cat missing-commits.txt | wc -l) -ne 0 ]]; then
            echo "${MESSAGE}"
            exit 1
          fi
80
.github/workflows/notify-clio.yml
vendored
@@ -1,80 +0,0 @@
# This workflow exports the built libxrpl package to the Conan remote on a
# channel named after the pull request, and notifies the Clio repository about
# the new version so it can check for compatibility.
name: Notify Clio

# This workflow can only be triggered by other workflows.
on:
  workflow_call:
    inputs:
      conan_remote_name:
        description: "The name of the Conan remote to use."
        required: true
        type: string
      conan_remote_url:
        description: "The URL of the Conan endpoint to use."
        required: true
        type: string
    secrets:
      clio_notify_token:
        description: "The GitHub token to notify Clio about new versions."
        required: true
      conan_remote_username:
        description: "The username for logging into the Conan remote."
        required: true
      conan_remote_password:
        description: "The password for logging into the Conan remote."
        required: true

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-clio
  cancel-in-progress: true

defaults:
  run:
    shell: bash

jobs:
  upload:
    if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/ubuntu-noble:gcc-13
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Generate outputs
        id: generate
        run: |
          echo 'Generating user and channel.'
          echo "user=clio" >> "${GITHUB_OUTPUT}"
          echo "channel=pr_${{ github.event.pull_request.number }}" >> "${GITHUB_OUTPUT}"
          echo 'Extracting version.'
          echo "version=$(cat src/libxrpl/protocol/BuildInfo.cpp | grep "versionString =" | awk -F '"' '{print $2}')" >> "${GITHUB_OUTPUT}"
      - name: Add Conan remote
        run: |
          echo "Adding Conan remote '${{ inputs.conan_remote_name }}' at ${{ inputs.conan_remote_url }}."
          conan remote add --index 0 --force ${{ inputs.conan_remote_name }} ${{ inputs.conan_remote_url }}
          echo 'Listing Conan remotes.'
          conan remote list
      - name: Log into Conan remote
        run: conan remote login ${{ inputs.conan_remote_name }} "${{ secrets.conan_remote_username }}" --password "${{ secrets.conan_remote_password }}"
      - name: Upload package
        run: |
          conan export --user=${{ steps.generate.outputs.user }} --channel=${{ steps.generate.outputs.channel }} .
          conan upload --confirm --check --remote=${{ inputs.conan_remote_name }} xrpl/${{ steps.generate.outputs.version }}@${{ steps.generate.outputs.user }}/${{ steps.generate.outputs.channel }}
    outputs:
      channel: ${{ steps.generate.outputs.channel }}
      user: ${{ steps.generate.outputs.user }}
      version: ${{ steps.generate.outputs.version }}

  notify:
    needs: upload
    runs-on: ubuntu-latest
    env:
      GH_TOKEN: ${{ secrets.clio_notify_token }}
    steps:
      - name: Notify Clio
        run: |
          gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
            /repos/xrplf/clio/dispatches -f "event_type=check_libxrpl" \
            -F "client_payload[version]=${{ needs.upload.outputs.version }}@${{ needs.upload.outputs.user }}/${{ needs.upload.outputs.channel }}" \
            -F "client_payload[pr]=${{ github.event.pull_request.number }}"
154
.github/workflows/on-pr.yml
vendored
@@ -1,154 +0,0 @@
# This workflow runs all workflows to check, build and test the project on
# various Linux flavors, as well as on MacOS and Windows, on every push to a
# user branch. However, it will not run if the pull request is a draft unless it
# has the 'DraftRunCI' label.
name: PR

on:
  merge_group:
    types:
      - checks_requested
  pull_request:
    types:
      - opened
      - reopened
      - synchronize
      - ready_for_review

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  CONAN_REMOTE_NAME: xrplf
  CONAN_REMOTE_URL: https://conan.ripplex.io

jobs:
  # This job determines whether the rest of the workflow should run. It runs
  # when the PR is not a draft (which should also cover merge-group) or
  # has the 'DraftRunCI' label.
  should-run:
    if: ${{ !github.event.pull_request.draft || contains(github.event.pull_request.labels.*.name, 'DraftRunCI') }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Determine changed files
        # This step checks whether any files have changed that should
        # cause the next jobs to run. We do it this way rather than
        # using `paths` in the `on:` section, because all required
        # checks must pass, even for changes that do not modify anything
        # that affects those checks. We would therefore like to make the
        # checks required only if the job runs, but GitHub does not
        # support that directly. By always executing the workflow on new
        # commits and by using the changed-files action below, we ensure
        # that Github considers any skipped jobs to have passed, and in
        # turn the required checks as well.
        id: changes
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
        with:
          files: |
            # These paths are unique to `on-pr.yml`.
            .github/scripts/levelization/**
            .github/workflows/check-format.yml
            .github/workflows/check-levelization.yml
            .github/workflows/notify-clio.yml
            .github/workflows/on-pr.yml
            .clang-format
            .pre-commit-config.yaml

            # Keep the paths below in sync with those in `on-trigger.yml`.
            .github/actions/build-deps/**
            .github/actions/build-test/**
            .github/scripts/strategy-matrix/**
            .github/workflows/build-test.yml
            .codecov.yml
            cmake/**
            conan/**
            external/**
            include/**
            src/**
            tests/**
            CMakeLists.txt
            conanfile.py
      - name: Check whether to run
        # This step determines whether the rest of the workflow should
        # run. The rest of the workflow will run if this job runs AND at
        # least one of:
        # * Any of the files checked in the `changes` step were modified
        # * The PR is NOT a draft and is labeled "Ready to merge"
        # * The workflow is running from the merge queue
        id: go
        env:
          FILES: ${{ steps.changes.outputs.any_changed }}
          DRAFT: ${{ github.event.pull_request.draft }}
          READY: ${{ contains(github.event.pull_request.labels.*.name, 'Ready to merge') }}
          MERGE: ${{ github.event_name == 'merge_group' }}
        run: |
          echo "go=${{ (env.DRAFT != 'true' && env.READY == 'true') || env.FILES == 'true' || env.MERGE == 'true' }}" >> "${GITHUB_OUTPUT}"
          cat "${GITHUB_OUTPUT}"
    outputs:
      go: ${{ steps.go.outputs.go == 'true' }}

  check-format:
    needs: should-run
    if: needs.should-run.outputs.go == 'true'
    uses: ./.github/workflows/check-format.yml

  check-levelization:
    needs: should-run
    if: needs.should-run.outputs.go == 'true'
    uses: ./.github/workflows/check-levelization.yml

  # This job works around the limitation that GitHub Actions does not support
  # using environment variables as inputs for reusable workflows.
  generate-outputs:
    needs: should-run
    if: needs.should-run.outputs.go == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: No-op
        run: true
    outputs:
      conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
      conan_remote_url: ${{ env.CONAN_REMOTE_URL }}

  build-test:
    needs: generate-outputs
    uses: ./.github/workflows/build-test.yml
    strategy:
      matrix:
        os: [linux, macos, windows]
    with:
      conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
      conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
      os: ${{ matrix.os }}
    secrets:
      codecov_token: ${{ secrets.CODECOV_TOKEN }}

  notify-clio:
    needs:
      - generate-outputs
      - build-test
    uses: ./.github/workflows/notify-clio.yml
    with:
      conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
      conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
    secrets:
      clio_notify_token: ${{ secrets.CLIO_NOTIFY_TOKEN }}
      conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
      conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}

  passed:
    needs:
      - build-test
      - check-format
      - check-levelization
    runs-on: ubuntu-latest
    steps:
      - name: No-op
        run: true
118
.github/workflows/on-trigger.yml
vendored
@@ -1,118 +0,0 @@
# This workflow runs all workflows to build the dependencies required for the
# project on various Linux flavors, as well as on MacOS and Windows, on a
# scheduled basis, on merge into the 'develop', 'release', or 'master' branches,
# or manually. The missing commits check is only run when the code is merged
# into the 'develop' or 'release' branches, and the documentation is built when
# the code is merged into the 'develop' branch.
name: Trigger

on:
  push:
    branches:
      - develop
      - release
      - master
    paths:
      # These paths are unique to `on-trigger.yml`.
      - ".github/workflows/check-missing-commits.yml"
      - ".github/workflows/on-trigger.yml"
      - ".github/workflows/publish-docs.yml"

      # Keep the paths below in sync with those in `on-pr.yml`.
      - ".github/actions/build-deps/**"
      - ".github/actions/build-test/**"
      - ".github/scripts/strategy-matrix/**"
      - ".github/workflows/build-test.yml"
      - ".codecov.yml"
      - "cmake/**"
      - "conan/**"
      - "external/**"
      - "include/**"
      - "src/**"
      - "tests/**"
      - "CMakeLists.txt"
      - "conanfile.py"

  # Run at 06:32 UTC on every day of the week from Monday through Friday. This
  # will force all dependencies to be rebuilt, which is useful to verify that
  # all dependencies can be built successfully. Only the dependencies that
  # are actually missing from the remote will be uploaded.
  schedule:
    - cron: "32 6 * * 1-5"

  # Run when manually triggered via the GitHub UI or API. If `force_upload` is
  # true, then the dependencies that were missing (`force_rebuild` is false) or
  # rebuilt (`force_rebuild` is true) will be uploaded, overwriting existing
  # dependencies if needed.
  workflow_dispatch:
    inputs:
      dependencies_force_build:
        description: "Force building of all dependencies."
        required: false
        type: boolean
        default: false
      dependencies_force_upload:
        description: "Force uploading of all dependencies."
        required: false
        type: boolean
        default: false

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  CONAN_REMOTE_NAME: xrplf
  CONAN_REMOTE_URL: https://conan.ripplex.io

jobs:
  check-missing-commits:
    if: ${{ github.event_name == 'push' && github.ref_type == 'branch' && contains(fromJSON('["develop", "release"]'), github.ref_name) }}
    uses: ./.github/workflows/check-missing-commits.yml

  # This job works around the limitation that GitHub Actions does not support
  # using environment variables as inputs for reusable workflows. It also sets
  # outputs that depend on the event that triggered the workflow.
  generate-outputs:
    runs-on: ubuntu-latest
    steps:
      - name: Check inputs and set outputs
        id: generate
        run: |
          if [[ '${{ github.event_name }}' == 'push' ]]; then
            echo 'dependencies_force_build=false' >> "${GITHUB_OUTPUT}"
            echo 'dependencies_force_upload=false' >> "${GITHUB_OUTPUT}"
          elif [[ '${{ github.event_name }}' == 'schedule' ]]; then
            echo 'dependencies_force_build=true' >> "${GITHUB_OUTPUT}"
            echo 'dependencies_force_upload=false' >> "${GITHUB_OUTPUT}"
          else
            echo 'dependencies_force_build=${{ inputs.dependencies_force_build }}' >> "${GITHUB_OUTPUT}"
            echo 'dependencies_force_upload=${{ inputs.dependencies_force_upload }}' >> "${GITHUB_OUTPUT}"
          fi
    outputs:
      conan_remote_name: ${{ env.CONAN_REMOTE_NAME }}
      conan_remote_url: ${{ env.CONAN_REMOTE_URL }}
      dependencies_force_build: ${{ steps.generate.outputs.dependencies_force_build }}
      dependencies_force_upload: ${{ steps.generate.outputs.dependencies_force_upload }}

  build-test:
    needs: generate-outputs
    uses: ./.github/workflows/build-test.yml
    strategy:
      matrix:
        os: [linux, macos, windows]
    with:
      conan_remote_name: ${{ needs.generate-outputs.outputs.conan_remote_name }}
      conan_remote_url: ${{ needs.generate-outputs.outputs.conan_remote_url }}
      dependencies_force_build: ${{ needs.generate-outputs.outputs.dependencies_force_build == 'true' }}
      dependencies_force_upload: ${{ needs.generate-outputs.outputs.dependencies_force_upload == 'true' }}
      os: ${{ matrix.os }}
      strategy_matrix: "all"
    secrets:
      codecov_token: ${{ secrets.CODECOV_TOKEN }}
      conan_remote_username: ${{ secrets.CONAN_REMOTE_USERNAME }}
      conan_remote_password: ${{ secrets.CONAN_REMOTE_PASSWORD }}
60
.github/workflows/publish-docs.yml
vendored
@@ -1,60 +0,0 @@
# This workflow builds the documentation for the repository, and publishes it to
# GitHub Pages when changes are merged into the default branch.
name: Build and publish documentation

on:
  push:
    paths:
      - ".github/workflows/publish-docs.yml"
      - "*.md"
      - "**/*.md"
      - "docs/**"
      - "include/**"
      - "src/libxrpl/**"
      - "src/xrpld/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

env:
  BUILD_DIR: .build

jobs:
  publish:
    runs-on: ubuntu-latest
    container: ghcr.io/xrplf/ci/tools-rippled-documentation
    permissions:
      contents: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
      - name: Check configuration
        run: |
          echo 'Checking path.'
          echo ${PATH} | tr ':' '\n'

          echo 'Checking environment variables.'
          env | sort

          echo 'Checking CMake version.'
          cmake --version

          echo 'Checking Doxygen version.'
          doxygen --version
      - name: Build documentation
        run: |
          mkdir -p ${{ env.BUILD_DIR }}
          cd ${{ env.BUILD_DIR }}
          cmake -Donly_docs=ON ..
          cmake --build . --target docs --parallel $(nproc)
      - name: Publish documentation
        if: ${{ github.ref_type == 'branch' && github.ref_name == github.event.repository.default_branch }}
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ${{ env.BUILD_DIR }}/docs/html
27
.gitignore
vendored
@@ -21,12 +21,9 @@ bin/project-cache.jam
# Ignore object files.
*.o
.nih_c
build
tags
TAGS
GTAGS
GRTAGS
GPATH
bin/rippled
Debug/*.*
Release/*.*
@@ -36,10 +33,8 @@ Release/*.*
*.gcda
*.gcov

# Levelization checking
.github/scripts/levelization/results/*
!.github/scripts/levelization/results/loops.txt
!.github/scripts/levelization/results/ordering.txt
# Ignore locally installed node_modules
/node_modules

# Ignore tmp directory.
tmp
@@ -55,15 +50,15 @@ debug_log.txt
# Ignore customized configs
rippled.cfg
validators.txt
test/config.js

# Doxygen generated documentation output
HtmlDocumentation
docs/html_doc

# Xcode user-specific project settings
# Xcode
.DS_Store
/build/
*/build/*
*.pbxuser
!default.pbxuser
*.mode1v3
@@ -99,15 +94,3 @@ Builds/VisualStudio2015/*.sdf
# MSVC
*.pdb
.vs/
CMakeSettings.json
compile_commands.json
.clangd
packages
pkg_out
pkg
CMakeUserPresets.json
bld.rippled/
.vscode

# Suggested in-tree build directory
/.build*/
12
.gitmodules
vendored
Normal file
@@ -0,0 +1,12 @@
[submodule "docs/docca"]
    path = docs/docca
    url = https://github.com/vinniefalco/docca.git
[submodule "src/nudb/extras/beast"]
    path = src/nudb/extras/beast
    url = https://github.com/vinniefalco/Beast.git
[submodule "src/nudb/extras/rocksdb"]
    path = src/nudb/extras/rocksdb
    url = https://github.com/facebook/rocksdb.git
[submodule "src/nudb/doc/docca"]
    path = src/nudb/doc/docca
    url = https://github.com/vinniefalco/docca.git
@@ -1,64 +0,0 @@
# To run pre-commit hooks, first install pre-commit:
# - `pip install pre-commit==${PRE_COMMIT_VERSION}`
# - `pip install pre-commit-hooks==${PRE_COMMIT_HOOKS_VERSION}`
#
# Depending on your system, you can use `brew install` or `apt install` as well
# for installing the pre-commit package, but `pip` is needed to install the
# hooks; you can also use `pipx` if you prefer.
# Next, install the required formatters:
# - `pip install clang-format==${CLANG_VERSION}`
# - `npm install prettier@${PRETTIER_VERSION}`
#
# See https://github.com/XRPLF/ci/blob/main/.github/workflows/tools-rippled.yml
# for the versions used in the CI pipeline. You will need to have the exact same
# versions of the tools installed on your system to produce the same results as
# the pipeline.
#
# Then, run the following command to install the git hook scripts:
# - `pre-commit install`
# You can run all configured hooks against all files with:
# - `pre-commit run --all-files`
# To manually run a specific hook, use:
# - `pre-commit run <hook_id> --all-files`
# To run the hooks against only the files changed in the current commit, use:
# - `pre-commit run`
repos:
  - repo: local
    hooks:
      - id: clang-format
        name: clang-format
        language: system
        entry: clang-format -i
        files: '\.(cpp|hpp|h|ipp|proto)$'
      - id: trailing-whitespace
        name: trailing-whitespace
        entry: trailing-whitespace-fixer
        language: system
        types: [text]
      - id: end-of-file
        name: end-of-file
        entry: end-of-file-fixer
        language: system
        types: [text]
      - id: mixed-line-ending
        name: mixed-line-ending
        entry: mixed-line-ending
        language: system
        types: [text]
      - id: check-merge-conflict
        name: check-merge-conflict
        entry: check-merge-conflict --assume-in-merge
        language: system
        types: [text]
  - repo: local
    hooks:
      - id: prettier
        name: prettier
        language: system
        entry: prettier --ignore-unknown --write

        exclude: |
          (?x)^(
            external/.*|
            .github/scripts/levelization/results/.*\.txt
          )$
@@ -1 +0,0 @@
external
71
.travis.yml
Normal file
@@ -0,0 +1,71 @@
sudo: false
language: cpp

env:
  global:
    - LLVM_VERSION=3.8.0
    # Maintenance note: to move to a new version
    # of boost, update both BOOST_ROOT and BOOST_URL.
    # Note that for simplicity, BOOST_ROOT's final
    # namepart must match the folder name internal
    # to boost's .tar.gz.
    - LCOV_ROOT=$HOME/lcov
    - BOOST_ROOT=$HOME/boost_1_60_0
    - BOOST_URL='http://sourceforge.net/projects/boost/files/boost/1.60.0/boost_1_60_0.tar.gz'

addons:
  apt:
    sources: ['ubuntu-toolchain-r-test']
    packages:
      - gcc-5
      - g++-5
      - python-software-properties
      - protobuf-compiler
      - libprotobuf-dev
      - libssl-dev
      - libstdc++6
      - binutils-gold
      # Provides a backtrace if the unittests crash
      - gdb

matrix:
  include:
    # Default BUILD is "scons".

    - compiler: gcc
      env: GCC_VER=5 BUILD=cmake TARGET=debug.nounity PATH=$PWD/cmake/bin:$PATH

    - compiler: gcc
      env: GCC_VER=5 TARGET=coverage

    - compiler: clang
      env: GCC_VER=5 TARGET=debug CLANG_VER=3.8 PATH=$PWD/llvm-$LLVM_VERSION/bin:$PATH

    - compiler: clang
      env: GCC_VER=5 TARGET=debug.nounity CLANG_VER=3.8 PATH=$PWD/llvm-$LLVM_VERSION/bin:$PATH

    # The clang cmake builds do not link.
    # - compiler: clang
    #   env: GCC_VER=5 BUILD=cmake TARGET=debug CLANG_VER=3.8 PATH=$PWD/llvm-$LLVM_VERSION/bin:$PWD/cmake/bin:$PATH

    # - compiler: clang
    #   env: GCC_VER=5 BUILD=cmake TARGET=debug.nounity CLANG_VER=3.8 PATH=$PWD/llvm-$LLVM_VERSION/bin:$PWD/cmake/bin:$PATH

cache:
  directories:
    - $BOOST_ROOT
    - llvm-$LLVM_VERSION
    - cmake

before_install:
  - bin/ci/ubuntu/install-dependencies.sh

script:
  - travis_retry bin/ci/ubuntu/build-and-test.sh

notifications:
  email:
    false
  irc:
    channels:
      - "chat.freenode.net#ripple-dev"
225
API-CHANGELOG.md
@@ -1,225 +0,0 @@
# API Changelog

This changelog is intended to list all updates to the [public API methods](https://xrpl.org/public-api-methods.html).

For info about how [API versioning](https://xrpl.org/request-formatting.html#api-versioning) works, including examples, please view the [XLS-22d spec](https://github.com/XRPLF/XRPL-Standards/discussions/54). For details about the implementation of API versioning, view the [implementation PR](https://github.com/XRPLF/rippled/pull/3155). API versioning ensures existing integrations and users continue to receive existing behavior, while those that request a higher API version will experience new behavior.

The API version controls the API behavior you see. This includes what properties you see in responses, what parameters you're permitted to send in requests, and so on. You specify the API version in each of your requests. When a breaking change is introduced to the `rippled` API, a new version is released. To avoid breaking your code, you should set (or increase) your version when you're ready to upgrade.

For a log of breaking changes, see the **API Version [number]** headings. In general, breaking changes are associated with a particular API Version number. For non-breaking changes, scroll to the **XRP Ledger version [x.y.z]** headings. Non-breaking changes are associated with a particular XRP Ledger (`rippled`) release.

## API Version 2

API version 2 is available in `rippled` version 2.0.0 and later. To use this API, clients specify `"api_version" : 2` in each request.
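
For example, a WebSocket request pinned to API version 2 might look like the following minimal sketch (the `account` value and `id` are placeholders, not real data):

```json
{
  "id": 1,
  "command": "account_info",
  "api_version": 2,
  "account": "rExampleAccountPlaceholder"
}
```
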
#### Removed methods

In API version 2, the following deprecated methods are no longer available: (https://github.com/XRPLF/rippled/pull/4759)

- `tx_history` - Instead, use other methods such as `account_tx` or `ledger` with the `transactions` field set to `true`.
- `ledger_header` - Instead, use the `ledger` method.

#### Modifications to JSON transaction element in V2

In API version 2, JSON elements for transaction output have been changed and made consistent for all methods which output transactions. (https://github.com/XRPLF/rippled/pull/4775)
This helps to unify the JSON serialization format of transactions. (https://github.com/XRPLF/clio/issues/722, https://github.com/XRPLF/rippled/issues/4727)

- JSON transaction element is named `tx_json`
- Binary transaction element is named `tx_blob`
- JSON transaction metadata element is named `meta`
- Binary transaction metadata element is named `meta_blob`

Additionally, these elements are now consistently available next to `tx_json` (i.e. sibling elements), where possible:

- `hash` - Transaction ID. This data was stored inside transaction output in API version 1, but in API version 2 is a sibling element.
- `ledger_index` - Ledger index (only set on validated ledgers)
- `ledger_hash` - Ledger hash (only set on closed or validated ledgers)
- `close_time_iso` - Ledger close time expressed in ISO 8601 time format (only set on validated ledgers)
- `validated` - Bool element set to `true` if the transaction is in a validated ledger, otherwise `false`

This change affects the following methods (an abbreviated sketch of the resulting response shape follows this list):

- `tx` - Transaction data moved into element `tx_json` (was inline inside `result`) or, if binary output was requested, moved from `tx` to `tx_blob`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `account_tx` - Renamed transaction element from `tx` to `tx_json`. Renamed binary transaction metadata element (if it was requested) from `meta` to `meta_blob`. Changed location of `hash` and added new elements
- `transaction_entry` - Renamed transaction metadata element from `metadata` to `meta`. Changed location of `hash` and added new elements
- `subscribe` - Renamed transaction element from `transaction` to `tx_json`. Changed location of `hash` and added new elements
- `sign`, `sign_for`, `submit` and `submit_multisigned` - Changed location of `hash` element.
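
As an abbreviated sketch of a version 2 `tx` response (all values are placeholders and most fields are omitted), the elements are grouped as siblings like this:

```json
{
  "result": {
    "tx_json": {
      "TransactionType": "Payment",
      "Account": "rSenderAddressPlaceholder",
      "Destination": "rDestinationAddressPlaceholder"
    },
    "meta": {
      "TransactionResult": "tesSUCCESS"
    },
    "hash": "0000000000000000000000000000000000000000000000000000000000000000",
    "ledger_index": 90000000,
    "ledger_hash": "0000000000000000000000000000000000000000000000000000000000000000",
    "close_time_iso": "2024-01-01T00:00:00Z",
    "validated": true
  }
}
```
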
#### Modification to `Payment` transaction JSON schema

When reading Payments, the `Amount` field should generally **not** be used. Instead, use [delivered_amount](https://xrpl.org/partial-payments.html#the-delivered_amount-field) to see the amount that the Payment delivered. To clarify its meaning, the `Amount` field is being renamed to `DeliverMax`. (https://github.com/XRPLF/rippled/pull/4733)

- In `Payment` transaction type, JSON RPC field `Amount` is renamed to `DeliverMax`. To enable smooth client transition, `Amount` is still handled, as described below and illustrated after this list: (https://github.com/XRPLF/rippled/pull/4733)
  - On JSON RPC input (e.g. `submit_multisigned` etc. methods), `Amount` is recognized as an alias to `DeliverMax` for both API version 1 and version 2 clients.
  - On JSON RPC input, submitting both `Amount` and `DeliverMax` fields is allowed _only_ if they are identical; otherwise such input is rejected with `rpcINVALID_PARAMS` error.
  - On JSON RPC output (e.g. `subscribe`, `account_tx` etc. methods), `DeliverMax` is present in both API version 1 and version 2.
  - On JSON RPC output, `Amount` is only present in API version 1 and _not_ in version 2.
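
For illustration, a Payment as returned by a version 2 method carries `DeliverMax` where version 1 carried `Amount` (a sketch with placeholder addresses and values):

```json
{
  "TransactionType": "Payment",
  "Account": "rSenderAddressPlaceholder",
  "Destination": "rDestinationAddressPlaceholder",
  "DeliverMax": "1000000",
  "Fee": "10",
  "Sequence": 1
}
```
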
#### Modifications to account_info response

- `signer_lists` is returned in the root of the response. (In API version 1, it was nested under `account_data`.) (https://github.com/XRPLF/rippled/pull/3770)
- When using an invalid `signer_lists` value, the API now returns an "invalidParams" error. (https://github.com/XRPLF/rippled/pull/4585)
  - (`signer_lists` must be a boolean. In API version 1, strings were accepted and may return a normal response - i.e. as if `signer_lists` were `true`.)

#### Modifications to [account_tx](https://xrpl.org/account_tx.html#account_tx) response

- Combining `ledger_index_min` or `ledger_index_max` with `ledger_index` returns `invalidParams`, because specifying a ledger range and a single ledger at the same time does not make sense. In API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4571) (See the example request after this list.)
  - The same applies for `ledger_index_min`, `ledger_index_max`, and `ledger_hash`. (https://github.com/XRPLF/rippled/issues/4545#issuecomment-1565065579)
- Using a `ledger_index_min` or `ledger_index_max` beyond the range of ledgers that the server has:
  - returns `lgrIdxMalformed` in API version 2. Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/issues/4288)
- Attempting to use a non-boolean value (such as a string) for the `binary` or `forward` parameters returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)
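
For example, a request along these lines (the account is a placeholder) is rejected with `invalidParams` under API version 2, because it mixes a ledger range with a single ledger:

```json
{
  "command": "account_tx",
  "api_version": 2,
  "account": "rExampleAccountPlaceholder",
  "ledger_index_min": -1,
  "ledger_index_max": -1,
  "ledger_index": "validated"
}
```
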
#### Modifications to [noripple_check](https://xrpl.org/noripple_check.html#noripple_check) response

- Attempting to use a non-boolean value (such as a string) for the `transactions` parameter returns `invalidParams` (`rpcINVALID_PARAMS`). Previously, in API version 1, no error was returned. (https://github.com/XRPLF/rippled/pull/4620)

## API Version 1

This version is supported by all `rippled` versions. For WebSocket and HTTP JSON-RPC requests, it is currently the default API version used when no `api_version` is specified.

The [commandline](https://xrpl.org/docs/references/http-websocket-apis/api-conventions/request-formatting/#commandline-format) always uses the latest API version. The command line is intended for ad-hoc usage by humans, not programs or automated scripts. The command line is not meant for use in production code.

### Inconsistency: server_info - network_id

The `network_id` field was added in the `server_info` response in version 1.5.0 (2019), but it is not returned in [reporting mode](https://xrpl.org/rippled-server-modes.html#reporting-mode). However, use of reporting mode is now discouraged, in favor of using [Clio](https://github.com/XRPLF/clio) instead.

## XRP Ledger server version 2.5.0

As of 2025-04-04, version 2.5.0 is in development. You can use a pre-release version by building from source or [using the `nightly` package](https://xrpl.org/docs/infrastructure/installation/install-rippled-on-ubuntu).

### Additions and bugfixes in 2.5.0

- `channel_authorize`: If `signing_support` is not enabled in the config, the RPC is disabled.

## XRP Ledger server version 2.4.0

[Version 2.4.0](https://github.com/XRPLF/rippled/releases/tag/2.4.0) was released on March 4, 2025.

### Additions and bugfixes in 2.4.0

- `ledger_entry`: `state` is added as an alias for `ripple_state`.
- `ledger_entry`: Enables case-insensitive filtering by canonical name in addition to case-sensitive filtering by RPC name.
- `validators`: Added new field `validator_list_threshold` in response.
- `simulate`: A new RPC that executes a [dry run of a transaction submission](https://github.com/XRPLF/XRPL-Standards/tree/master/XLS-0069d-simulate#2-rpc-simulate)
- Signing methods autofill fees better and properly handle transactions that don't have a base fee, and will also autofill the `NetworkID` field.

## XRP Ledger server version 2.3.0

[Version 2.3.0](https://github.com/XRPLF/rippled/releases/tag/2.3.0) was released on Nov 25, 2024.

### Breaking changes in 2.3.0

- `book_changes`: If the requested ledger version is not available on this node, a `ledgerNotFound` error is returned and the node does not attempt to acquire the ledger from the p2p network (as with other non-admin RPCs).

  Admins can still attempt to retrieve old ledgers with the `ledger_request` RPC.

### Additions and bugfixes in 2.3.0

- `book_changes`: Returns a `validated` field in its response, which was missing in prior versions.

## XRP Ledger server version 2.2.0

[Version 2.2.0](https://github.com/XRPLF/rippled/releases/tag/2.2.0) was released on Jun 5, 2024. The following additions are non-breaking (because they are purely additive):

- The `feature` method now has a non-admin mode for users. (It was previously only available to admin connections.) The method returns an updated list of amendments, including their names and other information. ([#4781](https://github.com/XRPLF/rippled/pull/4781))

## XRP Ledger server version 2.0.0

[Version 2.0.0](https://github.com/XRPLF/rippled/releases/tag/2.0.0) was released on Jan 9, 2024. The following additions are non-breaking (because they are purely additive):

- `server_definitions`: A new RPC that generates a `definitions.json`-like output that can be used in XRPL libraries.
- In `Payment` transactions, `DeliverMax` has been added. This is a replacement for the `Amount` field, which should not be used. Typically, the `delivered_amount` (in transaction metadata) should be used. To ease the transition, `DeliverMax` is present regardless of API version, since adding a field is non-breaking.
- API version 2 has been moved from beta to supported, meaning that it is generally available (regardless of the `beta_rpc_api` setting).
## XRP Ledger server version 1.12.0

[Version 1.12.0](https://github.com/XRPLF/rippled/releases/tag/1.12.0) was released on Sep 6, 2023. The following additions are non-breaking (because they are purely additive).

- `server_info`: Added `ports`, an array which advertises the RPC and WebSocket ports. This information is also included in the `/crawl` endpoint (which calls `server_info` internally). `grpc` and `peer` ports are also included. (https://github.com/XRPLF/rippled/pull/4427)
  - `ports` contains objects, each containing a `port` for the listening port (a number string), and a `protocol` array listing the supported protocols on that port.
  - This allows crawlers to build a more detailed topology without needing to port-scan nodes.
  - (For peers and other non-admin clients, the info about admin ports is excluded.)
- Clawback: The following additions are gated by the Clawback amendment (`featureClawback`). (https://github.com/XRPLF/rippled/pull/4553)
  - Adds an [AccountRoot flag](https://xrpl.org/accountroot.html#accountroot-flags) called `lsfAllowTrustLineClawback` (https://github.com/XRPLF/rippled/pull/4617)
  - Adds the corresponding `asfAllowTrustLineClawback` [AccountSet Flag](https://xrpl.org/accountset.html#accountset-flags) as well.
  - Clawback is disabled by default, so if an issuer desires the ability to claw back funds, they must use an `AccountSet` transaction to set the AllowTrustLineClawback flag. They must do this before creating any trust lines, offers, escrows, payment channels, or checks.
  - Adds the [Clawback transaction type](https://github.com/XRPLF/XRPL-Standards/blob/master/XLS-39d-clawback/README.md#331-clawback-transaction), containing these fields (see the example after this list):
    - `Account`: The issuer of the asset being clawed back. Must also be the sender of the transaction.
    - `Amount`: The amount being clawed back, with the `Amount.issuer` being the token holder's address.
- Adds [AMM](https://github.com/XRPLF/XRPL-Standards/discussions/78) ([#4294](https://github.com/XRPLF/rippled/pull/4294), [#4626](https://github.com/XRPLF/rippled/pull/4626)) feature:
  - Adds `amm_info` API to retrieve AMM information for a given tokens pair.
  - Adds `AMMCreate` transaction type to create `AMM` instance.
  - Adds `AMMDeposit` transaction type to deposit funds into `AMM` instance.
  - Adds `AMMWithdraw` transaction type to withdraw funds from `AMM` instance.
  - Adds `AMMVote` transaction type to vote for the trading fee of `AMM` instance.
  - Adds `AMMBid` transaction type to bid for the Auction Slot of `AMM` instance.
  - Adds `AMMDelete` transaction type to delete `AMM` instance.
  - Adds `sfAMMID` to `AccountRoot` to indicate that the account is `AMM`'s account. `AMMID` is used to fetch `ltAMM`.
  - Adds `lsfAMMNode` `TrustLine` flag to indicate that one side of the `TrustLine` is `AMM` account.
  - Adds `tfLPToken`, `tfSingleAsset`, `tfTwoAsset`, `tfOneAssetLPToken`, `tfLimitLPToken`, `tfTwoAssetIfEmpty`, `tfWithdrawAll`, `tfOneAssetWithdrawAll` which allow a trader to specify different field combinations for `AMMDeposit` and `AMMWithdraw` transactions.
  - Adds new transaction result codes:
    - `tecUNFUNDED_AMM`: insufficient balance to fund AMM. The account does not have funds for liquidity provision.
    - `tecAMM_BALANCE`: AMM has invalid balance. Calculated balances greater than the current pool balances.
    - `tecAMM_FAILED`: AMM transaction failed. Fails due to a processing failure.
    - `tecAMM_INVALID_TOKENS`: AMM invalid LP tokens. Invalid input values, format, or calculated values.
    - `tecAMM_EMPTY`: AMM is in an empty state. Transaction requires AMM in non-empty state (LP tokens > 0).
    - `tecAMM_NOT_EMPTY`: AMM is not in an empty state. Transaction requires AMM in empty state (LP tokens == 0).
    - `tecAMM_ACCOUNT`: AMM account. Clawback of AMM account.
    - `tecINCOMPLETE`: Some work was completed, but more submissions are required to finish. AMMDelete partially deletes the trustlines.
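
For illustration, a minimal Clawback transaction might look like the following sketch (addresses and the amount are placeholders; `Account` is the issuer, and `Amount.issuer` is the token holder being clawed back from, per the field descriptions above):

```json
{
  "TransactionType": "Clawback",
  "Account": "rIssuerAddressPlaceholder",
  "Amount": {
    "currency": "USD",
    "issuer": "rHolderAddressPlaceholder",
    "value": "100"
  }
}
```
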
## XRP Ledger server version 1.11.0

[Version 1.11.0](https://github.com/XRPLF/rippled/releases/tag/1.11.0) was released on Jun 20, 2023.

### Breaking changes in 1.11

- Added the ability to mark amendments as obsolete. For the `feature` admin API, there is a new possible value for the `vetoed` field. (https://github.com/XRPLF/rippled/pull/4291)
  - The value of `vetoed` can now be `true`, `false`, or `"Obsolete"`.
- Removed the acceptance of seeds or public keys in place of account addresses. (https://github.com/XRPLF/rippled/pull/4404)
  - This simplifies the API and encourages better security practices (i.e. seeds should never be sent over the network).
- For the `ledger_data` method, when all entries are filtered out, the `state` field of the response is now an empty list (in other words, an empty array, `[]`). (Previously, it would return `null`.) While this is technically a breaking change, the new behavior is consistent with the documentation, so this is considered only a bug fix. (https://github.com/XRPLF/rippled/pull/4398)
- If and when the `fixNFTokenRemint` amendment activates, there will be a new AccountRoot field, `FirstNFTSequence`. This field is set to the current account sequence when the account issues their first NFT. If an account has not issued any NFTs, then the field is not set. ([#4406](https://github.com/XRPLF/rippled/pull/4406))
  - There is a new account deletion restriction: an account can only be deleted if `FirstNFTSequence` + `MintedNFTokens` + `256` is less than the current ledger sequence.
  - This is potentially a breaking change if clients have logic for determining whether an account can be deleted.
- NetworkID (see the example transaction after this list)
  - For sidechains and networks with a network ID greater than 1024, there is a new [transaction common field](https://xrpl.org/transaction-common-fields.html), `NetworkID`. (https://github.com/XRPLF/rippled/pull/4370)
  - This field helps to prevent replay attacks and is now required for chains whose network ID is 1025 or higher.
  - The field must be omitted for Mainnet, so there is no change for Mainnet users.
  - There are three new local error codes:
    - `telNETWORK_ID_MAKES_TX_NON_CANONICAL`: a `NetworkID` is present but the chain's network ID is less than 1025. Remove the field from the transaction, and try again.
    - `telREQUIRES_NETWORK_ID`: a `NetworkID` is required, but is not present. Add the field to the transaction, and try again.
    - `telWRONG_NETWORK`: a `NetworkID` is specified, but it is for a different network. Submit the transaction to a different server which is connected to the correct network.
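
As a sketch, a transaction destined for a sidechain with a network ID of 1025 or higher would carry the common field like this (all other values are placeholders):

```json
{
  "TransactionType": "Payment",
  "Account": "rSenderAddressPlaceholder",
  "Destination": "rDestinationAddressPlaceholder",
  "Amount": "1000000",
  "NetworkID": 1025
}
```
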
### Additions and bug fixes in 1.11

- Added `nftoken_id`, `nftoken_ids` and `offer_id` meta fields into NFT `tx` and `account_tx` responses. (https://github.com/XRPLF/rippled/pull/4447)
- Added an `account_flags` object to the `account_info` method response. (https://github.com/XRPLF/rippled/pull/4459)
- Added `NFTokenPages` to the `account_objects` RPC. (https://github.com/XRPLF/rippled/pull/4352)
- Fixed: `marker` returned from the `account_lines` command would not work on subsequent commands. (https://github.com/XRPLF/rippled/pull/4361)

## XRP Ledger server version 1.10.0

[Version 1.10.0](https://github.com/XRPLF/rippled/releases/tag/1.10.0) was released on Mar 14, 2023.

### Breaking changes in 1.10

- If the `XRPFees` feature is enabled, the `fee_ref` field will be removed from the [ledger subscription stream](https://xrpl.org/subscribe.html#ledger-stream), because it will no longer have any meaning.

# Unit tests for API changes

The following information is useful to developers contributing to this project:

The purpose of unit tests is to catch bugs and prevent regressions. In general, it often makes sense to create a test function when there is a breaking change to the API. For APIs that have changed in a new API version, the tests should be modified so that both the prior version and the new version are properly tested.

To take one example: for `account_info` version 1, WebSocket and JSON-RPC behavior should be tested. The latest API version, i.e. API version 2, should be tested over WebSocket, JSON-RPC, and command line.
611
BUILD.md
@@ -1,611 +0,0 @@
|
||||
| :warning: **WARNING** :warning:
|
||||
|---|
|
||||
| These instructions assume you have a C++ development environment ready with Git, Python, Conan, CMake, and a C++ compiler. For help setting one up on Linux, macOS, or Windows, [see this guide](./docs/build/environment.md). |
|
||||
|
||||
> These instructions also assume a basic familiarity with Conan and CMake.
|
||||
> If you are unfamiliar with Conan, you can read our
|
||||
> [crash course](./docs/build/conan.md) or the official [Getting Started][3]
|
||||
> walkthrough.
|
||||
|
||||
## Branches
|
||||
|
||||
For a stable release, choose the `master` branch or one of the [tagged
|
||||
releases](https://github.com/ripple/rippled/releases).
|
||||
|
||||
```bash
|
||||
git checkout master
|
||||
```
|
||||
|
||||
For the latest release candidate, choose the `release` branch.
|
||||
|
||||
```bash
|
||||
git checkout release
|
||||
```
|
||||
|
||||
For the latest set of untested features, or to contribute, choose the `develop`
|
||||
branch.
|
||||
|
||||
```bash
|
||||
git checkout develop
|
||||
```
|
||||
|
||||
## Minimum Requirements
|
||||
|
||||
See [System Requirements](https://xrpl.org/system-requirements.html).
|
||||
|
||||
Building rippled generally requires git, Python, Conan, CMake, and a C++
|
||||
compiler. Some guidance on setting up such a [C++ development environment can be
|
||||
found here](./docs/build/environment.md).
|
||||
|
||||
- [Python 3.11](https://www.python.org/downloads/), or higher
|
||||
- [Conan 2.17](https://conan.io/downloads.html)[^1], or higher
|
||||
- [CMake 3.22](https://cmake.org/download/)[^2], or higher
|
||||
|
||||
[^1]:
|
||||
It is possible to build with Conan 1.60+, but the instructions are
|
||||
significantly different, which is why we are not recommending it.
|
||||
|
||||
[^2]:
|
||||
CMake 4 is not yet supported by all dependencies required by this project.
|
||||
If you are affected by this issue, follow [conan workaround for cmake
|
||||
4](#workaround-for-cmake-4)
|
||||
|
||||
`rippled` is written in the C++20 dialect and includes the `<concepts>` header.
|
||||
The [minimum compiler versions][2] required are:
|
||||
|
||||
| Compiler | Version |
|
||||
| ----------- | --------- |
|
||||
| GCC | 12 |
|
||||
| Clang | 16 |
|
||||
| Apple Clang | 16 |
|
||||
| MSVC | 19.44[^3] |
|
||||
|
||||
### Linux
|
||||
|
||||
The Ubuntu Linux distribution has received the highest level of quality
|
||||
assurance, testing, and support. We also support Red Hat and use Debian
|
||||
internally.
|
||||
|
||||
Here are [sample instructions for setting up a C++ development environment on
|
||||
Linux](./docs/build/environment.md#linux).
|
||||
|
||||
### Mac
|
||||
|
||||
Many rippled engineers use macOS for development.
|
||||
|
||||
Here are [sample instructions for setting up a C++ development environment on
|
||||
macOS](./docs/build/environment.md#macos).
|
||||
|
||||
### Windows
|
||||
|
||||
Windows is used by some engineers for development only.
|
||||
|
||||
[^3]: Windows is not recommended for production use.
|
||||
|
||||
## Steps
|
||||
|
||||
### Set Up Conan
|
||||
|
||||
After you have a [C++ development environment](./docs/build/environment.md) ready with Git, Python,
|
||||
Conan, CMake, and a C++ compiler, you may need to set up your Conan profile.
|
||||
|
||||
These instructions assume a basic familiarity with Conan and CMake. If you are
|
||||
unfamiliar with Conan, then please read [this crash course](./docs/build/conan.md) or the official
|
||||
[Getting Started][3] walkthrough.
|
||||
|
||||
#### Default profile
|
||||
|
||||
We recommend that you import the provided `conan/profiles/default` profile:
|
||||
|
||||
```bash
|
||||
conan config install conan/profiles/ -tf $(conan config home)/profiles/
|
||||
```
|
||||
|
||||
You can check your Conan profile by running:
|
||||
|
||||
```bash
|
||||
conan profile show
|
||||
```
|
||||
|
||||
#### Custom profile
|
||||
|
||||
If the default profile does not work for you and you do not yet have a Conan
|
||||
profile, you can create one by running:
|
||||
|
||||
```bash
|
||||
conan profile detect
|
||||
```
|
||||
|
||||
You may need to make changes to the profile to suit your environment. You can
|
||||
refer to the provided `conan/profiles/default` profile for inspiration, and you
|
||||
may also need to apply the required [tweaks](#conan-profile-tweaks) to this
|
||||
default profile.
|
||||
|
||||
### Patched recipes
|
||||
|
||||
The recipes in Conan Center occasionally need to be patched for compatibility
|
||||
with the latest version of `rippled`. We maintain a fork of the Conan Center
|
||||
[here](https://github.com/XRPLF/conan-center-index/) containing the patches.
|
||||
|
||||
To ensure our patched recipes are used, you must add our Conan remote at a
|
||||
higher index than the default Conan Center remote, so it is consulted first. You
|
||||
can do this by running:
|
||||
|
||||
```bash
|
||||
conan remote add --index 0 xrplf "https://conan.ripplex.io"
|
||||
```
|
||||
|
||||
Alternatively, you can pull the patched recipes into the repository and use them
|
||||
locally:
|
||||
|
||||
```bash
|
||||
cd external
|
||||
git init
|
||||
git remote add origin git@github.com:XRPLF/conan-center-index.git
|
||||
git sparse-checkout init
|
||||
git sparse-checkout set recipes/snappy
|
||||
git sparse-checkout add recipes/soci
|
||||
git fetch origin master
|
||||
git checkout master
|
||||
conan export --version 1.1.10 recipes/snappy/all
|
||||
conan export --version 4.0.3 recipes/soci/all
|
||||
rm -rf .git
|
||||
```
|
||||
|
||||
If we switch to a newer version of a dependency that still requires a patch,
you will need to pull in the changes and re-export the updated recipe for the
newer version. However, if we switch to a newer version that no longer requires
a patch, no action is required on your part, as the new recipe will be pulled
automatically from the official Conan Center.
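
For example, here is a hypothetical sketch of re-exporting a patched recipe after such a version bump (`NEW_VERSION` is a placeholder for whatever version `rippled` then requires):

```bash
cd external
git fetch origin master
git checkout master
# Re-export the patched recipe under the newer version.
conan export --version "$NEW_VERSION" recipes/soci/all
```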
|
||||
|
||||
### Conan profile tweaks
|
||||
|
||||
#### Missing compiler version
|
||||
|
||||
If you see an error similar to the following after running `conan profile show`:
|
||||
|
||||
```bash
|
||||
ERROR: Invalid setting '17' is not a valid 'settings.compiler.version' value.
|
||||
Possible values are ['5.0', '5.1', '6.0', '6.1', '7.0', '7.3', '8.0', '8.1',
|
||||
'9.0', '9.1', '10.0', '11.0', '12.0', '13', '13.0', '13.1', '14', '14.0', '15',
|
||||
'15.0', '16', '16.0']
|
||||
Read "http://docs.conan.io/2/knowledge/faq.html#error-invalid-setting"
|
||||
```
|
||||
|
||||
you need to amend the list of compiler versions in
`$(conan config home)/settings.yml` by appending the required version number(s)
to the `version` array specific to your compiler. For example:
|
||||
|
||||
```yaml
|
||||
apple-clang:
|
||||
version:
|
||||
[
|
||||
"5.0",
|
||||
"5.1",
|
||||
"6.0",
|
||||
"6.1",
|
||||
"7.0",
|
||||
"7.3",
|
||||
"8.0",
|
||||
"8.1",
|
||||
"9.0",
|
||||
"9.1",
|
||||
"10.0",
|
||||
"11.0",
|
||||
"12.0",
|
||||
"13",
|
||||
"13.0",
|
||||
"13.1",
|
||||
"14",
|
||||
"14.0",
|
||||
"15",
|
||||
"15.0",
|
||||
"16",
|
||||
"16.0",
|
||||
"17",
|
||||
"17.0",
|
||||
]
|
||||
```
|
||||
|
||||
#### Multiple compilers
|
||||
|
||||
If you have multiple compilers installed, make sure to select the one to use in
|
||||
your default Conan configuration **before** running `conan profile detect`, by
|
||||
setting the `CC` and `CXX` environment variables.
|
||||
|
||||
For example, if you are running macOS and have [Homebrew
LLVM@18](https://formulae.brew.sh/formula/llvm@18) installed, and want to use it
as the compiler in the new Conan profile:
|
||||
|
||||
```bash
|
||||
export CC=$(brew --prefix llvm@18)/bin/clang
|
||||
export CXX=$(brew --prefix llvm@18)/bin/clang++
|
||||
conan profile detect
|
||||
```
|
||||
|
||||
You should also explicitly set the path to the compiler in the profile file,
|
||||
which helps to avoid errors when `CC` and/or `CXX` are set and disagree with the
|
||||
selected Conan profile. For example:
|
||||
|
||||
```text
|
||||
[conf]
|
||||
tools.build:compiler_executables={'c':'/usr/bin/gcc','cpp':'/usr/bin/g++'}
|
||||
```
|
||||
|
||||
#### Multiple profiles
|
||||
|
||||
You can manage multiple Conan profiles in the directory
|
||||
`$(conan config home)/profiles`, for example renaming `default` to a different
|
||||
name and then creating a new `default` profile for a different compiler.
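
For instance, here is a minimal sketch of keeping an extra profile alongside a freshly detected default (the profile name `gcc12` is just an example):

```bash
# Keep the current default under a descriptive name, then detect a new default.
mv "$(conan config home)/profiles/default" "$(conan config home)/profiles/gcc12"
conan profile detect
# Select a non-default profile explicitly when installing dependencies.
conan install .. --output-folder . --build missing --profile gcc12
```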
|
||||
|
||||
#### Select language
|
||||
|
||||
The default profile created by Conan will typically select a C++ dialect other
than the C++20 used by this project. You should set `20` in the profile line
starting with `compiler.cppstd=`. For example:
|
||||
|
||||
```bash
|
||||
sed -i.bak -e 's|^compiler\.cppstd=.*$|compiler.cppstd=20|' $(conan config home)/profiles/default
|
||||
```
|
||||
|
||||
#### Select standard library in Linux
|
||||
|
||||
**Linux** developers will commonly have a default Conan [profile][] that
|
||||
compiles with GCC and links with libstdc++. If you are linking with libstdc++
|
||||
(see profile setting `compiler.libcxx`), then you will need to choose the
|
||||
`libstdc++11` ABI:
|
||||
|
||||
```bash
|
||||
sed -i.bak -e 's|^compiler\.libcxx=.*$|compiler.libcxx=libstdc++11|' $(conan config home)/profiles/default
|
||||
```
|
||||
|
||||
#### Select architecture and runtime in Windows
|
||||
|
||||
**Windows** developers may need to use the x64 native build tools. An easy way
|
||||
to do that is to run the shortcut "x64 Native Tools Command Prompt" for the
|
||||
version of Visual Studio that you have installed.
|
||||
|
||||
Windows developers must also build `rippled` and its dependencies for the x64
|
||||
architecture:
|
||||
|
||||
```bash
|
||||
sed -i.bak -e 's|^arch=.*$|arch=x86_64|' $(conan config home)/profiles/default
|
||||
```
|
||||
|
||||
**Windows** developers must also select the static runtime:
|
||||
|
||||
```bash
|
||||
sed -i.bak -e 's|^compiler\.runtime=.*$|compiler.runtime=static|' $(conan config home)/profiles/default
|
||||
```
|
||||
|
||||
#### Workaround for CMake 4
|
||||
|
||||
If your system CMake is version 4 rather than 3, you may have to configure the
Conan profile to use CMake version 3 for the dependencies by adding the
following two lines to your profile:
|
||||
|
||||
```text
|
||||
[tool_requires]
|
||||
!cmake/*: cmake/[>=3 <4]
|
||||
```
|
||||
|
||||
This will force Conan to download and use a locally cached CMake 3 version, and
|
||||
is needed because some of the dependencies used by this project do not support
|
||||
CMake 4.
|
||||
|
||||
#### Clang workaround for grpc
|
||||
|
||||
If your compiler is clang, version 19 or later, or apple-clang, version 17 or
|
||||
later, you may encounter a compilation error while building the `grpc`
|
||||
dependency:
|
||||
|
||||
```text
|
||||
In file included from .../lib/promise/try_seq.h:26:
|
||||
.../lib/promise/detail/basic_seq.h:499:38: error: a template argument list is expected after a name prefixed by the template keyword [-Wmissing-template-arg-list-after-template-kw]
|
||||
499 | Traits::template CallSeqFactory(f_, *cur_, std::move(arg)));
|
||||
| ^
|
||||
```
|
||||
|
||||
The workaround for this error is to add these two lines to your profile:
|
||||
|
||||
```text
|
||||
[conf]
|
||||
tools.build:cxxflags=['-Wno-missing-template-arg-list-after-template-kw']
|
||||
```
|
||||
|
||||
#### Workaround for gcc 12
|
||||
|
||||
If your compiler is gcc version 12 and you have enabled the `werr` option, you
may encounter a compilation error such as:
|
||||
|
||||
```text
|
||||
/usr/include/c++/12/bits/char_traits.h:435:56: error: 'void* __builtin_memcpy(void*, const void*, long unsigned int)' accessing 9223372036854775810 or more bytes at offsets [2, 9223372036854775807] and 1 may overlap up to 9223372036854775813 bytes at offset -3 [-Werror=restrict]
|
||||
435 | return static_cast<char_type*>(__builtin_memcpy(__s1, __s2, __n));
|
||||
| ~~~~~~~~~~~~~~~~^~~~~~~~~~~~~~~~~
|
||||
cc1plus: all warnings being treated as errors
|
||||
```
|
||||
|
||||
The workaround for this error is to add two lines to your profile:
|
||||
|
||||
```text
|
||||
[conf]
|
||||
tools.build:cxxflags=['-Wno-restrict']
|
||||
```
|
||||
|
||||
#### Workaround for clang 16
|
||||
|
||||
If your compiler is clang version 16, you may encounter a compilation error
such as:
|
||||
|
||||
```text
|
||||
In file included from .../boost/beast/websocket/stream.hpp:2857:
|
||||
.../boost/beast/websocket/impl/read.hpp:695:17: error: call to 'async_teardown' is ambiguous
|
||||
async_teardown(impl.role, impl.stream(),
|
||||
^~~~~~~~~~~~~~
|
||||
```
|
||||
|
||||
The workaround for this error is to add two lines to your profile:
|
||||
|
||||
```text
|
||||
[conf]
|
||||
tools.build:cxxflags=['-DBOOST_ASIO_DISABLE_CONCEPTS']
|
||||
```
|
||||
|
||||
### Build and Test
|
||||
|
||||
1. Create a build directory and move into it.
|
||||
|
||||
```
|
||||
mkdir .build
|
||||
cd .build
|
||||
```
|
||||
|
||||
You can use any directory name. Conan treats your working directory as an
|
||||
install folder and generates files with implementation details.
|
||||
You don't need to worry about these files, but make sure to change
|
||||
your working directory to your build directory before calling Conan.
|
||||
|
||||
**Note:** You can specify a directory for the installation files by adding
|
||||
the `install-folder` or `-if` option to every `conan install` command
|
||||
in the next step.
|
||||
|
||||
2. Use conan to generate CMake files for every configuration you want to build:
|
||||
|
||||
```
|
||||
conan install .. --output-folder . --build missing --settings build_type=Release
|
||||
conan install .. --output-folder . --build missing --settings build_type=Debug
|
||||
```
|
||||
|
||||
To build Debug, in the next step, be sure to set `-DCMAKE_BUILD_TYPE=Debug`
|
||||
|
||||
For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`,
|
||||
you only need to run this command once.
|
||||
For a multi-configuration generator, e.g. `Visual Studio`, you may want to
|
||||
run it more than once.
|
||||
|
||||
Each of these commands should also have a different `build_type` setting.
|
||||
A second command with the same `build_type` setting will overwrite the files
|
||||
generated by the first. You can pass the build type on the command line with
|
||||
`--settings build_type=$BUILD_TYPE` or in the profile itself,
|
||||
under the section `[settings]` with the key `build_type`.
|
||||
|
||||
If you are using a Microsoft Visual C++ compiler,
|
||||
then you will need to ensure consistency between the `build_type` setting
|
||||
and the `compiler.runtime` setting.
|
||||
|
||||
When `build_type` is `Release`, `compiler.runtime` should be `MT`.
|
||||
|
||||
When `build_type` is `Debug`, `compiler.runtime` should be `MTd`.
|
||||
|
||||
```
|
||||
conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT
|
||||
conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd
|
||||
```
|
||||
|
||||
3. Configure CMake and pass the toolchain file generated by Conan, located at
|
||||
`$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`.
|
||||
|
||||
Single-config generators:
|
||||
|
||||
Pass the CMake variable [`CMAKE_BUILD_TYPE`][build_type]
|
||||
and make sure it matches the one of the `build_type` settings
|
||||
you chose in the previous step.
|
||||
|
||||
For example, to build Debug, replace "Release" with "Debug" in the next command:
|
||||
|
||||
```
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dxrpld=ON -Dtests=ON ..
|
||||
```
|
||||
|
||||
Multi-config generators:
|
||||
|
||||
```
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -Dxrpld=ON -Dtests=ON ..
|
||||
```
|
||||
|
||||
**Note:** You can pass build options for `rippled` in this step.
|
||||
|
||||
4. Build `rippled`.
|
||||
|
||||
For a single-configuration generator, it will build whatever configuration
|
||||
you passed for `CMAKE_BUILD_TYPE`. For a multi-configuration generator, you
|
||||
must pass the option `--config` to select the build configuration.
|
||||
|
||||
Single-config generators:
|
||||
|
||||
```
|
||||
cmake --build .
|
||||
```
|
||||
|
||||
Multi-config generators:
|
||||
|
||||
```
|
||||
cmake --build . --config Release
|
||||
cmake --build . --config Debug
|
||||
```
|
||||
|
||||
5. Test rippled.
|
||||
|
||||
Single-config generators:
|
||||
|
||||
```
|
||||
./rippled --unittest --unittest-jobs N
|
||||
```
|
||||
|
||||
Multi-config generators:
|
||||
|
||||
```
|
||||
./Release/rippled --unittest --unittest-jobs N
|
||||
./Debug/rippled --unittest --unittest-jobs N
|
||||
```
|
||||
|
||||
Replace the `--unittest-jobs` parameter N with the desired unit test
concurrency. The recommended setting is half the number of available CPU cores.
|
||||
|
||||
The location of the `rippled` binary in your build directory depends on your
CMake generator. Pass `--help` to see the rest of the command line options; a
short example follows this list.
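
For instance, a small sketch assuming a Linux shell (the suite name below is hypothetical; `--unittest` also accepts an optional suite filter):

```bash
# Use roughly half of the available cores for the unit tests.
JOBS=$(( $(nproc) / 2 ))
./rippled --unittest --unittest-jobs "$JOBS"
# Optionally restrict the run to a specific suite (hypothetical name).
./rippled --unittest=SomeSuite --unittest-jobs "$JOBS"
```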
|
||||
|
||||
## Coverage report
|
||||
|
||||
The coverage report is intended for developers using the GCC or Clang compilers
(including Apple Clang). It is generated by the build target `coverage`, which
is only enabled when the `coverage` option is set, e.g. with
`--options coverage=True` in `conan` or the `-Dcoverage=ON` variable in `cmake`.
|
||||
|
||||
Prerequisites for the coverage report:
|
||||
|
||||
- [gcovr tool][gcovr] (can be installed e.g. with [pip][python-pip])
|
||||
- `gcov` for GCC (installed with the compiler by default) or
|
||||
- `llvm-cov` for Clang (installed with the compiler by default)
|
||||
- `Debug` build type
|
||||
|
||||
A coverage report is created when the following steps are completed, in order:
|
||||
|
||||
1. `rippled` binary built with instrumentation data, enabled by the `coverage`
|
||||
option mentioned above
|
||||
2. completed run of unit tests, which populates coverage capture data
|
||||
3. completed run of the `gcovr` tool (which internally invokes either `gcov` or `llvm-cov`)
|
||||
to assemble both instrumentation data and the coverage capture data into a coverage report
|
||||
|
||||
The above steps are automated into a single target `coverage`. The instrumented
|
||||
`rippled` binary can also be used for regular development or testing work, at
|
||||
the cost of extra disk space utilization and a small performance hit
|
||||
(to store coverage capture). In case of a spurious failure of unit tests, it is
|
||||
possible to re-run the `coverage` target without rebuilding the `rippled` binary
|
||||
(since it is simply a dependency of the coverage report target). It is also possible
|
||||
to select only specific tests for the purpose of the coverage report, by setting
|
||||
the `coverage_test` variable in `cmake`.
|
||||
|
||||
The default coverage report format is `html-details`, but the user
|
||||
can override it to any of the formats listed in `Builds/CMake/CodeCoverage.cmake`
|
||||
by setting the `coverage_format` variable in `cmake`. It is also possible
|
||||
to generate more than one format at a time by setting the `coverage_extra_args`
|
||||
variable in `cmake`. The specific command line used to run the `gcovr` tool will be
|
||||
displayed if the `CODE_COVERAGE_VERBOSE` variable is set.
|
||||
|
||||
By default, the code coverage tool runs parallel unit tests with `--unittest-jobs`
|
||||
set to the number of available CPU cores. This may cause spurious test
|
||||
errors on Apple. Developers can override the number of unit test jobs with
|
||||
the `coverage_test_parallelism` variable in `cmake`.
|
||||
|
||||
Example use with some cmake variables set:
|
||||
|
||||
```
|
||||
cd .build
|
||||
conan install .. --output-folder . --build missing --settings build_type=Debug
|
||||
cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON -Dxrpld=ON -Dtests=ON -Dcoverage_test_parallelism=2 -Dcoverage_format=html-details -Dcoverage_extra_args="--json coverage.json" -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ..
|
||||
cmake --build . --target coverage
|
||||
```
|
||||
|
||||
After the `coverage` target is completed, the generated coverage report will be
|
||||
stored inside the build directory, as either of:
|
||||
|
||||
- file named `coverage.`_extension_, with a suitable extension for the report format, or
|
||||
- directory named `coverage`, with the `index.html` and other files inside, for the `html-details` or `html-nested` report formats.
|
||||
|
||||
## Options
|
||||
|
||||
| Option     | Default Value | Description                                                                 |
| ---------- | ------------- | --------------------------------------------------------------------------- |
| `assert`   | OFF           | Enable assertions.                                                          |
| `coverage` | OFF           | Prepare the coverage report.                                                |
| `san`      | N/A           | Enable a sanitizer with Clang. Choices are `thread` and `address`.          |
| `tests`    | OFF           | Build tests.                                                                |
| `unity`    | OFF           | Configure a unity build.                                                    |
| `xrpld`    | OFF           | Build the xrpld (`rippled`) application, and not just the libxrpl library.  |
| `werr`     | OFF           | Treat compilation warnings as errors.                                       |
| `wextra`   | OFF           | Enable additional compilation warnings.                                     |
|
||||
|
||||
[Unity builds][5] may be faster for the first build
|
||||
(at the cost of much more memory) since they concatenate sources into fewer
|
||||
translation units. Non-unity builds may be faster for incremental builds,
|
||||
and can be helpful for detecting `#include` omissions.
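
For example, reusing the configure step from [Build and Test](#build-and-test), a unity Release build only adds the `unity` option:

```bash
cd .build
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release -Dunity=ON -Dxrpld=ON -Dtests=ON ..
cmake --build .
```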
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Conan
|
||||
|
||||
After any updates or changes to dependencies, you may need to do the following:
|
||||
|
||||
1. Remove your build directory.
|
||||
2. Remove individual libraries from the Conan cache, e.g.
|
||||
|
||||
```bash
|
||||
conan remove 'grpc/*'
|
||||
```
|
||||
|
||||
**or**
|
||||
|
||||
Remove all libraries from Conan cache:
|
||||
|
||||
```bash
|
||||
conan remove '*'
|
||||
```
|
||||
|
||||
3. Re-run [conan export](#patched-recipes) if needed.
|
||||
4. Re-run [conan install](#build-and-test) (see the combined sketch below).
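
Putting these steps together, a minimal sketch assuming the patched recipes were checked out into `external/` as shown in [Patched recipes](#patched-recipes):

```bash
rm -rf .build
conan remove '*'
conan export --version 1.1.10 external/recipes/snappy/all
conan export --version 4.0.3 external/recipes/soci/all
mkdir .build && cd .build
conan install .. --output-folder . --build missing --settings build_type=Release
```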
|
||||
|
||||
### `protobuf/port_def.inc` file not found
|
||||
|
||||
If `cmake --build .` results in an error due to a missing protobuf file, then
you might have generated CMake files for a different `build_type` than the
`CMAKE_BUILD_TYPE` you passed to CMake.
|
||||
|
||||
```
|
||||
/rippled/.build/pb-xrpl.libpb/xrpl/proto/ripple.pb.h:10:10: fatal error: 'google/protobuf/port_def.inc' file not found
|
||||
10 | #include <google/protobuf/port_def.inc>
|
||||
| ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
1 error generated.
|
||||
```
|
||||
|
||||
For example, if you want to build Debug (see the combined sketch after this list):
|
||||
|
||||
1. For conan install, pass `--settings build_type=Debug`
|
||||
2. For cmake, pass `-DCMAKE_BUILD_TYPE=Debug`
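
Putting the two settings together (paths and options as in the [Build and Test](#build-and-test) steps above):

```bash
cd .build
conan install .. --output-folder . --build missing --settings build_type=Debug
cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Debug -Dxrpld=ON -Dtests=ON ..
cmake --build .
```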
|
||||
|
||||
## Add a Dependency
|
||||
|
||||
If you want to experiment with a new package, follow these steps:
|
||||
|
||||
1. Search for the package on [Conan Center](https://conan.io/center/).
|
||||
2. Modify [`conanfile.py`](./conanfile.py):
|
||||
- Add a version of the package to the `requires` property.
|
||||
- Change any default options for the package by adding them to the
|
||||
`default_options` property (with syntax `'$package:$option': $value`).
|
||||
3. Modify [`CMakeLists.txt`](./CMakeLists.txt):
|
||||
- Add a call to `find_package($package REQUIRED)`.
|
||||
- Link a library from the package to the target `ripple_libs`
|
||||
(search for the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`).
|
||||
4. Start coding! Don't forget to include whatever headers you need from the package.
|
||||
|
||||
[1]: https://github.com/conan-io/conan-center-index/issues/13168
|
||||
[2]: https://en.cppreference.com/w/cpp/compiler_support/20
|
||||
[3]: https://docs.conan.io/en/latest/getting_started.html
|
||||
[5]: https://en.wikipedia.org/wiki/Unity_build
|
||||
[6]: https://github.com/boostorg/beast/issues/2648
|
||||
[7]: https://github.com/boostorg/beast/issues/2661
|
||||
[gcovr]: https://gcovr.com/en/stable/getting-started.html
|
||||
[python-pip]: https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/
|
||||
[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
|
||||
[profile]: https://docs.conan.io/en/latest/reference/profiles.html
|
||||
38
Builds/ArchLinux/PKGBUILD
Normal file
@@ -0,0 +1,38 @@
|
||||
# Maintainer: Roberto Catini <roberto.catini@gmail.com>
|
||||
|
||||
pkgname=rippled
|
||||
pkgrel=1
|
||||
pkgver=0
|
||||
pkgdesc="Ripple peer-to-peer network daemon"
|
||||
arch=('i686' 'x86_64')
|
||||
url="https://github.com/ripple/rippled"
|
||||
license=('custom:ISC')
|
||||
depends=('protobuf' 'openssl' 'boost-libs')
|
||||
makedepends=('git' 'scons' 'boost')
|
||||
backup=("etc/$pkgname/rippled.cfg")
|
||||
source=("git://github.com/ripple/rippled.git#branch=master")
|
||||
sha512sums=('SKIP')
|
||||
|
||||
pkgver() {
|
||||
cd "$srcdir/$pkgname"
|
||||
git describe --long --tags | sed -r 's/([^-]*-g)/r\1/;s/-/./g'
|
||||
}
|
||||
|
||||
build() {
|
||||
cd "$srcdir/$pkgname"
|
||||
scons
|
||||
}
|
||||
|
||||
check() {
|
||||
cd "$srcdir/$pkgname"
|
||||
build/rippled --unittest
|
||||
}
|
||||
|
||||
package() {
|
||||
cd "$srcdir/$pkgname"
|
||||
install -D -m644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
|
||||
install -D build/rippled "$pkgdir/usr/bin/rippled"
|
||||
install -D -m644 doc/rippled-example.cfg "$pkgdir/etc/$pkgname/rippled.cfg"
|
||||
mkdir -p "$pkgdir/var/lib/$pkgname/db"
|
||||
mkdir -p "$pkgdir/var/log/$pkgname"
|
||||
}
|
||||
658
Builds/CMake/CMakeFuncs.cmake
Normal file
@@ -0,0 +1,658 @@
|
||||
# This is a set of common functions and settings for rippled
|
||||
# and derived products.
|
||||
|
||||
############################################################
|
||||
|
||||
cmake_minimum_required(VERSION 3.1.0)
|
||||
|
||||
if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
|
||||
message(WARNING "Builds are strongly discouraged in "
|
||||
"${CMAKE_SOURCE_DIR}.")
|
||||
endif()
|
||||
|
||||
macro(parse_target)
|
||||
|
||||
if (NOT target AND NOT CMAKE_BUILD_TYPE)
|
||||
if (APPLE)
|
||||
set(target clang.debug)
|
||||
elseif(WIN32)
|
||||
set(target msvc)
|
||||
else()
|
||||
set(target gcc.debug)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (target)
|
||||
# Parse the target
|
||||
set(remaining ${target})
|
||||
while (remaining)
|
||||
# get the component up to the next dot or end
|
||||
string(REGEX REPLACE "^\\.?([^\\.]+).*$" "\\1" cur_component ${remaining})
|
||||
string(REGEX REPLACE "^\\.?[^\\.]+(.*$)" "\\1" remaining ${remaining})
|
||||
|
||||
if (${cur_component} STREQUAL gcc)
|
||||
if (DEFINED ENV{GNU_CC})
|
||||
set(CMAKE_C_COMPILER $ENV{GNU_CC})
|
||||
elseif ($ENV{CXX} MATCHES .*gcc.*)
|
||||
set(CMAKE_CXX_COMPILER $ENV{CC})
|
||||
else()
|
||||
find_program(CMAKE_C_COMPILER gcc)
|
||||
endif()
|
||||
|
||||
if (DEFINED ENV{GNU_CXX})
|
||||
set(CMAKE_C_COMPILER $ENV{GNU_CXX})
|
||||
elseif ($ENV{CXX} MATCHES .*g\\+\\+.*)
|
||||
set(CMAKE_C_COMPILER $ENV{CC})
|
||||
else()
|
||||
find_program(CMAKE_CXX_COMPILER g++)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (${cur_component} STREQUAL clang)
|
||||
if (DEFINED ENV{CLANG_CC})
|
||||
set(CMAKE_C_COMPILER $ENV{CLANG_CC})
|
||||
elseif ($ENV{CXX} MATCHES .*clang.*)
|
||||
set(CMAKE_CXX_COMPILER $ENV{CC})
|
||||
else()
|
||||
find_program(CMAKE_C_COMPILER clang)
|
||||
endif()
|
||||
|
||||
if (DEFINED ENV{CLANG_CXX})
|
||||
set(CMAKE_C_COMPILER $ENV{CLANG_CXX})
|
||||
elseif ($ENV{CXX} MATCHES .*clang.*)
|
||||
set(CMAKE_C_COMPILER $ENV{CC})
|
||||
else()
|
||||
find_program(CMAKE_CXX_COMPILER clang++)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (${cur_component} STREQUAL msvc)
|
||||
# TBD
|
||||
endif()
|
||||
|
||||
if (${cur_component} STREQUAL unity)
|
||||
set(unity true)
|
||||
set(nonunity false)
|
||||
endif()
|
||||
|
||||
if (${cur_component} STREQUAL nounity)
|
||||
set(unity false)
|
||||
set(nonunity true)
|
||||
endif()
|
||||
|
||||
if (${cur_component} STREQUAL debug)
|
||||
set(release false)
|
||||
endif()
|
||||
|
||||
if (${cur_component} STREQUAL release)
|
||||
set(release true)
|
||||
endif()
|
||||
|
||||
if (${cur_component} STREQUAL coverage)
|
||||
set(coverage true)
|
||||
set(debug true)
|
||||
endif()
|
||||
|
||||
if (${cur_component} STREQUAL profile)
|
||||
set(profile true)
|
||||
endif()
|
||||
|
||||
if (${cur_component} STREQUAL ci)
|
||||
# Workarounds that make various CI builds work, but that
|
||||
# we don't want in the general case.
|
||||
set(ci true)
|
||||
set(openssl_min 1.0.1)
|
||||
endif()
|
||||
|
||||
endwhile()
|
||||
|
||||
if (release)
|
||||
set(CMAKE_BUILD_TYPE Release)
|
||||
else()
|
||||
set(CMAKE_BUILD_TYPE Debug)
|
||||
endif()
|
||||
|
||||
if (NOT unity)
|
||||
set(CMAKE_BUILD_TYPE ${CMAKE_BUILD_TYPE}Classic)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
endmacro()
|
||||
|
||||
############################################################
|
||||
|
||||
macro(setup_build_cache)
|
||||
set(san "" CACHE STRING "On gcc & clang, add sanitizer
|
||||
instrumentation")
|
||||
set_property(CACHE san PROPERTY STRINGS ";address;thread")
|
||||
set(assert false CACHE BOOL "Enables asserts, even in release builds")
|
||||
set(static false CACHE BOOL
|
||||
"On linux, link protobuf, openssl, libc++, and boost statically")
|
||||
|
||||
if (static AND (WIN32 OR APPLE))
|
||||
message(FATAL_ERROR "Static linking is only supported on linux.")
|
||||
endif()
|
||||
|
||||
if (${CMAKE_GENERATOR} STREQUAL "Unix Makefiles" AND NOT CMAKE_BUILD_TYPE)
|
||||
set(CMAKE_BUILD_TYPE Debug)
|
||||
endif()
|
||||
|
||||
# Can't exclude files from configurations, so can't support both
|
||||
# unity and nonunity configurations at the same time
|
||||
if (NOT DEFINED unity OR unity)
|
||||
set(CMAKE_CONFIGURATION_TYPES
|
||||
Debug
|
||||
Release)
|
||||
else()
|
||||
set(CMAKE_CONFIGURATION_TYPES
|
||||
DebugClassic
|
||||
ReleaseClassic)
|
||||
if(${CMAKE_BUILD_TYPE} STREQUAL "Debug")
|
||||
set(CMAKE_BUILD_TYPE DebugClassic)
|
||||
elseif(${CMAKE_BUILD_TYPE} STREQUAL "Release")
|
||||
set(CMAKE_BUILD_TYPE ReleaseClassic)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(CMAKE_CONFIGURATION_TYPES
|
||||
${CMAKE_CONFIGURATION_TYPES} CACHE STRING "" FORCE)
|
||||
endmacro()
|
||||
|
||||
############################################################
|
||||
|
||||
function(prepend var prefix)
|
||||
set(listVar "")
|
||||
foreach(f ${ARGN})
|
||||
list(APPEND listVar "${prefix}${f}")
|
||||
endforeach(f)
|
||||
set(${var} "${listVar}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
macro(append_flags name)
|
||||
foreach (arg ${ARGN})
|
||||
set(${name} "${${name}} ${arg}")
|
||||
endforeach()
|
||||
endmacro()
|
||||
|
||||
macro(group_sources curdir)
|
||||
file(GLOB children RELATIVE ${PROJECT_SOURCE_DIR}/${curdir}
|
||||
${PROJECT_SOURCE_DIR}/${curdir}/*)
|
||||
foreach (child ${children})
|
||||
if (IS_DIRECTORY ${PROJECT_SOURCE_DIR}/${curdir}/${child})
|
||||
group_sources(${curdir}/${child})
|
||||
else()
|
||||
string(REPLACE "/" "\\" groupname ${curdir})
|
||||
source_group(${groupname} FILES
|
||||
${PROJECT_SOURCE_DIR}/${curdir}/${child})
|
||||
endif()
|
||||
endforeach()
|
||||
endmacro()
|
||||
|
||||
macro(add_with_props src_var files)
|
||||
list(APPEND ${src_var} ${files})
|
||||
foreach (arg ${ARGN})
|
||||
set(props "${props} ${arg}")
|
||||
endforeach()
|
||||
set_source_files_properties(
|
||||
${files}
|
||||
PROPERTIES COMPILE_FLAGS
|
||||
${props})
|
||||
endmacro()
|
||||
|
||||
############################################################
|
||||
|
||||
macro(determine_build_type)
|
||||
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang") # both Clang and AppleClang
|
||||
set(is_clang true)
|
||||
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
|
||||
set(is_gcc true)
|
||||
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
|
||||
set(is_msvc true)
|
||||
endif()
|
||||
|
||||
if (${CMAKE_GENERATOR} STREQUAL "Xcode")
|
||||
set(is_xcode true)
|
||||
else()
|
||||
set(is_xcode false)
|
||||
endif()
|
||||
|
||||
if (NOT is_gcc AND NOT is_clang AND NOT is_msvc)
|
||||
message("Current compiler is ${CMAKE_CXX_COMPILER_ID}")
|
||||
message(FATAL_ERROR "Missing compiler. Must be GNU, Clang, or MSVC")
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
############################################################
|
||||
|
||||
macro(check_gcc4_abi)
|
||||
# Check if should use gcc4's ABI
|
||||
set(gcc4_abi false)
|
||||
|
||||
if ($ENV{RIPPLED_OLD_GCC_ABI})
|
||||
set(gcc4_abi true)
|
||||
endif()
|
||||
|
||||
if (is_gcc AND NOT gcc4_abi)
|
||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 5)
|
||||
execute_process(COMMAND lsb_release -si OUTPUT_VARIABLE lsb)
|
||||
string(STRIP "${lsb}" lsb)
|
||||
if ("${lsb}" STREQUAL "Ubuntu")
|
||||
execute_process(COMMAND lsb_release -sr OUTPUT_VARIABLE lsb)
|
||||
string(STRIP ${lsb} lsb)
|
||||
if (${lsb} VERSION_LESS 15.1)
|
||||
set(gcc4_abi true)
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (gcc4_abi)
|
||||
add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0)
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
############################################################
|
||||
|
||||
macro(special_build_flags)
|
||||
if (coverage)
|
||||
add_compile_options(-fprofile-arcs -ftest-coverage)
|
||||
append_flags(CMAKE_EXE_LINKER_FLAGS -fprofile-arcs -ftest-coverage)
|
||||
endif()
|
||||
|
||||
if (profile)
|
||||
add_compile_options(-p -pg)
|
||||
append_flags(CMAKE_EXE_LINKER_FLAGS -p -pg)
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
############################################################
|
||||
|
||||
# Params: Boost components to search for.
|
||||
macro(use_boost)
|
||||
if ((NOT DEFINED BOOST_ROOT) AND (DEFINED ENV{BOOST_ROOT}))
|
||||
set(BOOST_ROOT $ENV{BOOST_ROOT})
|
||||
endif()
|
||||
if(WIN32 OR CYGWIN)
|
||||
# Workaround for MSVC having two boost versions - x86 and x64 on same PC in stage folders
|
||||
if(DEFINED BOOST_ROOT)
|
||||
if(CMAKE_SIZEOF_VOID_P EQUAL 8 AND IS_DIRECTORY ${BOOST_ROOT}/stage64/lib)
|
||||
set(Boost_LIBRARY_DIR ${BOOST_ROOT}/stage64/lib)
|
||||
else()
|
||||
set(Boost_LIBRARY_DIR ${BOOST_ROOT}/stage/lib)
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (is_clang AND DEFINED ENV{CLANG_BOOST_ROOT})
|
||||
set(BOOST_ROOT $ENV{CLANG_BOOST_ROOT})
|
||||
endif()
|
||||
|
||||
set(Boost_USE_STATIC_LIBS on)
|
||||
set(Boost_USE_MULTITHREADED on)
|
||||
set(Boost_USE_STATIC_RUNTIME off)
|
||||
find_package(Boost COMPONENTS
|
||||
${ARGN})
|
||||
|
||||
if (Boost_FOUND OR
|
||||
((CYGWIN OR WIN32) AND Boost_INCLUDE_DIRS AND Boost_LIBRARY_DIRS))
|
||||
if(NOT Boost_FOUND)
|
||||
message(WARNING "Boost directory found, but not all components. May not be able to build.")
|
||||
endif()
|
||||
include_directories(SYSTEM ${Boost_INCLUDE_DIRS})
|
||||
link_directories(${Boost_LIBRARY_DIRS})
|
||||
else()
|
||||
message(FATAL_ERROR "Boost not found")
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
macro(use_pthread)
|
||||
if (NOT WIN32)
|
||||
set(THREADS_PREFER_PTHREAD_FLAG ON)
|
||||
find_package(Threads)
|
||||
add_compile_options(${CMAKE_THREAD_LIBS_INIT})
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
macro(use_openssl openssl_min)
|
||||
if (APPLE AND NOT DEFINED ENV{OPENSSL_ROOT_DIR})
|
||||
find_program(HOMEBREW brew)
|
||||
if (NOT HOMEBREW STREQUAL "HOMEBREW-NOTFOUND")
|
||||
execute_process(COMMAND brew --prefix openssl
|
||||
OUTPUT_VARIABLE OPENSSL_ROOT_DIR
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (WIN32)
|
||||
if (DEFINED ENV{OPENSSL_ROOT})
|
||||
include_directories($ENV{OPENSSL_ROOT}/include)
|
||||
link_directories($ENV{OPENSSL_ROOT}/lib)
|
||||
endif()
|
||||
else()
|
||||
if (static)
|
||||
set(tmp CMAKE_FIND_LIBRARY_SUFFIXES)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
|
||||
endif()
|
||||
|
||||
find_package(OpenSSL)
|
||||
|
||||
if (static)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES tmp)
|
||||
endif()
|
||||
|
||||
if (OPENSSL_FOUND)
|
||||
include_directories(${OPENSSL_INCLUDE_DIR})
|
||||
else()
|
||||
message(FATAL_ERROR "OpenSSL not found")
|
||||
endif()
|
||||
if (UNIX AND NOT APPLE AND ${OPENSSL_VERSION} VERSION_LESS ${openssl_min})
|
||||
message(FATAL_ERROR
|
||||
"Your openssl is Version: ${OPENSSL_VERSION}, ${openssl_min} or better is required.")
|
||||
endif()
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
macro(use_protobuf)
|
||||
if (WIN32)
|
||||
if (DEFINED ENV{PROTOBUF_ROOT})
|
||||
include_directories($ENV{PROTOBUF_ROOT}/src)
|
||||
link_directories($ENV{PROTOBUF_ROOT}/src/.libs)
|
||||
endif()
|
||||
|
||||
# Modified from FindProtobuf.cmake
|
||||
FUNCTION(PROTOBUF_GENERATE_CPP SRCS HDRS PROTOFILES)
|
||||
# argument parsing
|
||||
IF(NOT PROTOFILES)
|
||||
MESSAGE(SEND_ERROR "Error: PROTOBUF_GENERATE_CPP() called without any proto files")
|
||||
RETURN()
|
||||
ENDIF()
|
||||
|
||||
SET(OUTPATH ${CMAKE_CURRENT_BINARY_DIR})
|
||||
SET(PROTOROOT ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
# the real logic
|
||||
SET(${SRCS})
|
||||
SET(${HDRS})
|
||||
FOREACH(PROTOFILE ${PROTOFILES})
|
||||
# ensure that the file ends with .proto
|
||||
STRING(REGEX MATCH "\\.proto$$" PROTOEND ${PROTOFILE})
|
||||
IF(NOT PROTOEND)
|
||||
MESSAGE(SEND_ERROR "Proto file '${PROTOFILE}' does not end with .proto")
|
||||
ENDIF()
|
||||
|
||||
GET_FILENAME_COMPONENT(PROTO_PATH ${PROTOFILE} PATH)
|
||||
GET_FILENAME_COMPONENT(ABS_FILE ${PROTOFILE} ABSOLUTE)
|
||||
GET_FILENAME_COMPONENT(FILE_WE ${PROTOFILE} NAME_WE)
|
||||
|
||||
STRING(REGEX MATCH "^${PROTOROOT}" IN_ROOT_PATH ${PROTOFILE})
|
||||
STRING(REGEX MATCH "^${PROTOROOT}" IN_ROOT_ABS_FILE ${ABS_FILE})
|
||||
|
||||
IF(IN_ROOT_PATH)
|
||||
SET(MATCH_PATH ${PROTOFILE})
|
||||
ELSEIF(IN_ROOT_ABS_FILE)
|
||||
SET(MATCH_PATH ${ABS_FILE})
|
||||
ELSE()
|
||||
MESSAGE(SEND_ERROR "Proto file '${PROTOFILE}' is not in protoroot '${PROTOROOT}'")
|
||||
ENDIF()
|
||||
|
||||
# build the result file name
|
||||
STRING(REGEX REPLACE "^${PROTOROOT}(/?)" "" ROOT_CLEANED_FILE ${MATCH_PATH})
|
||||
STRING(REGEX REPLACE "\\.proto$$" "" EXT_CLEANED_FILE ${ROOT_CLEANED_FILE})
|
||||
|
||||
SET(CPP_FILE "${OUTPATH}/${EXT_CLEANED_FILE}.pb.cc")
|
||||
SET(H_FILE "${OUTPATH}/${EXT_CLEANED_FILE}.pb.h")
|
||||
|
||||
LIST(APPEND ${SRCS} "${CPP_FILE}")
|
||||
LIST(APPEND ${HDRS} "${H_FILE}")
|
||||
|
||||
ADD_CUSTOM_COMMAND(
|
||||
OUTPUT "${CPP_FILE}" "${H_FILE}"
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory ${OUTPATH}
|
||||
COMMAND ${PROTOBUF_PROTOC_EXECUTABLE}
|
||||
ARGS "--cpp_out=${OUTPATH}" --proto_path "${PROTOROOT}" "${MATCH_PATH}"
|
||||
DEPENDS ${ABS_FILE}
|
||||
COMMENT "Running C++ protocol buffer compiler on ${MATCH_PATH} with root ${PROTOROOT}, generating: ${CPP_FILE}"
|
||||
VERBATIM)
|
||||
|
||||
ENDFOREACH()
|
||||
|
||||
SET_SOURCE_FILES_PROPERTIES(${${SRCS}} ${${HDRS}} PROPERTIES GENERATED TRUE)
|
||||
SET(${SRCS} ${${SRCS}} PARENT_SCOPE)
|
||||
SET(${HDRS} ${${HDRS}} PARENT_SCOPE)
|
||||
|
||||
ENDFUNCTION()
|
||||
|
||||
set(PROTOBUF_PROTOC_EXECUTABLE Protoc) # must be on path
|
||||
else()
|
||||
if (static)
|
||||
set(tmp CMAKE_FIND_LIBRARY_SUFFIXES)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
|
||||
endif()
|
||||
|
||||
find_package(Protobuf REQUIRED)
|
||||
|
||||
if (static)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES tmp)
|
||||
endif()
|
||||
|
||||
if (is_clang AND DEFINED ENV{CLANG_PROTOBUF_ROOT})
|
||||
link_directories($ENV{CLANG_PROTOBUF_ROOT}/src/.libs)
|
||||
include_directories($ENV{CLANG_PROTOBUF_ROOT}/src)
|
||||
else()
|
||||
include_directories(${PROTOBUF_INCLUDE_DIRS})
|
||||
endif()
|
||||
endif()
|
||||
include_directories(${CMAKE_CURRENT_BINARY_DIR})
|
||||
|
||||
file(GLOB ripple_proto src/ripple/proto/*.proto)
|
||||
PROTOBUF_GENERATE_CPP(PROTO_SRCS PROTO_HDRS ${ripple_proto})
|
||||
|
||||
if (WIN32)
|
||||
include_directories(src/protobuf/src
|
||||
src/protobuf/vsprojects
|
||||
${CMAKE_CURRENT_BINARY_DIR}/src/ripple/proto)
|
||||
endif()
|
||||
|
||||
endmacro()
|
||||
|
||||
############################################################
|
||||
|
||||
macro(setup_build_boilerplate)
|
||||
if (NOT WIN32 AND san)
|
||||
add_compile_options(-fsanitize=${san} -fno-omit-frame-pointer)
|
||||
|
||||
append_flags(CMAKE_EXE_LINKER_FLAGS
|
||||
-fsanitize=${san})
|
||||
|
||||
string(TOLOWER ${san} ci_san)
|
||||
if (${ci_san} STREQUAL address)
|
||||
set(SANITIZER_LIBRARIES asan)
|
||||
add_definitions(-DSANITIZER=ASAN)
|
||||
endif()
|
||||
if (${ci_san} STREQUAL thread)
|
||||
set(SANITIZER_LIBRARIES tsan)
|
||||
add_definitions(-DSANITIZER=TSAN)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
############################################################
|
||||
|
||||
add_definitions(
|
||||
-DOPENSSL_NO_SSL2
|
||||
-DDEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER
|
||||
-DHAVE_USLEEP=1
|
||||
-DSOCI_CXX_C11=1
|
||||
-D_SILENCE_STDEXT_HASH_DEPRECATION_WARNINGS
|
||||
-DBOOST_NO_AUTO_PTR
|
||||
)
|
||||
|
||||
if (is_gcc)
|
||||
add_compile_options(-Wno-unused-but-set-variable -Wno-deprecated)
|
||||
endif()
|
||||
|
||||
# Generator expressions are not supported in add_definitions, use set_property instead
|
||||
set_property(
|
||||
DIRECTORY
|
||||
APPEND
|
||||
PROPERTY COMPILE_DEFINITIONS
|
||||
$<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:DEBUG _DEBUG>)
|
||||
|
||||
if (NOT assert)
|
||||
set_property(
|
||||
DIRECTORY
|
||||
APPEND
|
||||
PROPERTY COMPILE_DEFINITIONS
|
||||
$<$<OR:$<BOOL:${profile}>,$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:NDEBUG>)
|
||||
endif()
|
||||
|
||||
if (NOT WIN32)
|
||||
add_definitions(-D_FILE_OFFSET_BITS=64)
|
||||
append_flags(CMAKE_CXX_FLAGS -frtti -std=c++14 -Wno-invalid-offsetof
|
||||
-DBOOST_COROUTINE_NO_DEPRECATION_WARNING -DBOOST_COROUTINES_NO_DEPRECATION_WARNING)
|
||||
add_compile_options(-Wall -Wno-sign-compare -Wno-char-subscripts -Wno-format
|
||||
-Wno-unused-local-typedefs -g)
|
||||
# There seems to be an issue using generator expressions with multiple values,
|
||||
# split the expression
|
||||
add_compile_options($<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:-O3>)
|
||||
add_compile_options($<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:-fno-strict-aliasing>)
|
||||
append_flags(CMAKE_EXE_LINKER_FLAGS -rdynamic -g)
|
||||
|
||||
if (is_clang)
|
||||
add_compile_options(
|
||||
-Wno-redeclared-class-member -Wno-mismatched-tags -Wno-deprecated-register)
|
||||
add_definitions(-DBOOST_ASIO_HAS_STD_ARRAY)
|
||||
endif()
|
||||
|
||||
if (APPLE)
|
||||
add_definitions(-DBEAST_COMPILE_OBJECTIVE_CPP=1
|
||||
-DNO_LOG_UNHANDLED_EXCEPTIONS)
|
||||
add_compile_options(
|
||||
-Wno-deprecated -Wno-deprecated-declarations -Wno-unused-variable -Wno-unused-function)
|
||||
endif()
|
||||
|
||||
if (is_gcc)
|
||||
add_compile_options(-Wno-unused-but-set-variable -Wno-unused-local-typedefs)
|
||||
add_compile_options($<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:-O0>)
|
||||
endif (is_gcc)
|
||||
else(NOT WIN32)
|
||||
add_compile_options(
|
||||
/bigobj # Increase object file max size
|
||||
/EHa # ExceptionHandling all
|
||||
/fp:precise # Floating point behavior
|
||||
/Gd # __cdecl calling convention
|
||||
/Gm- # Minimal rebuild: disabled
|
||||
/GR # Enable RTTI
|
||||
/Gy- # Function level linking: disabled
|
||||
/FS
|
||||
/MP # Multiprocessor compilation
|
||||
/openmp- # pragma omp: disabled
|
||||
/Zc:forScope # Language extension: for scope
|
||||
/Zi # Generate complete debug info
|
||||
/errorReport:none # No error reporting to Internet
|
||||
/nologo # Suppress login banner
|
||||
/W3 # Warning level 3
|
||||
/WX- # Disable warnings as errors
|
||||
/wd"4018"
|
||||
/wd"4244"
|
||||
/wd"4267"
|
||||
/wd"4800" # Disable C4800(int to bool performance)
|
||||
/wd"4503" # Decorated name length exceeded, name was truncated
|
||||
)
|
||||
add_definitions(
|
||||
-D_WIN32_WINNT=0x6000
|
||||
-D_SCL_SECURE_NO_WARNINGS
|
||||
-D_CRT_SECURE_NO_WARNINGS
|
||||
-DWIN32_CONSOLE
|
||||
-DNOMINMAX
|
||||
-DBOOST_COROUTINE_NO_DEPRECATION_WARNING
|
||||
-DBOOST_COROUTINES_NO_DEPRECATION_WARNING)
|
||||
append_flags(CMAKE_EXE_LINKER_FLAGS
|
||||
/DEBUG
|
||||
/DYNAMICBASE
|
||||
/ERRORREPORT:NONE
|
||||
/MACHINE:X64
|
||||
/MANIFEST
|
||||
/nologo
|
||||
/NXCOMPAT
|
||||
/SUBSYSTEM:CONSOLE
|
||||
/TLBID:1)
|
||||
|
||||
|
||||
# There seems to be an issue using generator expressions with multiple values,
|
||||
# split the expression
|
||||
# /GS Buffers security check: enable
|
||||
add_compile_options($<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:/GS>)
|
||||
# /MTd Language: Multi-threaded Debug CRT
|
||||
add_compile_options($<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:/MTd>)
|
||||
# /Od Optimization: Disabled
|
||||
add_compile_options($<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:/Od>)
|
||||
# /RTC1 Run-time error checks:
|
||||
add_compile_options($<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:/RTC1>)
|
||||
|
||||
# Generator expressions are not supported in add_definitions, use set_property instead
|
||||
set_property(
|
||||
DIRECTORY
|
||||
APPEND
|
||||
PROPERTY COMPILE_DEFINITIONS
|
||||
$<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:_CRTDBG_MAP_ALLOC>)
|
||||
|
||||
# /MT Language: Multi-threaded CRT
|
||||
add_compile_options($<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:/MT>)
|
||||
add_compile_options($<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:/Ox>)
|
||||
# /Ox Optimization: Full
|
||||
|
||||
endif (NOT WIN32)
|
||||
|
||||
if (static)
|
||||
append_flags(CMAKE_EXE_LINKER_FLAGS -static-libstdc++)
|
||||
# set_target_properties(ripple-libpp PROPERTIES LINK_SEARCH_START_STATIC 1)
|
||||
# set_target_properties(ripple-libpp PROPERTIES LINK_SEARCH_END_STATIC 1)
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
############################################################
|
||||
|
||||
macro(create_build_folder cur_project)
|
||||
if (NOT WIN32)
|
||||
ADD_CUSTOM_TARGET(build_folder ALL
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}
|
||||
COMMENT "Creating build output folder")
|
||||
add_dependencies(${cur_project} build_folder)
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
macro(set_startup_project cur_project)
|
||||
if (WIN32 AND NOT ci)
|
||||
if (CMAKE_VERSION VERSION_LESS 3.6)
|
||||
message(WARNING
|
||||
"Setting the VS startup project requires cmake 3.6 or later. Please upgrade.")
|
||||
endif()
|
||||
set_property(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY
|
||||
VS_STARTUP_PROJECT ${cur_project})
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
macro(link_common_libraries cur_project)
|
||||
if (NOT MSVC)
|
||||
target_link_libraries(${cur_project} ${Boost_LIBRARIES})
|
||||
target_link_libraries(${cur_project} dl)
|
||||
target_link_libraries(${cur_project} Threads::Threads)
|
||||
if (APPLE)
|
||||
find_library(app_kit AppKit)
|
||||
find_library(foundation Foundation)
|
||||
target_link_libraries(${cur_project}
|
||||
crypto ssl ${app_kit} ${foundation})
|
||||
else()
|
||||
target_link_libraries(${cur_project} rt)
|
||||
endif()
|
||||
else(NOT MSVC)
|
||||
target_link_libraries(${cur_project}
|
||||
$<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:VC/static/ssleay32MTd>
|
||||
$<$<OR:$<CONFIG:Debug>,$<CONFIG:DebugClassic>>:VC/static/libeay32MTd>)
|
||||
target_link_libraries(${cur_project}
|
||||
$<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:VC/static/ssleay32MT>
|
||||
$<$<OR:$<CONFIG:Release>,$<CONFIG:ReleaseClassic>>:VC/static/libeay32MT>)
|
||||
target_link_libraries(${cur_project}
|
||||
legacy_stdio_definitions.lib Shlwapi kernel32 user32 gdi32 winspool comdlg32
|
||||
advapi32 shell32 ole32 oleaut32 uuid odbc32 odbccp32)
|
||||
endif (NOT MSVC)
|
||||
endmacro()
|
||||
30
Builds/Docker/Dockerfile
Normal file
@@ -0,0 +1,30 @@
|
||||
# rippled
|
||||
|
||||
# use the ubuntu base image
|
||||
FROM ubuntu
|
||||
MAINTAINER Roberto Catini roberto.catini@gmail.com
|
||||
|
||||
# make sure the package repository is up to date
|
||||
RUN apt-get update
|
||||
RUN apt-get -y upgrade
|
||||
|
||||
# install the dependencies
|
||||
RUN apt-get -y install git scons pkg-config protobuf-compiler libprotobuf-dev libssl-dev libboost1.55-all-dev
|
||||
|
||||
# download source code from official repository
|
||||
RUN git clone https://github.com/ripple/rippled.git src; cd src/; git checkout master
|
||||
|
||||
# compile
|
||||
RUN cd src/; scons build/rippled
|
||||
|
||||
# move to root directory and strip
|
||||
RUN cp src/build/rippled rippled; strip rippled
|
||||
|
||||
# copy default config
|
||||
RUN cp src/doc/rippled-example.cfg rippled.cfg
|
||||
|
||||
# clean source
|
||||
RUN rm -r src
|
||||
|
||||
# launch rippled when launching the container
|
||||
ENTRYPOINT ./rippled
|
||||
23
Builds/Docker/Dockerfile-testnet
Normal file
@@ -0,0 +1,23 @@
|
||||
FROM ubuntu
|
||||
MAINTAINER Torrie Fischer <torrie@ripple.com>
|
||||
|
||||
RUN apt-get update -qq &&\
|
||||
apt-get install -qq software-properties-common &&\
|
||||
apt-add-repository -y ppa:ubuntu-toolchain-r/test &&\
|
||||
apt-add-repository -y ppa:afrank/boost &&\
|
||||
apt-get update -qq
|
||||
|
||||
RUN apt-get purge -qq libboost1.48-dev &&\
|
||||
apt-get install -qq libprotobuf8 libboost1.57-all-dev
|
||||
|
||||
RUN mkdir -p /srv/rippled/data
|
||||
|
||||
VOLUME /srv/rippled/data/
|
||||
|
||||
ENTRYPOINT ["/srv/rippled/bin/rippled"]
|
||||
CMD ["--conf", "/srv/rippled/data/rippled.cfg"]
|
||||
EXPOSE 51235/udp
|
||||
EXPOSE 5005/tcp
|
||||
|
||||
ADD ./rippled.cfg /srv/rippled/data/rippled.cfg
|
||||
ADD ./rippled /srv/rippled/bin/
|
||||
13
Builds/Docker/build-ci.sh
Executable file
@@ -0,0 +1,13 @@
|
||||
set -e
|
||||
|
||||
mkdir -p build/docker/
|
||||
cp doc/rippled-example.cfg build/clang.debug/rippled build/docker/
|
||||
cp Builds/Docker/Dockerfile-testnet build/docker/Dockerfile
|
||||
mv build/docker/rippled-example.cfg build/docker/rippled.cfg
|
||||
strip build/docker/rippled
|
||||
docker build -t ripple/rippled:$CIRCLE_SHA1 build/docker/
|
||||
docker tag ripple/rippled:$CIRCLE_SHA1 ripple/rippled:latest
|
||||
|
||||
if [ -z "$CIRCLE_PR_NUMBER" ]; then
|
||||
docker tag ripple/rippled:$CIRCLE_SHA1 ripple/rippled:$CIRCLE_BRANCH
|
||||
fi
|
||||
16
Builds/Docker/push-to-hub.sh
Executable file
@@ -0,0 +1,16 @@
|
||||
set -e
|
||||
|
||||
if [ -z "$DOCKER_EMAIL" -o -z "$DOCKER_USERNAME" -o -z "$DOCKER_PASSWORD" ];then
|
||||
echo "Docker credentials are not set. Can't login to docker, no containers will be pushed."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [ -n "$CIRCLE_PR_NUMBER" ]; then
|
||||
echo "Not pushing results of a pull request build."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
docker login -e $DOCKER_EMAIL -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
|
||||
docker push ripple/rippled:$CIRCLE_SHA1
|
||||
docker push ripple/rippled:$CIRCLE_BRANCH
|
||||
docker push ripple/rippled:latest
|
||||
31
Builds/Eclipse/README.md
Normal file
@@ -0,0 +1,31 @@
|
||||
**Requirements**
|
||||
|
||||
1. Java Runtime Environment (JRE)
|
||||
2. Eclipse with CDT (tested on Luna):
|
||||
http://www.eclipse.org/downloads/packages/eclipse-ide-cc-developers/lunasr2
|
||||
3. Eclipse SCons plugin: http://sconsolidator.com/
|
||||
**WARNING**: by default the SCons plugin uses 16 threads. Go to
|
||||
*Window->Preferences->SCons->Build Settings* in Eclipse and make it
|
||||
use only 4-8 jobs (threads) or whatever you feel comfortable with. It will
|
||||
positively freeze your system if you run with 16 threads/jobs.
|
||||
|
||||

|
||||
|
||||
**Getting Started**
|
||||
|
||||
After setting up Eclipse just do a File->New->Other...
|
||||
Select: C/C++ / New SCons project from existing source
|
||||
Point the importer to the folder where the SConstruct resides (the root
|
||||
folder of your git workspace normally)
|
||||
|
||||
**Build**
|
||||
|
||||
Just hit Project->Build All in Eclipse to get started. And remember to not
|
||||
let it run 16 threads!
|
||||
|
||||
**Debug**
|
||||
|
||||
Start a new Eclipse debug configuration and set binary to run to build/rippled
|
||||
(assuming you have built it).
|
||||
|
||||

|
||||
BIN
Builds/Eclipse/debug.png
Normal file
|
After Width: | Height: | Size: 18 KiB |
BIN
Builds/Eclipse/scons.png
Normal file
|
After Width: | Height: | Size: 17 KiB |
22
Builds/Fedora/install_rippled_depends_fedora.sh
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
#
|
||||
# This scripts installs the dependencies needed by rippled. It should be run
|
||||
# with sudo.
|
||||
#
|
||||
|
||||
if [ ! -f /etc/fedora-release ]; then
|
||||
echo "This script is meant to be run on fedora"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
fedora_release=$(grep -o '[0-9]*' /etc/fedora-release)
|
||||
|
||||
if (( $(bc <<< "${fedora_release} < 22") )); then
|
||||
echo "This script is meant to run on fedora 22 or greater"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
yum -y update
|
||||
yum -y group install "Development Tools"
|
||||
yum -y install gcc-c++ scons openssl-devel openssl-static protobuf-devel protobuf-static boost-devel boost-static libstdc++-static
|
||||
5
Builds/QtCreator/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# QTCreator
|
||||
|
||||
Makefile
|
||||
*.user
|
||||
|
||||
112
Builds/QtCreator/rippled.pro
Normal file
@@ -0,0 +1,112 @@
|
||||
|
||||
# Ripple protocol buffers
|
||||
|
||||
PROTOS = ../../src/ripple_data/protocol/ripple.proto
|
||||
PROTOS_DIR = ../../build/proto
|
||||
|
||||
# Google Protocol Buffers support
|
||||
|
||||
protobuf_h.name = protobuf header
|
||||
protobuf_h.input = PROTOS
|
||||
protobuf_h.output = $${PROTOS_DIR}/${QMAKE_FILE_BASE}.pb.h
|
||||
protobuf_h.depends = ${QMAKE_FILE_NAME}
|
||||
protobuf_h.commands = protoc --cpp_out=$${PROTOS_DIR} --proto_path=${QMAKE_FILE_PATH} ${QMAKE_FILE_NAME}
|
||||
protobuf_h.variable_out = HEADERS
|
||||
QMAKE_EXTRA_COMPILERS += protobuf_h
|
||||
|
||||
protobuf_cc.name = protobuf implementation
|
||||
protobuf_cc.input = PROTOS
|
||||
protobuf_cc.output = $${PROTOS_DIR}/${QMAKE_FILE_BASE}.pb.cc
|
||||
protobuf_cc.depends = $${PROTOS_DIR}/${QMAKE_FILE_BASE}.pb.h
|
||||
protobuf_cc.commands = $$escape_expand(\\n)
|
||||
#protobuf_cc.variable_out = SOURCES
|
||||
QMAKE_EXTRA_COMPILERS += protobuf_cc
|
||||
|
||||
# Ripple compilation
|
||||
|
||||
DESTDIR = ../../build/QtCreator
|
||||
OBJECTS_DIR = ../../build/QtCreator/obj
|
||||
|
||||
TEMPLATE = app
|
||||
CONFIG += console thread warn_off
|
||||
CONFIG -= qt gui
|
||||
|
||||
DEFINES += _DEBUG
|
||||
|
||||
linux-g++:QMAKE_CXXFLAGS += \
|
||||
-Wall \
|
||||
-Wno-sign-compare \
|
||||
-Wno-char-subscripts \
|
||||
-Wno-invalid-offsetof \
|
||||
-Wno-unused-parameter \
|
||||
-Wformat \
|
||||
-O0 \
|
||||
-std=c++11 \
|
||||
-pthread
|
||||
|
||||
INCLUDEPATH += \
|
||||
"../../src/leveldb/" \
|
||||
"../../src/leveldb/port" \
|
||||
"../../src/leveldb/include" \
|
||||
$${PROTOS_DIR}
|
||||
|
||||
OTHER_FILES += \
|
||||
# $$files(../../src/*, true) \
|
||||
# $$files(../../src/beast/*) \
|
||||
# $$files(../../src/beast/modules/beast_basics/diagnostic/*)
|
||||
# $$files(../../src/beast/modules/beast_core/, true)
|
||||
|
||||
UI_HEADERS_DIR += ../../src/ripple_basics
|
||||
|
||||
# ---------
|
||||
# New style
|
||||
#
|
||||
SOURCES += \
|
||||
../../src/ripple/beast/ripple_beast.unity.cpp \
|
||||
../../src/ripple/beast/ripple_beastc.c \
|
||||
../../src/ripple/common/ripple_common.unity.cpp \
|
||||
../../src/ripple/http/ripple_http.unity.cpp \
|
||||
../../src/ripple/json/ripple_json.unity.cpp \
|
||||
../../src/ripple/peerfinder/ripple_peerfinder.unity.cpp \
|
||||
../../src/ripple/radmap/ripple_radmap.unity.cpp \
|
||||
../../src/ripple/resource/ripple_resource.unity.cpp \
|
||||
../../src/ripple/sitefiles/ripple_sitefiles.unity.cpp \
|
||||
../../src/ripple/sslutil/ripple_sslutil.unity.cpp \
|
||||
../../src/ripple/testoverlay/ripple_testoverlay.unity.cpp \
|
||||
../../src/ripple/types/ripple_types.unity.cpp \
|
||||
../../src/ripple/validators/ripple_validators.unity.cpp
|
||||
|
||||
# ---------
|
||||
# Old style
|
||||
#
|
||||
SOURCES += \
|
||||
../../src/ripple_app/ripple_app.unity.cpp \
|
||||
../../src/ripple_app/ripple_app_pt1.unity.cpp \
|
||||
../../src/ripple_app/ripple_app_pt2.unity.cpp \
|
||||
../../src/ripple_app/ripple_app_pt3.unity.cpp \
|
||||
../../src/ripple_app/ripple_app_pt4.unity.cpp \
|
||||
../../src/ripple_app/ripple_app_pt5.unity.cpp \
|
||||
../../src/ripple_app/ripple_app_pt6.unity.cpp \
|
||||
../../src/ripple_app/ripple_app_pt7.unity.cpp \
|
||||
../../src/ripple_app/ripple_app_pt8.unity.cpp \
|
||||
../../src/ripple_basics/ripple_basics.unity.cpp \
|
||||
../../src/ripple_core/ripple_core.unity.cpp \
|
||||
../../src/ripple_data/ripple_data.unity.cpp \
|
||||
../../src/ripple_hyperleveldb/ripple_hyperleveldb.unity.cpp \
|
||||
../../src/ripple_leveldb/ripple_leveldb.unity.cpp \
|
||||
../../src/ripple_net/ripple_net.unity.cpp \
|
||||
../../src/ripple_overlay/ripple_overlay.unity.cpp \
|
||||
../../src/ripple_rpc/ripple_rpc.unity.cpp \
|
||||
../../src/ripple_websocket/ripple_websocket.unity.cpp
|
||||
|
||||
LIBS += \
|
||||
-lboost_date_time-mt\
|
||||
-lboost_filesystem-mt \
|
||||
-lboost_program_options-mt \
|
||||
-lboost_regex-mt \
|
||||
-lboost_system-mt \
|
||||
-lboost_thread-mt \
|
||||
-lboost_random-mt \
|
||||
-lprotobuf \
|
||||
-lssl \
|
||||
-lrt
|
||||
266
Builds/Test.py
Executable file
@@ -0,0 +1,266 @@
#!/usr/bin/env python

# This file is part of rippled: https://github.com/ripple/rippled
# Copyright (c) 2012 - 2015 Ripple Labs Inc.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""
Invocation:

    ./Builds/Test.py - builds and tests all configurations

The build must succeed without shell aliases for this to work.

To pass flags to scons, put them at the very end of the command line, after
the -- flag - like this:

    ./Builds/Test.py -- -j4   # Pass -j4 to scons.


Common problems:

1) Boost not found. Solution: export BOOST_ROOT=[path to boost folder]

2) OpenSSL not found. Solution: export OPENSSL_ROOT=[path to OpenSSL folder]

3) scons is an alias. Solution: Create a script named "scons" somewhere in
   your $PATH (e.g. ~/bin/scons will often work).

      #!/bin/sh
      python /C/Python27/Scripts/scons.py "${@}"
"""
from __future__ import absolute_import, division, print_function, unicode_literals

import argparse
import itertools
import os
import platform
import re
import shutil
import sys
import subprocess


def powerset(iterable):
    """powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"""
    s = list(iterable)
    return itertools.chain.from_iterable(itertools.combinations(s, r) for r in range(len(s) + 1))

IS_WINDOWS = platform.system().lower() == 'windows'
IS_OS_X = platform.system().lower() == 'darwin'

if IS_WINDOWS or IS_OS_X:
    ALL_TARGETS = [('debug',), ('release',)]
else:
    ALL_TARGETS = [(cc + "." + target,)
                   for cc in ['gcc', 'clang']
                   for target in ['debug', 'release', 'coverage', 'profile',
                                  'debug.nounity', 'release.nounity', 'coverage.nounity', 'profile.nounity']]

# list of tuples of all possible options
if IS_WINDOWS or IS_OS_X:
    ALL_OPTIONS = [tuple(x) for x in powerset(['--assert'])]
else:
    ALL_OPTIONS = list(set(
        [tuple(x) for x in powerset(['--ninja', '--static', '--assert', '--sanitize=address'])] +
        [tuple(x) for x in powerset(['--ninja', '--static', '--assert', '--sanitize=thread'])]))

# list of tuples of all possible options + all possible targets
ALL_BUILDS = [options + target
              for target in ALL_TARGETS
              for options in ALL_OPTIONS]

parser = argparse.ArgumentParser(
    description='Test.py - run ripple tests'
)

parser.add_argument(
    '--all', '-a',
    action='store_true',
    help='Build all configurations.',
)

parser.add_argument(
    '--keep_going', '-k',
    action='store_true',
    help='Keep going after one configuration has failed.',
)

parser.add_argument(
    '--silent', '-s',
    action='store_true',
    help='Silence all messages except errors',
)

parser.add_argument(
    '--verbose', '-v',
    action='store_true',
    help=('Report more information about which commands are executed and the '
          'results.'),
)

parser.add_argument(
    '--test', '-t',
    default='',
    help='Add a prefix for unit tests',
)

parser.add_argument(
    '--clean', '-c',
    action='store_true',
    help='delete all build artifacts after testing',
)

parser.add_argument(
    'scons_args',
    default=(),
    nargs='*'
)

ARGS = parser.parse_args()


def shell(cmd, args=(), silent=False):
    """Execute a shell command and return the output."""
    silent = ARGS.silent or silent
    verbose = not silent and ARGS.verbose
    if verbose:
        print('$' + cmd, *args)

    command = (cmd,) + args

    process = subprocess.Popen(
        command,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        shell=IS_WINDOWS)
    lines = []
    count = 0
    for line in process.stdout:
        # Python 2 vs. Python 3
        if isinstance(line, str):
            decoded = line
        else:
            decoded = line.decode()
        lines.append(decoded)
        if verbose:
            print(decoded, end='')
        elif not silent:
            count += 1
            if count >= 80:
                print()
                count = 0
            else:
                print('.', end='')

    if not verbose and count:
        print()
    process.wait()
    return process.returncode, lines


def run_tests(args):
    failed = []
    if IS_WINDOWS:
        binary_re = re.compile(r'build\\([^\\]+)\\rippled.exe')
    else:
        binary_re = re.compile(r'build/([^/]+)/rippled')
    _, lines = shell('scons', ('-n', '--tree=derived',) + args, silent=True)
    for line in lines:
        match = binary_re.search(line)
        if match:
            executable, target = match.group(0, 1)

            print('Unit tests for', target)
            testflag = '--unittest'
            if ARGS.test:
                testflag += ('=' + ARGS.test)
            resultcode, lines = shell(executable, (testflag,))

            if resultcode:
                if not ARGS.verbose:
                    print('ERROR:', *lines, sep='')
                failed.append([target, 'unittest'])
                if not ARGS.keep_going:
                    break

    return failed


def run_build(args=None):
    print('Building:', *args or ('(default)',))
    resultcode, lines = shell('scons', args)

    if resultcode:
        print('Build FAILED:')
        if not ARGS.verbose:
            print(*lines, sep='')
        sys.exit(1)
    if '--ninja' in args:
        resultcode, lines = shell('ninja')

        if resultcode:
            print('Ninja build FAILED:')
            if not ARGS.verbose:
                print(*lines, sep='')
            sys.exit(1)


def main():
    if ARGS.all:
        to_build = ALL_BUILDS
    else:
        to_build = [tuple(ARGS.scons_args)]

    all_failed = []

    for build in to_build:
        args = ()
        # additional arguments come first
        for arg in list(ARGS.scons_args):
            if arg not in build:
                args += (arg,)
        args += build

        run_build(args)
        failed = run_tests(args)

        if failed:
            print('FAILED:', *(':'.join(f) for f in failed))
            if not ARGS.keep_going:
                sys.exit(1)
            else:
                all_failed.extend([','.join(build), ':'.join(f)]
                                  for f in failed)
        else:
            print('Success')

        if ARGS.clean:
            shutil.rmtree('build')
            if '--ninja' in args:
                os.remove('build.ninja')
                os.remove('.ninja_deps')
                os.remove('.ninja_log')

    if all_failed:
        if len(to_build) > 1:
            print()
            print('FAILED:', *(':'.join(f) for f in all_failed))
        sys.exit(1)


if __name__ == '__main__':
    main()
    sys.exit(0)
82
Builds/Ubuntu/build_clang_libs.sh
Executable file
@@ -0,0 +1,82 @@
#!/usr/bin/env bash

#
# This script installs boost and protobuf built with clang. This is needed on
# Ubuntu 15.10 when building with clang.
# It will build these in a 'clang' subdirectory that it creates below the directory
# this script is run from. If a clang directory already exists the script will refuse
# to run.

if hash lsb_release 2>/dev/null; then
    if [ $(lsb_release -si) == "Ubuntu" ]; then
        ubuntu_release=$(lsb_release -sr)
    fi
fi

if [ -z "${ubuntu_release}" ]; then
    echo "System not supported"
    exit 1
fi

if ! hash clang 2>/dev/null; then
    clang_version=3.7
    if [ ${ubuntu_release} == "16.04" ]; then
        clang_version=3.8
    fi
    sudo apt-get -y install clang-${clang_version}
    update-alternatives --install /usr/bin/clang clang /usr/bin/clang-${clang_version} 99 clang++
    hash -r
    if ! hash clang 2>/dev/null; then
        echo "Please install clang"
        exit 1
    fi
fi

if [ ${ubuntu_release} != "16.04" ] && [ ${ubuntu_release} != "15.10" ]; then
    echo "clang specific boost and protobuf not needed"
    exit 0
fi

if [ -d clang ]; then
    echo "clang directory already exists. Cowardly refusing to run"
    exit 1
fi

if ! hash wget 2>/dev/null; then
    sudo apt-get -y install wget
    hash -r
    if ! hash wget 2>/dev/null; then
        echo "Please install wget"
        exit 1
    fi
fi

num_procs=$(lscpu -p | grep -v '^#' | sort -u -t, -k 2,4 | wc -l) # physical cores

mkdir clang
pushd clang > /dev/null

# Install protobuf
pb=protobuf-2.6.1
pb_tar=${pb}.tar.gz
wget -O ${pb_tar} https://github.com/google/protobuf/releases/download/v2.6.1/${pb_tar}
tar xf ${pb_tar}
rm ${pb_tar}
pushd ${pb} > /dev/null
./configure CC=clang CXX=clang++ CXXFLAGS='-std=c++14 -O3 -g'
make -j${num_procs}
popd > /dev/null

# Install boost
boost_ver=1.60.0
bd=boost_${boost_ver//./_}
bd_tar=${bd}.tar.gz
wget -O ${bd_tar} http://sourceforge.net/projects/boost/files/boost/${boost_ver}/${bd_tar}
tar xf ${bd_tar}
rm ${bd_tar}
pushd ${bd} > /dev/null
./bootstrap.sh
./b2 toolset=clang -j${num_procs}
popd > /dev/null

popd > /dev/null
39
Builds/Ubuntu/install_boost.sh
Executable file
@@ -0,0 +1,39 @@
#!/usr/bin/env bash

#
# This script builds boost with the correct ABI flags for ubuntu
#

version=59
patch=0

if hash lsb_release 2>/dev/null; then
    if [ $(lsb_release -si) == "Ubuntu" ]; then
        ubuntu_release=$(lsb_release -sr)
    fi
fi

if [ -z "${ubuntu_release}" ]; then
    echo "System not supported"
    exit 1
fi

extra_defines=""
if (( $(bc <<< "${ubuntu_release} < 15.1") )); then
    extra_defines="define=_GLIBCXX_USE_CXX11_ABI=0"
fi
num_procs=$(lscpu -p | grep -v '^#' | sort -u -t, -k 2,4 | wc -l) # physical cores
printf "\nBuild command will be: ./b2 -j${num_procs} ${extra_defines}\n\n"

boost_dir="boost_1_${version}_${patch}"
boost_tag="boost-1.${version}.${patch}"
git clone -b "${boost_tag}" --recursive https://github.com/boostorg/boost.git "${boost_dir}"

cd ${boost_dir}
git checkout --force ${boost_tag}
git submodule foreach git checkout --force ${boost_tag}
./bootstrap.sh
./b2 headers
./b2 -j${num_procs} ${extra_defines}
echo "Build command was: ./b2 -j${num_procs} ${extra_defines}"
echo "Don't forget to set BOOST_ROOT!"
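# A follow-up sketch (illustrative, not executed by this script): from the
# directory where you ran this script, BOOST_ROOT should point at the tree
# cloned above, e.g. for version=59 patch=0:
#   export BOOST_ROOT=$(pwd)/boost_1_59_0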
55
Builds/Ubuntu/install_rippled_depends_ubuntu.sh
Executable file
@@ -0,0 +1,55 @@
#!/usr/bin/env bash

#
# This script installs the dependencies needed by rippled. It should be run
# with sudo. For ubuntu < 15.10, it installs gcc 5 as the default compiler. gcc
# 5 is ABI incompatible with gcc 4. If needed, the following will switch back to
# gcc-4: run `sudo update-alternatives --config gcc` and choose the gcc-4
# option.
#
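# Example invocation (illustrative; assumes you run it from the repository
# root, where this script lives under Builds/Ubuntu/):
#   sudo ./Builds/Ubuntu/install_rippled_depends_ubuntu.sh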
if hash lsb_release 2>/dev/null; then
    if [ $(lsb_release -si) == "Ubuntu" ]; then
        ubuntu_release=$(lsb_release -sr)
    fi
fi

if [ -z "${ubuntu_release}" ]; then
    echo "System not supported"
    exit 1
fi

if [ ${ubuntu_release} == "12.04" ]; then
    apt-get install python-software-properties
    add-apt-repository ppa:afrank/boost
    add-apt-repository ppa:ubuntu-toolchain-r/test
    apt-get update
    apt-get -y upgrade
    apt-get -y install curl git scons ctags pkg-config protobuf-compiler libprotobuf-dev libssl-dev python-software-properties boost1.57-all-dev g++-5 g++-4.9
    update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 99 --slave /usr/bin/g++ g++ /usr/bin/g++-5
    update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.9 99 --slave /usr/bin/g++ g++ /usr/bin/g++-4.9
    exit 0
fi

if [ ${ubuntu_release} == "14.04" ] || [ ${ubuntu_release} == "15.04" ]; then
    apt-get install python-software-properties
    echo "deb [arch=amd64] https://mirrors.ripple.com/ubuntu/ trusty stable contrib" | sudo tee /etc/apt/sources.list.d/ripple.list
    wget -O- -q https://mirrors.ripple.com/mirrors.ripple.com.gpg.key | sudo apt-key add -
    add-apt-repository ppa:ubuntu-toolchain-r/test
    apt-get update
    apt-get -y upgrade
    apt-get -y install curl git scons ctags pkg-config protobuf-compiler libprotobuf-dev libssl-dev python-software-properties boost-all-dev g++-5 g++-4.9
    update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 99 --slave /usr/bin/g++ g++ /usr/bin/g++-5
    update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.9 99 --slave /usr/bin/g++ g++ /usr/bin/g++-4.9
    exit 0
fi

if [ ${ubuntu_release} == "16.04" ] || [ ${ubuntu_release} == "15.10" ]; then
    apt-get update
    apt-get -y upgrade
    apt-get -y install python-software-properties curl git scons ctags pkg-config protobuf-compiler libprotobuf-dev libssl-dev python-software-properties libboost-all-dev
    exit 0
fi

echo "System not supported"
exit 1
4
Builds/VisualStudio2015/.gitattributes
vendored
Normal file
@@ -0,0 +1,4 @@
RippleD.vcxproj -text
RippleD.vcxproj.filters -text
255
Builds/VisualStudio2015/README.md
Normal file
@@ -0,0 +1,255 @@
# Visual Studio 2015 Build Instructions

## Important

We do not recommend Windows for rippled production use at this time. Currently, the Ubuntu
platform has received the highest level of quality assurance, testing, and support.
Additionally, 32-bit Windows versions are not supported.

## Prerequisites

To clone the source code repository, create branches for inspection or modification,
build rippled under Visual Studio, and run the unit tests, you will need these
software components:

* [Visual Studio 2015](README.md#install-visual-studio-2015)
* [Git for Windows](README.md#install-git-for-windows)
* [Google Protocol Buffers Compiler](README.md#install-google-protocol-buffers-compiler)
* (Optional) [Python and Scons](README.md#optional-install-python-and-scons)
* [OpenSSL Library](README.md#install-openssl)
* [Boost library](README.md#build-boost)

## Install Software

### Install Visual Studio 2015

If not already installed on your system, download your choice of installer from the
[Visual Studio 2015 Download](https://www.visualstudio.com/downloads/download-visual-studio-vs)
page, run the installer, and follow the directions. You may need to choose a "Custom"
installation and ensure that "Visual C++" is selected under "Programming Languages".

Any version of Visual Studio 2015 may be used to build rippled.
The **Visual Studio 2015 Community** edition is available free of charge (see
[the product page](https://www.visualstudio.com/products/visual-studio-community-vs)
for licensing details), while paid editions may be used for a free initial trial period.

### Install Git for Windows

Git is a distributed revision control system. The Windows version also provides the
bash shell and many Windows versions of Unix commands. While there are other
varieties of Git (such as TortoiseGit, which has a native Windows interface and
integrates with the Explorer shell), we recommend installing
[Git for Windows](https://git-scm.com/) since
it provides a Unix-like command line environment useful for running shell scripts.
Use of the bash shell under Windows is mandatory for running the unit tests.

* NOTE: To gain full-featured access to the
  [git-subtree](https://blogs.atlassian.com/2013/05/alternatives-to-git-submodule-git-subtree/)
  functionality used in the rippled repository, we suggest Git version 2.6.2 or later.

### Install Google Protocol Buffers Compiler

Building rippled requires **protoc.exe** version 2.5.1 or later. At your option you
may build it yourself from the sources in the
[Google Protocol Buffers](https://github.com/google/protobuf) repository,
or you may download a
[protoc.exe](https://ripple.github.io/Downloads/protoc/2.5.1/protoc.exe)
([alternate link](https://github.com/ripple/Downloads/raw/gh-pages/protoc/2.5.1/protoc.exe))
precompiled Windows executable from the
[Ripple Organization](https://github.com/ripple).

Either way, once you have the required version of **protoc.exe**, copy it into
a folder in your command line `%PATH%`.

* **NOTE:** If you use an older version of the compiler, the build will
  fail with errors related to a mismatch of the version of protocol
  buffer headers versus the compiler.

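One quick way to confirm that the **protoc.exe** found first on your `%PATH%`
is the required version is to query it directly (an illustrative check; the
output format varies by release):

```
protoc --version
```
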
### (Optional) Install Python and Scons

[Python](https://www.python.org/downloads/) and
[Scons](http://scons.org/download.php) are not required to build
rippled with Visual Studio, but can be used to build from the
command line and in scripts, and are required to properly update
the `RippleD.vcxproj` file.

If you wish to build with scons, a version after 2.3.5 is required
for Visual Studio 2015 support.

## Configure Dependencies

### Install OpenSSL

[Download OpenSSL.](http://slproweb.com/products/Win32OpenSSL.html)
There will be four variants available; since 32-bit Windows is not supported,
only the 64-bit ones are relevant:

1. 64-bit. Use this if you are running 64-bit Windows. As of this writing, the link is called: "Win64 OpenSSL v1.0.2j".
2. 64-bit light - Don't use this. It is missing files needed to build rippled. As of this writing, the link is called: "Win64 OpenSSL v1.0.2j Light"

Run the installer, and choose an appropriate location for your OpenSSL
installation. In this guide we use **C:\lib\OpenSSL-Win64** as the
destination location.

You may be informed on running the installer that "Visual C++ 2008
Redistributables" must first be installed. If so, download it
from the [same page](http://slproweb.com/products/Win32OpenSSL.html),
again making sure to get the correct 32-/64-bit variant.

* NOTE: Since rippled links statically to OpenSSL, it does not matter
  where the OpenSSL .DLL files are placed, or what version they are.
  rippled does not use or require any external .DLL files to run
  other than the standard operating system ones.

### Build Boost

After [downloading boost](http://www.boost.org/users/download/) and
unpacking it, open a **Developer Command Prompt** for
Visual Studio, change to the directory containing boost, then
bootstrap the build tools:

(As of this writing, the most recent version of boost is 1.62.0, which
will unpack into a directory named `boost_1_62_0`. For higher versions
of boost, adjust the directories provided in these examples as
appropriate.)

```powershell
cd C:\lib\boost_1_62_0
bootstrap
```

The rippled application is linked statically to the standard runtimes and external
dependencies on Windows, to ensure that the behavior of the executable is not
affected by changes in outside files. Therefore, it is necessary to build the
required boost static libraries using this command:

```powershell
bjam --toolset=msvc-14.0 address-model=64 architecture=x86 link=static threading=multi runtime-link=shared,static stage --stagedir=stage64
```

Building the boost libraries may take considerable time. When the build process
is completed, take note of both the reported compiler include paths and linker
library paths as they will be required later.

* NOTE: If older versions of Visual Studio are also installed, the build may fail.
  If this happens, make sure that only Visual Studio 2015 is installed. Due to
  defects in the uninstallation procedures of these Microsoft products, it may
  be necessary to start with a fresh install of the operating system with only
  the necessary development environment components installed to have a successful build.

### Clone the rippled repository

If you are familiar with cloning github repositories, just follow your normal process
and clone `git@github.com:ripple/rippled.git`. Otherwise follow this section for instructions.

1. If you don't have a github account, sign up for one at
   [github.com](https://github.com/).
2. Make sure you have Github ssh keys. For help see
   [generating-ssh-keys](https://help.github.com/articles/generating-ssh-keys).

Open the "Git Bash" shell that was installed with "Git for Windows" in the
step above. Navigate to the directory where you want to clone rippled (git
bash uses `/c` for windows's `C:` and forward slash where windows uses
backslash, so `C:\Users\joe\projs` would be `/c/Users/joe/projs` in git bash).
Now clone the repository and optionally switch to the *master* branch.
Type the following at the bash prompt:

```powershell
git clone git@github.com:ripple/rippled.git
cd rippled
git checkout master
```

* If you receive an error about not having the "correct access rights",
  make sure you have Github ssh keys, as described above.

### Configure Library Paths

Open the solution file located at **Builds/Visual Studio 2015/ripple.sln**
and select "View->Property Manager" to bring up the Property Manager.
Expand the *debug | x64* section and
double click the *Microsoft.Cpp.x64.user* property sheet to bring up the
*Property Pages* dialog. These are global properties applied to all
64-bit build targets:

![Global properties dialog](images/VS2015x64Properties.png)

Go to *C/C++, General, Additional Include Directories* and add the
location of the boost installation:

![Additional Include Directories](images/VS2015x64IncludeDirs.png)

Then, go to *Linker, General, Additional Library Directories* and add
the location of the compiled boost libraries reported at the completion
of building the boost libraries:

![Additional Library Directories](images/VS2015x64LibraryDirs.png)

Follow the same procedure for adding the `Additional Include Directories`
and `Additional Library Directories` required for OpenSSL. In our example
these directories are **C:\lib\OpenSSL-Win64\include** and
**C:\lib\OpenSSL-Win64\lib** respectively.

# Setup Environment

## Create a working directory for rippled.cfg

The rippled server uses the [Rippled.cfg](https://wiki.ripple.com/Rippled.cfg)
file to read its configuration parameters. This section describes setting up
a directory to hold the config file. The next sections describe how to tell
the rippled server where that file is.

1. Create a directory to hold the configuration file. In this example, the
   ripple config directory was created in `C:\Users\joe\ripple\config`.
2. Copy the example config file located in `doc\rippled-example.cfg` to the
   new directory and rename it "rippled.cfg".
3. Read the rippled.cfg file and edit as appropriate.

## Change the Visual Studio Projects Debugging Properties

1. If not already open, open the solution file located at **Builds/Visual Studio 2015/Ripple.sln**
2. Select the correct solution platform in the solution platform dropdown (either *x64*
   or *Win32* depending on machine type).
3. Select the "Project->Properties" menu item to bring up RippleD's Properties Pages
4. In "Configuration Properties" select "Debugging".
5. In the upper-left Configurations drop down, select "All Configurations".
6. In "Debugger to Launch" select "Local Windows Debugger".

### Tell rippled where to find the configuration file.

Use the `--conf` command-line switch to tell rippled where to find this file.
In the "Command Arguments" field in the properties dialog (that you opened
in the above section), add: `--conf="C:/Users/joe/ripple/config/rippled.cfg"`
(of course replacing that path with the path you set up above).

![Command Arguments properties page](images/VSCommandArgsPropPage.png)

### Set the _NO_DEBUG_HEAP Environment Variable

Rippled can run very slowly in the debugger when using the Windows Debug Heap.
Set the `_NO_DEBUG_HEAP` environment variable to one to disable the debug heap.
In the "Environment" field (that you opened in the above section), add:
`_NO_DEBUG_HEAP=1`

![No Debug Heap properties page](images/NoDebugHeapPropPage.png)

# Build

After these steps are complete, rippled should be ready to build. Simply
set rippled as the startup project by right-clicking on it in the
Visual Studio Solution Explorer, choose **Set as Startup Project**,
and then choose the **Build->Build Solution** menu item.

# Unit Tests (Recommended)

The rippled unit tests are written in C++ and are part
of the rippled executable.

From a Windows console, run the unit tests:

```
./build/msvc.debug/rippled.exe --unittest
```

Substitute the correct path to the executable to test different builds.
4943
Builds/VisualStudio2015/RippleD.vcxproj
Normal file
5554
Builds/VisualStudio2015/RippleD.vcxproj.filters
Normal file
BIN
Builds/VisualStudio2015/images/NoDebugHeapPropPage.png
Normal file
After Width: | Height: | Size: 34 KiB |
BIN
Builds/VisualStudio2015/images/VS2015x64IncludeDirs.png
Normal file
After Width: | Height: | Size: 67 KiB |
BIN
Builds/VisualStudio2015/images/VS2015x64LibraryDirs.png
Normal file
After Width: | Height: | Size: 66 KiB |
BIN
Builds/VisualStudio2015/images/VS2015x64Properties.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
Builds/VisualStudio2015/images/VSCommandArgsPropPage.png
Normal file
After Width: | Height: | Size: 19 KiB |
36
Builds/VisualStudio2015/ripple.sln
Normal file
@@ -0,0 +1,36 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 14
VisualStudioVersion = 14.0.25123.0
MinimumVisualStudioVersion = 10.0.40219.1
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "RippleD", "RippleD.vcxproj", "{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}"
EndProject
Global
    GlobalSection(SolutionConfigurationPlatforms) = preSolution
        debug.classic|x64 = debug.classic|x64
        debug.classic|x86 = debug.classic|x86
        debug|x64 = debug|x64
        debug|x86 = debug|x86
        release.classic|x64 = release.classic|x64
        release.classic|x86 = release.classic|x86
        release|x64 = release|x64
        release|x86 = release|x86
    EndGlobalSection
    GlobalSection(ProjectConfigurationPlatforms) = postSolution
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug.classic|x64.ActiveCfg = debug.classic|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug.classic|x64.Build.0 = debug.classic|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug.classic|x86.ActiveCfg = debug.classic|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug|x64.ActiveCfg = debug|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug|x64.Build.0 = debug|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug|x86.ActiveCfg = debug|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release.classic|x64.ActiveCfg = release.classic|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release.classic|x64.Build.0 = release.classic|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release.classic|x86.ActiveCfg = release.classic|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release|x64.ActiveCfg = release|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release|x64.Build.0 = release|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release|x86.ActiveCfg = release|x64
    EndGlobalSection
    GlobalSection(SolutionProperties) = preSolution
        HideSolutionNode = FALSE
    EndGlobalSection
EndGlobal
177
Builds/XCode/README.md
Normal file
@@ -0,0 +1,177 @@
# macOS Build Instructions

## Important

We don't recommend OS X for rippled production use at this time. Currently, the
Ubuntu platform has received the highest level of quality assurance and
testing.

## Prerequisites

You'll need OS X 10.8 or later.

To clone the source code repository, create branches for inspection or
modification, build rippled using clang, and run the system tests, you will need
these software components:

* [XCode](https://developer.apple.com/xcode/)
* [Homebrew](http://brew.sh/)
* [Git](http://git-scm.com/)
* [Scons](http://www.scons.org/)

## Install Software

### Install XCode

If not already installed on your system, download and install XCode using the
App Store or by using [this link](https://developer.apple.com/xcode/).

For more info, see "Step 1: Download and Install the Command Line Tools"
[here](http://www.moncefbelyamani.com/how-to-install-xcode-homebrew-git-rvm-ruby-on-mac)

The command line tools can be installed through the terminal with the command:

```
xcode-select --install
```

### Install Homebrew

> "[Homebrew](http://brew.sh/) installs the stuff you need that Apple didn’t."

Open a terminal and type:

```
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
```

For more info, see "Step 3: Install Homebrew"
[here](http://www.moncefbelyamani.com/how-to-install-xcode-homebrew-git-rvm-ruby-on-mac)

### Install Git

```
brew update
brew install git
```

For more info, see "Step 4: Install Git"
[here](http://www.moncefbelyamani.com/how-to-install-xcode-homebrew-git-rvm-ruby-on-mac)

**NOTE**: To gain full-featured access to the
[git-subtree](http://blogs.atlassian.com/2013/05/alternatives-to-git-submodule-git-subtree/)
functionality used in the rippled repository, we suggest Git version 1.8.3.2 or
later.

### Install Scons

Requires version 2.3.0 or later.

```
brew install scons
```

`brew` will generally install the latest stable version of any package, which
will satisfy the scons minimum version requirement for rippled.

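To verify that the installed version satisfies the 2.3.0 minimum, one quick
check (illustrative) is:

```
scons --version
```
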
### Install Package Config

```
brew install pkg-config
```

## Install/Build/Configure Dependencies

### Build Google Protocol Buffers Compiler

Building rippled on OS X requires `protoc` version 2.5.x or 2.6.x (later versions
do not work with rippled at this time).

Download [this archive](https://github.com/google/protobuf/releases/download/v2.6.1/protobuf-2.6.1.tar.bz2).

We want to compile protocol buffers with clang/libc++:

```
tar xfvj protobuf-2.6.1.tar.bz2
cd protobuf-2.6.1
./configure CC=clang CXX=clang++ CXXFLAGS='-std=c++11 -stdlib=libc++ -O3 -g' LDFLAGS='-stdlib=libc++' LIBS="-lc++ -lc++abi"
make -j 4
sudo make install
```

If you have installed `protobuf` via brew - either directly or indirectly as a
dependency of some other package - this is likely to conflict with our specific
version requirements. The simplest way to avoid conflicts is to uninstall it.
`brew ls --versions protobuf` will list any versions of protobuf
you currently have installed.

### Install OpenSSL

```
brew install openssl
```

### Build Boost

We want to compile boost with clang/libc++.

Download [a release](https://sourceforge.net/projects/boost/files/boost/1.61.0/boost_1_61_0.tar.bz2).

Extract it to a folder, making note of where, open a terminal, then:

```
./bootstrap.sh
./b2 toolset=clang threading=multi runtime-link=static link=static cxxflags="-stdlib=libc++" linkflags="-stdlib=libc++" address-model=64
```

Create an environment variable `BOOST_ROOT` in one of your `rc` files, pointing
to the root of the extracted directory.

### Clone the rippled repository

From the terminal:

```
git clone git@github.com:ripple/rippled.git
cd rippled
```

Choose the master branch or one of the tagged releases listed on
[GitHub](https://github.com/ripple/rippled/releases).

```
git checkout master
```

or, to test the latest release candidate, choose the `release` branch.

```
git checkout release
```

### Configure Library Paths

If you didn't persistently set the `BOOST_ROOT` environment variable to the
root of the extracted directory above, then you should set it temporarily.

For example, assuming your username were `Abigail` and you extracted Boost
1.61.0 in `/Users/Abigail/Downloads/boost_1_61_0`, you would run the following
in any shell in which you want to build:

```
export BOOST_ROOT=/Users/Abigail/Downloads/boost_1_61_0
```

## Build

```
scons
```

See: [here](https://ripple.com/wiki/Rippled_build_instructions#Building)

## Unit Tests (Recommended)

rippled builds a set of unit tests into the server executable. To run these unit
tests after building, pass the `--unittest` option to the compiled `rippled`
executable. The executable will exit after running the unit tests.

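For example, from the repository root (a sketch; the directory under `build/`
depends on the configuration you built, `clang.debug` here is illustrative):

```
./build/clang.debug/rippled --unittest
```
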
6
Builds/build_all.sh
Normal file
@@ -0,0 +1,6 @@
#!/usr/bin/env bash

num_procs=$(lscpu -p | grep -v '^#' | sort -u -t, -k 2,4 | wc -l) # number of physical cores

cd ..
./Builds/Test.py -a -c -- -j${num_procs}
13
Builds/travis/clang.boost.patch
Normal file
@@ -0,0 +1,13 @@
--- /usr/include/boost/config/compiler/clang.hpp	2013-07-20 13:17:10.000000000 -0400
+++ /usr/include/boost/config/compiler/clang.rippled.hpp	2014-03-11 16:40:51.000000000 -0400
@@ -39,6 +39,10 @@
 // Clang supports "long long" in all compilation modes.
 #define BOOST_HAS_LONG_LONG
 
+#if defined(__SIZEOF_INT128__)
+#  define BOOST_HAS_INT128
+#endif
+
 //
 // Dynamic shared object (DSO) and dynamic-link library (DLL) support
 //
10
Builds/travis/static_error.boost.patch
Normal file
@@ -0,0 +1,10 @@
--- /usr/include/boost/bimap/detail/debug/static_error.hpp	2008-03-22 17:45:55.000000000 -0400
+++ /usr/include/boost/bimap/detail/debug/static_error.rippled.hpp	2014-03-12 19:40:05.000000000 -0400
@@ -25,7 +25,6 @@
 // a static error.
 /*===========================================================================*/
 #define BOOST_BIMAP_STATIC_ERROR(MESSAGE,VARIABLES) \
-    struct BOOST_PP_CAT(BIMAP_STATIC_ERROR__,MESSAGE) {}; \
     BOOST_MPL_ASSERT_MSG(false, \
         BOOST_PP_CAT(BIMAP_STATIC_ERROR__,MESSAGE), \
         VARIABLES)
637
CMakeLists.txt
@@ -1,156 +1,517 @@
cmake_minimum_required(VERSION 3.16)
# !!! The official build system is SConstruct !!!
# This is an experimental cmake build file for rippled
#
# cmake support in rippled. Currently supports:
#
#  * unity/nounity debug/release
#  * running protobuf
#  * sanitizer builds
#  * optional release build with assert turned on
#  * `target` variable to easily set compiler/debug/unity
#    (i.e. -Dtarget=gcc.debug.nounity)
#  * gcc/clang/visual studio/xcode
#  * linux/mac/win
#  * gcc 4 ABI, when needed
#  * ninja builds
#  * check openssl version on linux
#  * static builds (swd TBD: needs to be tested by building & deploying on different systems)
#
# TBD:
#  * jemalloc support
#  * count
#  * Windows protobuf compiler puts generated file in src directory instead of build directory.
#
# Notes:
#  * Use the -G"Visual Studio 14 2015 Win64" generator on Windows. Without this
#    a 32-bit project will be created. There is no way to set the generator or
#    force a 64-bit build in CMakeLists.txt (setting CMAKE_GENERATOR_PLATFORM won't work).
#    The best solution may be to wrap cmake with a script.
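#    For example, a typical 64-bit generation step might look like this
#    (an illustrative invocation only, not something this file runs itself):
#        cmake -G"Visual Studio 14 2015 Win64" <path-to-source>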
#
#  * It is not possible to generate a visual studio project on linux or
#    mac. The visual studio generator is only available on windows.
#
#  * The visual studio project can be _either_ unity or
#    non-unity (selected at generation time). It does not appear possible
#    to disable compilation based on configuration.
#
#  * Language is _much_ worse than python, poor documentation and "quirky"
#    language support (for example, generator expressions can only be used
#    in limited contexts and seem to work differently based on
#    context (set_property can set multiple values, add_compile_options
#    can not/or is buggy)
#
#  * Could not call out to `sed` because cmake messed with the regular
#    expression before calling the external command. I did not see a way
#    around this.
#
#  * Makefile generators want to be single target. It wants a separate
#    directory for each target type. I saw some mentions on the web for
#    ways around this but haven't looked into it. The visual studio project
#    does support debug/release configurations in the same project (but
#    not unity/non-unity).

if(POLICY CMP0074)
  cmake_policy(SET CMP0074 NEW)
endif()
if(POLICY CMP0077)
  cmake_policy(SET CMP0077 NEW)
endif()
############################################################

# Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows.
file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
cmake_minimum_required(VERSION 3.1.0)

project(xrpl)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
  # GCC-specific fixes
  add_compile_options(-Wno-unknown-pragmas -Wno-subobject-linkage)
  # -Wno-subobject-linkage can be removed when we upgrade GCC version to at least 13.3
elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
  # Clang-specific fixes
  add_compile_options(-Wno-unknown-warning-option) # Ignore unknown warning options
elseif(MSVC)
  # MSVC-specific fixes
  add_compile_options(/wd4068) # Ignore unknown pragmas
endif()

# make GIT_COMMIT_HASH define available to all sources
find_package(Git)
if(Git_FOUND)
  execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse HEAD
                  OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gch)
  if(gch)
    set(GIT_COMMIT_HASH "${gch}")
    message(STATUS gch: ${GIT_COMMIT_HASH})
    add_definitions(-DGIT_COMMIT_HASH="${GIT_COMMIT_HASH}")
  endif()

  execute_process(COMMAND ${GIT_EXECUTABLE} --git-dir=${CMAKE_CURRENT_SOURCE_DIR}/.git rev-parse --abbrev-ref HEAD
                  OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE gb)
  if(gb)
    set(GIT_BRANCH "${gb}")
    message(STATUS gb: ${GIT_BRANCH})
    add_definitions(-DGIT_BRANCH="${GIT_BRANCH}")
  endif()
endif() #git

if(thread_safety_analysis)
  add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS)
  add_compile_options("-stdlib=libc++")
  add_link_options("-stdlib=libc++")
endif()

include (CheckCXXCompilerFlag)
include (FetchContent)
include (ExternalProject)
include (CMakeFuncs) # must come *after* ExternalProject b/c it overrides one function in EP
if (target)
  message (FATAL_ERROR "The target option has been removed - use native cmake options to control build")
endif ()

include(RippledSanity)
include(RippledVersion)
include(RippledSettings)
# this check has to remain in the top-level cmake
# because of the early return statement
if (packages_only)
  if (NOT TARGET rpm)
    message (FATAL_ERROR "packages_only requested, but targets were not created - is docker installed?")
  if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
    set(dir "build")
    set(cmd "cmake")
    if (target)
      set(dir "${dir}/${target}")
      set(cmd "${cmd} -Dtarget=${target}")
    elseif(CMAKE_BUILD_TYPE)
      set(dir "${dir}/${CMAKE_BUILD_TYPE}")
      set(cmd "${cmd} -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}")
    else()
      set(dir "${dir}/default")
    endif()
  return ()
endif ()
include(RippledCompiler)
include(RippledInterface)
    set(cmd "${cmd} ${CMAKE_SOURCE_DIR}")

option(only_docs "Include only the docs target?" FALSE)
include(RippledDocs)
if(only_docs)
  return()
    message(FATAL_ERROR "Builds are not allowed in ${CMAKE_SOURCE_DIR}.\n"
                        "Instead:\n"
                        "1) Remove the CMakeCache.txt file and CMakeFiles directory "
                        "from ${CMAKE_SOURCE_DIR}.\n"
                        "2) Create a directory to hold your build files, for example: ${dir}.\n"
                        "3) Change to that directory.\n"
                        "4) Run cmake targeting ${CMAKE_SOURCE_DIR}, for example: ${cmd}")
endif()
if("${CMAKE_GENERATOR}" MATCHES "Visual Studio" AND
   NOT ("${CMAKE_GENERATOR}" MATCHES .*Win64.*))
  message(FATAL_ERROR "Visual Studio 32-bit build is unsupported. Use
    -G\"${CMAKE_GENERATOR} Win64\"")
endif()

###
set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/Builds/CMake")
include(CMakeFuncs)

include(deps/Boost)
find_package(OpenSSL 1.1.1 REQUIRED)
set_target_properties(OpenSSL::SSL PROPERTIES
  INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2
)
set(SECP256K1_INSTALL TRUE)
set(SECP256K1_BUILD_BENCHMARK FALSE)
set(SECP256K1_BUILD_TESTS FALSE)
set(SECP256K1_BUILD_EXHAUSTIVE_TESTS FALSE)
set(SECP256K1_BUILD_CTIME_TESTS FALSE)
set(SECP256K1_BUILD_EXAMPLES FALSE)
add_subdirectory(external/secp256k1)
add_library(secp256k1::secp256k1 ALIAS secp256k1)
add_subdirectory(external/ed25519-donna)
add_subdirectory(external/antithesis-sdk)
find_package(gRPC REQUIRED)
find_package(lz4 REQUIRED)
# Target names with :: are not allowed in a generator expression.
# We need to pull the include directories and imported location properties
# from separate targets.
find_package(LibArchive REQUIRED)
find_package(SOCI REQUIRED)
find_package(SQLite3 REQUIRED)
set(openssl_min 1.0.2)

option(rocksdb "Enable RocksDB" ON)
if(rocksdb)
  find_package(RocksDB REQUIRED)
  set_target_properties(RocksDB::rocksdb PROPERTIES
    INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1
  )
  target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb)
parse_target()

if (NOT DEFINED unity)
  set(unity true)
  set(nonunity false)
endif()

find_package(nudb REQUIRED)
find_package(date REQUIRED)
find_package(xxHash REQUIRED)
setup_build_cache()

target_link_libraries(ripple_libs INTERFACE
  ed25519::ed25519
  lz4::lz4
  OpenSSL::Crypto
  OpenSSL::SSL
  secp256k1::secp256k1
  soci::soci
  SQLite::SQLite3
)
project(rippled)

# Work around changes to Conan recipe for now.
if(TARGET nudb::core)
  set(nudb nudb::core)
elseif(TARGET NuDB::nudb)
  set(nudb NuDB::nudb)
if(nonunity)
  get_cmake_property(allvars VARIABLES)
  string(REGEX MATCHALL "[^;]*(DEBUG|RELEASE)[^;]*" matchvars "${allvars}")
  foreach(var IN LISTS matchvars)
    string(REGEX REPLACE "(DEBUG|RELEASE)" "\\1CLASSIC" newvar ${var})
    set(${newvar} ${${var}})
  endforeach()

  get_cmake_property(allvars CACHE_VARIABLES)
  string(REGEX MATCHALL "[^;]*(DEBUG|RELEASE)[^;]*" matchvars "${allvars}")
  foreach(var IN LISTS matchvars)
    string(REGEX REPLACE "(DEBUG|RELEASE)" "\\1CLASSIC" newvar ${var})
    set(${newvar} ${${var}} CACHE STRING "Copied from ${var}")
  endforeach()
endif()

determine_build_type()

check_gcc4_abi()

############################################################

include_directories(
  src
  src/beast
  src/beast/include
  src/beast/extras
  src/nudb/include
  src/soci/src
  src/soci/include)

special_build_flags()

############################################################

use_boost(
  # resist the temptation to alphabetize these. coroutine
  # must come before context.
  chrono
  coroutine
  context
  date_time
  filesystem
  program_options
  regex
  system
  thread)

use_pthread()

use_openssl(${openssl_min})

use_protobuf()

setup_build_boilerplate()

############################################################

if (is_clang)
  set(rocks_db_system_header --system-header-prefix=rocksdb2)
else()
  message(FATAL_ERROR "unknown nudb target")
endif()
target_link_libraries(ripple_libs INTERFACE ${nudb})

if(coverage)
  include(RippledCov)
  unset(rocks_db_system_header)
endif()

set(PROJECT_EXPORT_SET RippleExports)
include(RippledCore)
include(RippledInstall)
include(RippledValidatorKeys)
set(soci_extra_includes
  -I"${CMAKE_SOURCE_DIR}/"src/soci/src/core
  -I"${CMAKE_SOURCE_DIR}/"src/soci/include/private
  -I"${CMAKE_SOURCE_DIR}/"src/sqlite)

if(tests)
  include(CTest)
  add_subdirectory(src/tests/libxrpl)
############################################################

# Unity sources
prepend(beast_unity_srcs
  src/ripple/beast/unity/
  beast_insight_unity.cpp
  beast_net_unity.cpp
  beast_utility_unity.cpp)

prepend(ripple_unity_srcs
  src/ripple/unity/
  app_ledger.cpp
  app_main.cpp
  app_misc.cpp
  app_paths.cpp
  app_tx.cpp
  conditions.cpp
  core.cpp
  basics.cpp
  crypto.cpp
  ledger.cpp
  net.cpp
  overlay.cpp
  peerfinder.cpp
  json.cpp
  protocol.cpp
  rpcx.cpp
  shamap.cpp
  server.cpp)

prepend(test_unity_srcs
  src/test/unity/
  app_test_unity.cpp
  basics_test_unity.cpp
  beast_test_unity.cpp
  conditions_test_unity.cpp
  core_test_unity.cpp
  json_test_unity.cpp
  ledger_test_unity.cpp
  overlay_test_unity.cpp
  peerfinder_test_unity.cpp
  protocol_test_unity.cpp
  resource_test_unity.cpp
  rpc_test_unity.cpp
  server_test_unity.cpp
  shamap_test_unity.cpp
  support_unity.cpp)

list(APPEND rippled_src_unity ${beast_unity_srcs} ${ripple_unity_srcs} ${test_unity_srcs})

add_with_props(rippled_src_unity src/test/unity/nodestore_test_unity.cpp
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2/include
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${rocks_db_system_header})

add_with_props(rippled_src_unity src/ripple/unity/nodestore.cpp
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2/include
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${rocks_db_system_header})

add_with_props(rippled_src_unity src/ripple/unity/soci_ripple.cpp ${soci_extra_includes})

list(APPEND ripple_unity_srcs ${beast_unity_srcs} ${test_unity_srcs}
  src/ripple/unity/nodestore.cpp
  src/ripple/unity/soci_ripple.cpp
  src/test/unity/nodestore_test_unity.cpp)

############################################################

# Non-unity sources
file(GLOB_RECURSE core_srcs src/ripple/core/*.cpp)
add_with_props(rippled_src_nonunity "${core_srcs}"
  -I"${CMAKE_SOURCE_DIR}/"src/soci/src/core
  -I"${CMAKE_SOURCE_DIR}/"src/sqlite)

set(non_unity_srcs ${core_srcs})

foreach(curdir
    beast/clock
    beast/container
    beast/insight
    beast/net
    beast/utility
    app
    basics
    conditions
    crypto
    json
    ledger
    legacy
    net
    overlay
    peerfinder
    protocol
    rpc
    server
    shamap)
  file(GLOB_RECURSE cursrcs src/ripple/${curdir}/*.cpp)
  list(APPEND rippled_src_nonunity "${cursrcs}")
  list(APPEND non_unity_srcs "${cursrcs}")
endforeach()

file(GLOB_RECURSE nodestore_srcs src/ripple/nodestore/*.cpp
  src/test/nodestore/*.cpp)

add_with_props(rippled_src_nonunity "${nodestore_srcs}"
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2/include
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${rocks_db_system_header})

list(APPEND non_unity_srcs "${nodestore_srcs}")

# unit test sources
foreach(curdir
    app
    basics
    beast
    conditions
    core
    json
    ledger
    nodestore
    overlay
    peerfinder
    protocol
    resource
    rpc
    server
    shamap
    jtx)
  file(GLOB_RECURSE cursrcs src/test/${curdir}/*.cpp)
  list(APPEND test_srcs "${cursrcs}")
endforeach()

add_with_props(rippled_src_nonunity "${test_srcs}"
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2/include
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${rocks_db_system_header})

list(APPEND non_unity_srcs "${test_srcs}")

if(WIN32 OR is_xcode)
  # Rippled headers. Only needed for IDEs.
  file(GLOB_RECURSE rippled_headers src/*.h src/*.hpp)
  list(APPEND rippled_headers Builds/CMake/CMakeFuncs.cmake)
  foreach(curdir
      beast/asio
      beast/core
      beast/crypto
      beast/cxx17
      beast/hash
      proto
      resource
      validators
      websocket)
    file(GLOB_RECURSE cursrcs src/ripple/${curdir}/*.cpp)
    list(APPEND rippled_headers "${cursrcs}")
  endforeach()
  list(APPEND rippled_src_nonunity "${rippled_headers}")

  set_property(
    SOURCE ${rippled_headers}
    APPEND
    PROPERTY HEADER_FILE_ONLY
    true)
    # Doesn't work
    # $<OR:$<CONFIG:Debug>,$<CONFIG:Release>>)
endif()

if (WIN32 OR is_xcode)
  # Documentation sources. Only needed for IDEs.
  prepend(doc_srcs
    docs/
    Jamfile.v2
    boostbook.dtd
    index.xml
    main.qbk
    quickref.xml
    reference.xsl
    source.dox)

  set_property(
    SOURCE ${doc_srcs}
    APPEND
    PROPERTY HEADER_FILE_ONLY
    true)
    # Doesn't work
    # $<OR:$<CONFIG:Debug>,$<CONFIG:Release>>)
endif()

############################################################

add_with_props(rippled_src_all src/ripple/unity/soci.cpp
  ${soci_extra_includes})

if (NOT is_msvc)
  set(no_unused_w -Wno-unused-function)
else()
  unset(no_unused_w)
endif()

add_with_props(rippled_src_all src/ripple/unity/secp256k1.cpp
  -I"${CMAKE_SOURCE_DIR}/"src/secp256k1
  ${no_unused_w}
)

foreach(cursrc
    src/ripple/beast/unity/beast_hash_unity.cpp
    src/ripple/unity/beast.cpp
    src/ripple/unity/lz4.c
    src/ripple/unity/protobuf.cpp
    src/ripple/unity/ripple.proto.cpp
    src/ripple/unity/resource.cpp)

  add_with_props(rippled_src_all ${cursrc}
    ${rocks_db_system_header}
  )

endforeach()

if (NOT is_msvc)
  set(extra_props -Wno-array-bounds)
else()
  unset(extra_props)
endif()

add_with_props(rippled_src_all src/sqlite/sqlite_unity.c
  ${extra_props})

add_with_props(rippled_src_all src/ripple/unity/ed25519_donna.c
  -I"${CMAKE_SOURCE_DIR}/"src/ed25519-donna)

if (is_gcc)
  set(no_init_w -Wno-maybe-uninitialized)
else()
  unset(no_init_w)
endif()

add_with_props(rippled_src_all src/ripple/unity/rocksdb.cpp
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2
  -I"${CMAKE_SOURCE_DIR}/"src/rocksdb2/include
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${no_init_w} ${rocks_db_system_header})

if (NOT is_msvc)
  set(no_unused_w -Wno-unused-function)
endif()

add_with_props(rippled_src_all src/ripple/unity/snappy.cpp
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/snappy
  -I"${CMAKE_SOURCE_DIR}/"src/snappy/config
  ${no_unused_w})

if (APPLE AND is_clang)
  list(APPEND rippled_src_all src/ripple/unity/beastobjc.mm)
endif()

list(APPEND rippled_src_unity "${rippled_src_all}")
list(APPEND rippled_src_nonunity "${rippled_src_all}")

############################################################

if (WIN32 OR is_xcode)
  group_sources(src)
  group_sources(docs)
  group_sources(Builds)
endif()

if(unity)
  add_executable(rippled ${rippled_src_unity} ${PROTO_HDRS})
  add_executable(rippled_classic EXCLUDE_FROM_ALL ${rippled_src_nonunity} ${PROTO_HDRS})
  set(other_target rippled_classic)
else()
  add_executable(rippled ${rippled_src_nonunity} ${PROTO_HDRS})
  add_executable(rippled_unity EXCLUDE_FROM_ALL ${rippled_src_unity} ${PROTO_HDRS})
  set(other_target rippled_unity)
endif()
list(APPEND targets "rippled")
list(APPEND targets ${other_target})
# Not the same as EXCLUDE_FROM_ALL. Prevents Visual Studio from building the
# other_target when the user builds the solution (default when pressing <F7>)
set_property(TARGET ${other_target} PROPERTY EXCLUDE_FROM_DEFAULT_BUILD true)

find_program(
  B2_EXE
  NAMES b2
  HINTS ${BOOST_ROOT}
  PATHS ${BOOST_ROOT}
  DOC "Location of the b2 build executable from Boost")
if(${B2_EXE} STREQUAL "B2_EXE-NOTFOUND")
  message(WARNING
    "Boost b2 executable not found. docs target will not be buildable")
elseif(NOT BOOST_ROOT)
  if(Boost_INCLUDE_DIRS)
    set(BOOST_ROOT ${Boost_INCLUDE_DIRS})
  else()
    get_filename_component(BOOST_ROOT ${B2_EXE} DIRECTORY)
  endif()
endif()
# The value for BOOST_ROOT will be determined based on
#  1) The environment BOOST_ROOT
#  2) The Boost_INCLUDE_DIRS found by `get_boost`
#  3) The folder the `b2` executable is found in.
# If those checks don't yield the correct path, BOOST_ROOT
# can be defined on the cmake command line:
#  cmake <path> -DBOOST_ROOT=<boost_path>
if(BOOST_ROOT)
  set(B2_PARAMS "-sBOOST_ROOT=${BOOST_ROOT}")
endif()

# Find bash to help Windows avoid file association problems
find_program(
  BASH_EXE
  NAMES bash sh
  DOC "Location of the bash shell executable"
)
if(${BASH_EXE} STREQUAL "BASH_EXE-NOTFOUND")
  message(WARNING
    "Unable to find bash executable. docs target may not be buildable")
  set(BASH_EXE "")
endif()

add_custom_target(docs
  COMMAND ${CMAKE_COMMAND} -E env "PATH=$ENV{PATH} " ${BASH_EXE} ./makeqbk.sh
  COMMAND ${B2_EXE} ${B2_PARAMS}
  BYPRODUCTS "${CMAKE_SOURCE_DIR}/docs/html/index.html"
  WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}/docs"
  SOURCES "${doc_srcs}"
)

set_startup_project(rippled)

foreach(target IN LISTS targets)
  target_link_libraries(${target}
    ${OPENSSL_LIBRARIES} ${PROTOBUF_LIBRARIES} ${SANITIZER_LIBRARIES})

  link_common_libraries(${target})
endforeach()

if (NOT CMAKE_SIZEOF_VOID_P EQUAL 8)
  message(WARNING "Rippled requires a 64 bit target architecture.\n"
    "The most likely cause of this warning is trying to build rippled with a 32-bit OS.")
endif()
1077
CONTRIBUTING.md
11
Jamroot
Normal file
@@ -0,0 +1,11 @@
#
# Copyright (c) 2013-2016 Vinnie Falco (vinnie dot falco at gmail dot com)
#
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#

import boost ;

boost.use-project ;
16
LICENSE.md
@@ -1,16 +0,0 @@
ISC License

Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
Copyright (c) 2012-2020, the XRP Ledger developers.

Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
124
README.md
@@ -1,71 +1,91 @@
|
||||
[](https://codecov.io/gh/XRPLF/rippled)
|
||||

|
||||
|
||||
# The XRP Ledger
|
||||
# What is Ripple?
|
||||
Ripple is a network of computers which use the [Ripple consensus algorithm](https://www.youtube.com/watch?v=pj1QVb1vlC0) to atomically settle and record
|
||||
transactions on a secure distributed database, the Ripple Consensus Ledger
|
||||
(RCL). Because of its distributed nature, the RCL offers transaction immutability
|
||||
without a central operator. The RCL contains a built-in currency exchange and its
|
||||
path-finding algorithm finds competitive exchange rates across order books
|
||||
and currency pairs.
|
||||
|
||||
The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powered by a network of peer-to-peer nodes. The XRP Ledger uses a novel Byzantine Fault Tolerant consensus algorithm to settle and record transactions in a secure distributed database without a central operator.
|
||||
### Key Features
|
||||
- **Distributed**
|
||||
- Direct account-to-account settlement with no central operator
|
||||
- Decentralized global market for competitive FX
|
||||
- **Secure**
|
||||
- Transactions are cryptographically signed using ECDSA or Ed25519
|
||||
- Multi-signing capabilities
|
||||
- **Scalable**
|
||||
- Capacity to process the world’s cross-border payments volume
|
||||
- Easy access to liquidity through a competitive FX marketplace
|
||||
|
||||
## XRP
|
||||
## Cross-border payments
|
||||
Ripple enables banks to settle cross-border payments in real-time, with
|
||||
end-to-end transparency, and at lower costs. Banks can provide liquidity
|
||||
for FX themselves or source it from third parties.
|
||||
|
||||
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP.
|
||||
As Ripple adoption grows, so does the number of currencies and counterparties.
|
||||
Liquidity providers need to maintain accounts with each counterparty for
|
||||
each currency – a capital- and time-intensive endeavor that spreads liquidity
|
||||
thin. Further, some transactions, such as exotic currency trades, will require
|
||||
multiple trading parties, who each layer costs to the transaction. Thin
|
||||
liquidity and many intermediary trading parties make competitive pricing
|
||||
challenging.
|
||||
|
||||
## rippled
|
||||

|
||||
|
||||
The server software that powers the XRP Ledger is called `rippled` and is available in this repository under the permissive [ISC open-source license](LICENSE.md). The `rippled` server software is written primarily in C++ and runs on a variety of platforms. The `rippled` server software can run in several modes depending on its [configuration](https://xrpl.org/rippled-server-modes.html).
|
||||
### XRP as a Bridge Currency
|
||||
Ripple can bridge even exotic currency pairs directly through XRP. Similar to
|
||||
USD in today’s currency market, XRP allows liquidity providers to focus on
|
||||
offering competitive FX rates on fewer pairs and adding depth to order books.
|
||||
Unlike USD, trading through XRP does not require bank accounts, service fees,
|
||||
counterparty risk, or additional operational costs. By using XRP, liquidity
|
||||
providers can specialize in certain currency corridors, reduce operational
|
||||
costs, and ultimately, offer more competitive FX pricing.
|
||||
|
||||
If you are interested in running an **API Server** (including a **Full History Server**), take a look at [Clio](https://github.com/XRPLF/clio). (rippled Reporting Mode has been replaced by Clio.)
|
||||

|
||||
|
||||
### Build from Source
|
||||
# rippled - Ripple server
|
||||
`rippled` is the reference server implementation of the Ripple
|
||||
protocol. To learn more about how to build and run a `rippled`
|
||||
server, visit https://ripple.com/build/rippled-setup/
|
||||
|
||||
- [Read the build instructions in `BUILD.md`](BUILD.md)
|
||||
- If you encounter any issues, please [open an issue](https://github.com/XRPLF/rippled/issues)
|
||||
[](https://travis-ci.org/ripple/rippled)
|
||||
[](https://codecov.io/gh/ripple/rippled)
|
||||
|
||||
## Key Features of the XRP Ledger
|
||||
### License
|
||||
`rippled` is open source and permissively licensed under the
|
||||
ISC license. See the LICENSE file for more details.
|
||||
|
||||
- **[Censorship-Resistant Transaction Processing][]:** No single party decides which transactions succeed or fail, and no one can "roll back" a transaction after it completes. As long as those who choose to participate in the network keep it healthy, they can settle transactions in seconds.
|
||||
- **[Fast, Efficient Consensus Algorithm][]:** The XRP Ledger's consensus algorithm settles transactions in 4 to 5 seconds, processing at a throughput of up to 1500 transactions per second. These properties put XRP at least an order of magnitude ahead of other top digital assets.
|
||||
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction costs.
|
||||
- **[Responsible Software Governance][]:** A team of full-time, world-class developers at Ripple maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests, and builds constructive relationships with governments and financial institutions worldwide.
|
||||
- **[Secure, Adaptable Cryptography][]:** The XRP Ledger relies on industry standard digital signature systems like ECDSA (the same scheme used by Bitcoin) but also supports modern, efficient algorithms like Ed25519. The extensible nature of the XRP Ledger's software makes it possible to add and disable algorithms as the state of the art in cryptography advances.
|
||||
- **[Modern Features for Smart Contracts][]:** Features like Escrow, Checks, and Payment Channels support cutting-edge financial applications including the [Interledger Protocol](https://interledger.org/). This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
|
||||
- **[On-Ledger Decentralized Exchange][]:** In addition to all the features that make XRP useful on its own, the XRP Ledger also has a fully-functional accounting system for tracking and trading obligations denominated in any way users want, and an exchange built into the protocol. The XRP Ledger can settle long, cross-currency payment paths and exchanges of multiple currencies in atomic transactions, bridging gaps of trust with XRP.
|
||||
#### Repository Contents
|
||||
|
||||
[Censorship-Resistant Transaction Processing]: https://xrpl.org/xrp-ledger-overview.html#censorship-resistant-transaction-processing
|
||||
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/xrp-ledger-overview.html#fast-efficient-consensus-algorithm
|
||||
[Finite XRP Supply]: https://xrpl.org/xrp-ledger-overview.html#finite-xrp-supply
|
||||
[Responsible Software Governance]: https://xrpl.org/xrp-ledger-overview.html#responsible-software-governance
|
||||
[Secure, Adaptable Cryptography]: https://xrpl.org/xrp-ledger-overview.html#secure-adaptable-cryptography
|
||||
[Modern Features for Smart Contracts]: https://xrpl.org/xrp-ledger-overview.html#modern-features-for-smart-contracts
|
||||
[On-Ledger Decentralized Exchange]: https://xrpl.org/xrp-ledger-overview.html#on-ledger-decentralized-exchange
|
||||
| Folder | Contents |
|
||||
|---------|----------|
|
||||
| ./bin | Scripts and data files for Ripple integrators. |
|
||||
| ./build | Intermediate and final build outputs. |
|
||||
| ./Builds| Platform or IDE-specific project files. |
|
||||
| ./doc | Documentation and example configuration files. |
|
||||
| ./src | Source code. |
|
||||
|
||||
## Source Code
|
||||
Some of the directories under `src` are external repositories inlined via
|
||||
git-subtree. See the corresponding README for more details.
|
||||
|
||||
Here are some good places to start learning the source code:
|
||||
## For more information:
|
||||
|
||||
- Read the markdown files in the source tree: `src/ripple/**/*.md`.
|
||||
- Read [the levelization document](.github/scripts/levelization) to get an idea of the internal dependency graph.
|
||||
- In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component.
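To make that pattern concrete, here is a minimal sketch (not rippled's actual class definitions; `SomeComponent` is a hypothetical name used only for illustration) of a component taking an `Application&` in its constructor and keeping it as `app_`:

```cpp
// Sketch only: the real Application interface exposes many services
// (config, logs, job queue, ledger access, ...).
class Application
{
public:
    virtual ~Application() = default;
};

// A hypothetical component following the convention described above.
class SomeComponent
{
public:
    explicit SomeComponent(Application& app) : app_(app)
    {
        // Holding the reference lets this component reach any other
        // component that is exposed through the Application interface.
    }

private:
    Application& app_;
};
```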
|
||||
* [Ripple Knowledge Center](https://ripple.com/learn/)
|
||||
* [Ripple Developer Center](https://ripple.com/build/)
|
||||
* [Ripple Whitepapers & Reports](https://ripple.com/whitepapers-reports/)
|
||||
* [Ripple Consensus Whitepaper](https://ripple.com/consensus-whitepaper/)
|
||||
* [Ripple Solutions Guide](https://ripple.com/files/ripple_solutions_guide.pdf)
|
||||
|
||||
### Repository Contents
|
||||
To learn about how Ripple is transforming global payments visit
|
||||
[https://ripple.com/contact/](https://ripple.com/contact/)
|
||||
|
||||
| Folder | Contents |
|
||||
| :--------- | :----------------------------------------------- |
|
||||
| `./bin` | Scripts and data files for Ripple integrators. |
|
||||
| `./Builds` | Platform-specific guides for building `rippled`. |
|
||||
| `./docs` | Source documentation files and doxygen config. |
|
||||
| `./cfg` | Example configuration files. |
|
||||
| `./src` | Source code. |
|
||||
- - -
|
||||
|
||||
Some of the directories under `src` are external repositories included using
|
||||
git-subtree. See those directories' README files for more details.
|
||||
Copyright © 2015, Ripple Labs. All rights reserved.
|
||||
|
||||
## Additional Documentation
|
||||
|
||||
- [XRP Ledger Dev Portal](https://xrpl.org/)
|
||||
- [Setup and Installation](https://xrpl.org/install-rippled.html)
|
||||
- [Source Documentation (Doxygen)](https://xrplf.github.io/rippled/)
|
||||
|
||||
## See Also
|
||||
|
||||
- [Clio API Server for the XRP Ledger](https://github.com/XRPLF/clio)
|
||||
- [Mailing List for Release Announcements](https://groups.google.com/g/ripple-server)
|
||||
- [Learn more about the XRP Ledger (YouTube)](https://www.youtube.com/playlist?list=PLJQ55Tj1hIVZtJ_JdTvSum2qMTsedWkNi)
|
||||
Portions of this document, including but not limited to the Ripple logo,
|
||||
images and image templates are the property of Ripple Labs and cannot be
|
||||
copied or used without permission.
|
||||
|
||||
2414
RELEASENOTES.md
Normal file
1283
SConstruct
Normal file
149
SECURITY.md
@@ -1,149 +0,0 @@
|
||||
### Operating an XRP Ledger server securely
|
||||
|
||||
For more details on operating an XRP Ledger server securely, please visit https://xrpl.org/manage-the-rippled-server.html.
|
||||
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
Software constantly evolves. In order to focus resources, we generally only accept vulnerability reports that affect recent and current versions of the software. We always accept reports for issues present in the **master**, **release**, or **develop** branches, and in proposed, [open pull requests](https://github.com/ripple/rippled/pulls).
|
||||
|
||||
## Identifying and Reporting Vulnerabilities
|
||||
|
||||
We take security seriously and we do our best to ensure that all our releases are bug free. But we aren't perfect and sometimes things will slip through.
|
||||
|
||||
### Responsible Investigation
|
||||
|
||||
We urge you to examine our code carefully and responsibly, and to disclose any issues that you identify in a responsible fashion.
|
||||
|
||||
Responsible investigation includes, but isn't limited to, the following:
|
||||
|
||||
- Not performing tests on the main network. If testing is necessary, use the [Testnet or Devnet](https://xrpl.org/xrp-testnet-faucet.html).
|
||||
- Not targeting physical security measures, or attempting to use social engineering, spam, distributed denial of service (DDOS) attacks, etc.
|
||||
- Investigating bugs in a way that makes a reasonable, good faith effort not to be disruptive or harmful to the XRP Ledger and the broader ecosystem.
|
||||
|
||||
### Responsible Disclosure
|
||||
|
||||
If you discover a vulnerability or potential threat, or if you _think_
|
||||
you have, please reach out by dropping an email using the contact
|
||||
information below.
|
||||
|
||||
Your report should include the following:
|
||||
|
||||
- Your contact information (typically, an email address);
|
||||
- The description of the vulnerability;
|
||||
- The attack scenario (if any);
|
||||
- The steps to reproduce the vulnerability;
|
||||
- Any other relevant details or artifacts, including code, scripts or patches.
|
||||
|
||||
In your email, please describe the issue or potential threat. If possible, include a "repro" (code that can reproduce the issue) or describe the best way to reproduce and replicate the issue. Please make your report as detailed and comprehensive as possible.
|
||||
|
||||
For more information on responsible disclosure, please read this [Wikipedia article](https://en.wikipedia.org/wiki/Responsible_disclosure).
|
||||
|
||||
## Report Handling Process
|
||||
|
||||
Please report the bug directly to us and limit further disclosure. If you want to prove that you knew the bug as of a given time, consider using a cryptographic precommitment: hash the content of your report and publish the hash on a medium of your choice (e.g. on Twitter or as a memo in a transaction) as "proof" that you had written the text at a given point in time.
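As an illustration only (not an official requirement), a reporter could compute a SHA-256 digest of the report text and publish just the hex digest. A minimal stand-alone sketch, assuming OpenSSL is available and the program is linked with `-lcrypto`:

```cpp
#include <openssl/evp.h>

#include <cstdio>
#include <iostream>
#include <iterator>
#include <string>

int
main()
{
    // Read the full report text from standard input.
    std::string const report{
        std::istreambuf_iterator<char>(std::cin),
        std::istreambuf_iterator<char>()};

    // Hash it with SHA-256. Publishing only this digest commits to the
    // report's content without revealing it; the full text can be
    // disclosed later to prove what was written and when.
    unsigned char digest[EVP_MAX_MD_SIZE];
    unsigned int length = 0;
    EVP_Digest(
        report.data(), report.size(), digest, &length, EVP_sha256(), nullptr);

    for (unsigned int i = 0; i < length; ++i)
        std::printf("%02x", digest[i]);
    std::printf("\n");
    return 0;
}
```

Running it as, say, `./precommit < report.txt` (a hypothetical binary and file name) prints a 64-character hex digest that is safe to post publicly.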
|
||||
|
||||
Once we receive a report, we:
|
||||
|
||||
1. Assign two people to independently evaluate the report;
|
||||
2. Consider their recommendations;
|
||||
3. If action is necessary, formulate a plan to address the issue;
|
||||
4. Communicate privately with the reporter to explain our plan.
|
||||
5. Prepare, test and release a version which fixes the issue; and
|
||||
6. Announce the vulnerability publicly.
|
||||
|
||||
We will triage and respond to your disclosure within 24 hours. Beyond that, we will work to analyze the issue in more detail, formulate, develop and test a fix.
|
||||
|
||||
While we commit to responding within 24 hours of your initial report with our triage assessment, we cannot guarantee a response time for the remaining steps. We will communicate with you throughout this process, letting you know where we are and keeping you updated on the timeframe.
|
||||
|
||||
## Bug Bounty Program
|
||||
|
||||
[Ripple](https://ripple.com) is generously sponsoring a bug bounty program for vulnerabilities in [`rippled`](https://github.com/XRPLF/rippled) (and other related projects, like [`xrpl.js`](https://github.com/XRPLF/xrpl.js), [`xrpl-py`](https://github.com/XRPLF/xrpl-py), [`xrpl4j`](https://github.com/XRPLF/xrpl4j)).
|
||||
|
||||
This program allows us to recognize and reward individuals or groups that identify and report bugs. In summary, in order to qualify for a bounty, the bug must be:
|
||||
|
||||
1. **In scope**. Only bugs in software under the scope of the program qualify. Currently, that means `rippled`, `xrpl.js`, `xrpl-py`, `xrpl4j`.
|
||||
2. **Relevant**. A security issue, posing a danger to user funds, privacy, or the operation of the XRP Ledger.
|
||||
3. **Original and previously unknown**. Bugs that are already known and discussed in public do not qualify. Previously reported bugs, even if publicly unknown, are not eligible.
|
||||
4. **Specific**. We welcome general security advice or recommendations, but we cannot pay bounties for that.
|
||||
5. **Fixable**. There has to be something we can do to permanently fix the problem. Note that bugs in other people’s software may still qualify in some cases. For example, if you find a bug in a library that we use which can compromise the security of software that is in scope and we can get it fixed, you may qualify for a bounty.
|
||||
6. **Unused**. If you use the exploit to attack the XRP Ledger, you do not qualify for a bounty. If you report a vulnerability used in an ongoing or past attack and there is specific, concrete evidence that suggests you are the attacker, we reserve the right not to pay a bounty.

The amount paid varies dramatically. Vulnerabilities that are harmless on their own but could form part of a critical exploit will usually receive a bounty. Full-blown exploits can receive much higher bounties. Please don’t hold back partial vulnerabilities while trying to construct a full-blown exploit. We will pay a bounty to anyone who reports a complete chain of vulnerabilities even if they have reported each component of the exploit separately and those vulnerabilities have been fixed in the meantime. However, to qualify for the full bounty, you must have been the first to report each of the partial exploits.
|
||||
|
||||
### Contacting Us
|
||||
|
||||
To report a qualifying bug, please send a detailed report to:
|
||||
|
||||
| Email Address | bugs@ripple.com |
|
||||
| :-----------: | :-------------------------------------------------- |
|
||||
| Short Key ID | `0xC57929BE` |
|
||||
| Long Key ID | `0xCD49A0AFC57929BE` |
|
||||
| Fingerprint | `24E6 3B02 37E0 FA9C 5E96 8974 CD49 A0AF C579 29BE` |
|
||||
|
||||
The full PGP key for this address, which is also available on several key servers (e.g. on [keyserver.ubuntu.com](https://keyserver.ubuntu.com)), is:
|
||||
|
||||
```
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
mQINBFUwGHYBEAC0wpGpBPkd8W1UdQjg9+cEFzeIEJRaoZoeuJD8mofwI5Ejnjdt
|
||||
kCpUYEDal0ygkKobu8SzOoATcDl18iCrScX39VpTm96vISFZMhmOryYCIp4QLJNN
|
||||
4HKc2ZdBj6W4igNi6vj5Qo6JMyGpLY2mz4CZskbt0TNuUxWrGood+UrCzpY8x7/N
|
||||
a93fcvNw+prgCr0rCH3hAPmAFfsOBbtGzNnmq7xf3jg5r4Z4sDiNIF1X1y53DAfV
|
||||
rWDx49IKsuCEJfPMp1MnBSvDvLaQ2hKXs+cOpx1BCZgHn3skouEUxxgqbtTzBLt1
|
||||
xXpmuijsaltWngPnGO7mOAzbpZSdBm82/Emrk9bPMuD0QaLQjWr7HkTSUs6ZsKt4
|
||||
7CLPdWqxyY/QVw9UaxeHEtWGQGMIQGgVJGh1fjtUr5O1sC9z9jXcQ0HuIHnRCTls
|
||||
GP7hklJmfH5V4SyAJQ06/hLuEhUJ7dn+BlqCsT0tLmYTgZYNzNcLHcqBFMEZHvHw
|
||||
9GENMx/tDXgajKql4bJnzuTK0iGU/YepanANLd1JHECJ4jzTtmKOus9SOGlB2/l1
|
||||
0t0ADDYAS3eqOdOcUvo9ElSLCI5vSVHhShSte/n2FMWU+kMUboTUisEG8CgQnrng
|
||||
g2CvvQvqDkeOtZeqMcC7HdiZS0q3LJUWtwA/ViwxrVlBDCxiTUXCotyBWwARAQAB
|
||||
tDBSaXBwbGUgTGFicyBCdWcgQm91bnR5IFByb2dyYW0gPGJ1Z3NAcmlwcGxlLmNv
|
||||
bT6JAjcEEwEKACEFAlUwGHYCGwMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQ
|
||||
zUmgr8V5Kb6R0g//SwY/mVJY59k87iL26/KayauSoOcz7xjcST26l4ZHVVX85gOY
|
||||
HYZl8k0+m8X3zxeYm9a3QAoAml8sfoaFRFQP8ynnefRrLUPaZ2MjbJ0SACMwZNef
|
||||
T6o7Mi8LBAaiNZdYVyIfX1oM6YXtqYkuJdav6ZCyvVYqc9OvMJPY2ZzJYuI/ZtvQ
|
||||
/lTndxCeg9ALNX/iezOLGdfMpf4HuIFVwcPPlwGi+HDlB9/bggDEHC8z434SXVFc
|
||||
aQatXAPcDkjMUweU7y0CZtYEj00HITd4pSX6MqGiHrxlDZTqinCOPs1Ieqp7qufs
|
||||
MzlM6irLGucxj1+wa16ieyYvEtGaPIsksUKkywx0O7cf8N2qKg+eIkUk6O0Uc6eO
|
||||
CszizmiXIXy4O6OiLlVHGKkXHMSW9Nwe9GE95O8G9WR8OZCEuDv+mHPAutO+IjdP
|
||||
PDAAUvy+3XnkceO+HGWRpVvJZfFP2YH4A33InFL5yqlJmSoR/yVingGLxk55bZDM
|
||||
+HYGR3VeMb8Xj1rf/02qERsZyccMCFdAvKDbTwmvglyHdVLu5sPmktxbBYiemfyJ
|
||||
qxMxmYXCc9S0hWrWZW7edktBa9NpE58z1mx+hRIrDNbS2sDHrib9PULYCySyVYcF
|
||||
P+PWEe1CAS5jqkR2ker5td2/pHNnJIycynBEs7l6zbc9fu+nktFJz0q2B+GJAhwE
|
||||
EAEKAAYFAlUwGaQACgkQ+tiY1qQ2QkjMFw//f2hNY3BPNe+1qbhzumMDCnbTnGif
|
||||
kLuAGl9OKt81VHG1f6RnaGiLpR696+6Ja45KzH15cQ5JJl5Bgs1YkR/noTGX8IAD
|
||||
c70eNwiFu8JXTaaeeJrsmFkF9Tueufb364risYkvPP8tNUD3InBFEZT3WN7JKwix
|
||||
coD4/BwekUwOZVDd/uCFEyhlhZsROxdKNisNo3VtAq2s+3tIBAmTrriFUl0K+ZC5
|
||||
zgavcpnPN57zMtW9aK+VO3wXqAKYLYmtgxkVzSLUZt2M7JuwOaAdyuYWAneKZPCu
|
||||
1AXkmyo+d84sd5mZaKOr5xArAFiNMWPUcZL4rkS1Fq4dKtGAqzzR7a7hWtA5o27T
|
||||
6vynuxZ1n0PPh0er2O/zF4znIjm5RhTlfjp/VmhZdQfpulFEQ/dMxxGkQ9z5IYbX
|
||||
mTlSDbCSb+FMsanRBJ7Drp5EmBIudVGY6SHI5Re1RQiEh7GoDfUMUwZO+TVDII5R
|
||||
Ra7WyuimYleJgDo/+7HyfuIyGDaUCVj6pwVtYtYIdOI3tTw1R1Mr0V8yaNVnJghL
|
||||
CHcEJQL+YHSmiMM3ySil3O6tm1By6lFz8bVe/rgG/5uklQrnjMR37jYboi1orCC4
|
||||
yeIoQeV0ItlxeTyBwYIV/o1DBNxDevTZvJabC93WiGLw2XFjpZ0q/9+zI2rJUZJh
|
||||
qxmKP+D4e27lCI65Ag0EVTAYdgEQAMvttYNqeRNBRpSX8fk45WVIV8Fb21fWdwk6
|
||||
2SkZnJURbiC0LxQnOi7wrtii7DeFZtwM2kFHihS1VHekBnIKKZQSgGoKuFAQMGyu
|
||||
a426H4ZsSmA9Ufd7kRbvdtEcp7/RTAanhrSL4lkBhaKJrXlxBJ27o3nd7/rh7r3a
|
||||
OszbPY6DJ5bWClX3KooPTDl/RF2lHn+fweFk58UvuunHIyo4BWJUdilSXIjLun+P
|
||||
Qaik4ZAsZVwNhdNz05d+vtai4AwbYoO7adboMLRkYaXSQwGytkm+fM6r7OpXHYuS
|
||||
cR4zB/OK5hxCVEpWfiwN71N2NMvnEMaWd/9uhqxJzyvYgkVUXV9274TUe16pzXnW
|
||||
ZLfmitjwc91e7mJBBfKNenDdhaLEIlDRwKTLj7k58f9srpMnyZFacntu5pUMNblB
|
||||
cjXwWxz5ZaQikLnKYhIvrIEwtWPyjqOzNXNvYfZamve/LJ8HmWGCKao3QHoAIDvB
|
||||
9XBxrDyTJDpxbog6Qu4SY8AdgVlan6c/PsLDc7EUegeYiNTzsOK+eq3G5/E92eIu
|
||||
TsUXlciypFcRm1q8vLRr+HYYe2mJDo4GetB1zLkAFBcYJm/x9iJQbu0hn5NxJvZO
|
||||
R0Y5nOJQdyi+muJzKYwhkuzaOlswzqVXkq/7+QCjg7QsycdcwDjiQh3OrsgXHrwl
|
||||
M7gyafL9ABEBAAGJAh8EGAEKAAkFAlUwGHYCGwwACgkQzUmgr8V5Kb50BxAAhj9T
|
||||
TwmNrgRldTHszj+Qc+v8RWqV6j+R+zc0cn5XlUa6XFaXI1OFFg71H4dhCPEiYeN0
|
||||
IrnocyMNvCol+eKIlPKbPTmoixjQ4udPTR1DC1Bx1MyW5FqOrsgBl5t0e1VwEViM
|
||||
NspSStxu5Hsr6oWz2GD48lXZWJOgoL1RLs+uxjcyjySD/em2fOKASwchYmI+ezRv
|
||||
plfhAFIMKTSCN2pgVTEOaaz13M0U+MoprThqF1LWzkGkkC7n/1V1f5tn83BWiagG
|
||||
2N2Q4tHLfyouzMUKnX28kQ9sXfxwmYb2sA9FNIgxy+TdKU2ofLxivoWT8zS189z/
|
||||
Yj9fErmiMjns2FzEDX+bipAw55X4D/RsaFgC+2x2PDbxeQh6JalRA2Wjq32Ouubx
|
||||
u+I4QhEDJIcVwt9x6LPDuos1F+M5QW0AiUhKrZJ17UrxOtaquh/nPUL9T3l2qPUn
|
||||
1ChrZEEEhHO6vA8+jn0+cV9n5xEz30Str9iHnDQ5QyR5LyV4UBPgTdWyQzNVKA69
|
||||
KsSr9lbHEtQFRzGuBKwt6UlSFv9vPWWJkJit5XDKAlcKuGXj0J8OlltToocGElkF
|
||||
+gEBZfoOWi/IBjRLrFW2cT3p36DTR5O1Ud/1DLnWRqgWNBLrbs2/KMKE6EnHttyD
|
||||
7Tz8SQkuxltX/yBXMV3Ddy0t6nWV2SZEfuxJAQI=
|
||||
=spg4
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
```
|
||||
134
appveyor.yml
Normal file
@@ -0,0 +1,134 @@
|
||||
# Set environment variables.
|
||||
environment:
|
||||
PYTHON: C:/Python27-x64
|
||||
|
||||
# We bundle up protoc.exe and only the parts of boost and openssl we need so
|
||||
# that it's a small download. We also use appveyor's free cache, avoiding fees
|
||||
# downloading from S3 each time.
|
||||
# TODO: script to create this package.
|
||||
RIPPLED_DEPS_PATH: rippled_deps15.02
|
||||
RIPPLED_DEPS_URL: https://ripple.github.io/Downloads/appveyor/%RIPPLED_DEPS_PATH%.zip
|
||||
|
||||
# Other dependencies we just download each time.
|
||||
PIP_PATH: get-pip.py
|
||||
PIP_URL: https://bootstrap.pypa.io/%PIP_PATH%
|
||||
# The % in this URL messes up variable substitution, so any updates will
|
||||
# need to update both PYWIN32_PATH and PYWIN32_URL
|
||||
PYWIN32_PATH: pywin32-220.win-amd64-py2.7.exe
|
||||
PYWIN32_URL: https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20220/pywin32-220.win-amd64-py2.7.exe
|
||||
|
||||
# Scons honours these environment variables, setting the include/lib paths.
|
||||
BOOST_ROOT: C:/%RIPPLED_DEPS_PATH%/boost
|
||||
OPENSSL_ROOT: C:/%RIPPLED_DEPS_PATH%/openssl
|
||||
|
||||
matrix:
|
||||
# This build works, but our current Appveyor config runs matrix builds
|
||||
# sequentially, and the one build is already slow enough.
|
||||
# - build: scons
|
||||
# target: msvc.debug
|
||||
- build: cmake
|
||||
target: msvc.debug
|
||||
buildconfig: Debug
|
||||
|
||||
os: Visual Studio 2015
|
||||
|
||||
# At the end of each successful build we cache this directory.
|
||||
# https://www.appveyor.com/docs/build-cache/
|
||||
# Resulting archive should not exceed 100 MB.
|
||||
cache:
|
||||
- 'C:\%RIPPLED_DEPS_PATH%'
|
||||
- '%PIP_PATH%'
|
||||
- '%PYWIN32_PATH%'
|
||||
|
||||
# This means we'll download a zip of the branch we want, rather than the full
|
||||
# history.
|
||||
shallow_clone: true
|
||||
|
||||
install:
|
||||
# We want easy_install, python and protoc.exe on PATH.
|
||||
- SET PATH=%PYTHON%;%PYTHON%/Scripts;C:/%RIPPLED_DEPS_PATH%;%PATH%
|
||||
|
||||
# `ps` prefix means the command is executed by powershell.
|
||||
- ps: |
|
||||
if ($env:build -eq "scons") {
|
||||
if(-not(Test-Path $env:PIP_PATH)) {
|
||||
echo "Download from $env:PIP_URL"
|
||||
Start-FileDownload $env:PIP_URL
|
||||
}
|
||||
if(-not(Test-Path $env:PYWIN32_PATH)) {
|
||||
echo "Download from $env:PYWIN32_URL"
|
||||
Start-FileDownload $env:PYWIN32_URL
|
||||
}
|
||||
}
|
||||
- bin/ci/windows/install-dependencies.bat
|
||||
|
||||
# Download dependencies if appveyor didn't restore them from the cache.
|
||||
# Use 7zip to unzip.
|
||||
- ps: |
|
||||
if (-not(Test-Path "C:/$env:RIPPLED_DEPS_PATH")) {
|
||||
echo "Download from $env:RIPPLED_DEPS_URL"
|
||||
Start-FileDownload "$env:RIPPLED_DEPS_URL"
|
||||
7z x "$($env:RIPPLED_DEPS_PATH).zip" -oC:\ -y > $null
|
||||
if ($LastExitCode -ne 0) { throw "7z failed" }
|
||||
}
|
||||
|
||||
# Newer DEPS include a versions file.
|
||||
# Dump it so we can verify correct behavior.
|
||||
- ps: |
|
||||
if (Test-Path "C:/$env:RIPPLED_DEPS_PATH/versions.txt") {
|
||||
cat "C:/$env:RIPPLED_DEPS_PATH/versions.txt"
|
||||
}
|
||||
|
||||
# TODO: This is giving me grief
|
||||
# artifacts:
|
||||
# # Save rippled.exe in the cloud after each build.
|
||||
# - path: "build\\rippled.exe"
|
||||
|
||||
build_script:
|
||||
# We set the environment variables needed to put compilers on the PATH.
|
||||
- '"%VS140COMNTOOLS%../../VC/vcvarsall.bat" x86_amd64'
|
||||
# Show which version of the compiler we are using.
|
||||
- cl
|
||||
- ps: |
|
||||
if ($env:build -eq "scons") {
|
||||
# Build with scons
|
||||
scons $env:target -j%NUMBER_OF_PROCESSORS%
|
||||
if ($LastExitCode -ne 0) { throw "scons build failed" }
|
||||
}
|
||||
else
|
||||
{
|
||||
# Build with cmake
|
||||
cmake --version
|
||||
$cmake_target="$($env:target).ci"
|
||||
"$cmake_target"
|
||||
New-Item -ItemType Directory -Force -Path "build/$cmake_target"
|
||||
Push-Location "build/$cmake_target"
|
||||
cmake -G"Visual Studio 14 2015 Win64" -Dtarget="$cmake_target" ../..
|
||||
if ($LastExitCode -ne 0) { throw "CMake failed" }
|
||||
cmake --build . --config $env:buildconfig -- -m
|
||||
if ($LastExitCode -ne 0) { throw "CMake build failed" }
|
||||
Pop-Location
|
||||
}
|
||||
|
||||
after_build:
|
||||
- ps: |
|
||||
if ($env:build -eq "scons") {
|
||||
cp build/$($env:target)/rippled.exe build
|
||||
ls build
|
||||
$exe="build/rippled"
|
||||
}
|
||||
else
|
||||
{
|
||||
$exe="build/$cmake_target/$env:buildconfig/rippled"
|
||||
}
|
||||
"Exe is at $exe"
|
||||
|
||||
test_script:
|
||||
- ps: |
|
||||
& {
|
||||
# Run the rippled unit tests
|
||||
& $exe --unittest
|
||||
# https://connect.microsoft.com/PowerShell/feedback/details/751703/option-to-stop-script-if-command-line-exe-fails
|
||||
if ($LastExitCode -ne 0) { throw "Unit tests failed" }
|
||||
}
|
||||
|
||||
470
bin/browser.js
Executable file
@@ -0,0 +1,470 @@
|
||||
#!/usr/bin/node
|
||||
//
|
||||
// ledger?l=L
|
||||
// transaction?h=H
|
||||
// ledger_entry?l=L&h=H
|
||||
// account?l=L&a=A
|
||||
// directory?l=L&dir_root=H&i=I
|
||||
// directory?l=L&o=A&i=I // owner directory
|
||||
// offer?l=L&offer=H
|
||||
// offer?l=L&account=A&i=I
|
||||
// ripple_state=l=L&a=A&b=A&c=C
|
||||
// account_lines?l=L&a=A
|
||||
//
|
||||
// A=address
|
||||
// C=currency 3 letter code
|
||||
// H=hash
|
||||
// I=index
|
||||
// L=current | closed | validated | index | hash
|
||||
//
|
||||
|
||||
var async = require("async");
|
||||
var extend = require("extend");
|
||||
var http = require("http");
|
||||
var url = require("url");
|
||||
|
||||
var Remote = require("ripple-lib").Remote;
|
||||
|
||||
var program = process.argv[1];
|
||||
|
||||
var httpd_response = function (res, opts) {
|
||||
var self=this;
|
||||
|
||||
res.statusCode = opts.statusCode;
|
||||
res.end(
|
||||
"<HTML>"
|
||||
+ "<HEAD><TITLE>Title</TITLE></HEAD>"
|
||||
+ "<BODY BACKGROUND=\"#FFFFFF\">"
|
||||
+ "State:" + self.state
|
||||
+ "<UL>"
|
||||
+ "<LI><A HREF=\"/\">home</A>"
|
||||
+ "<LI>" + html_link('r4EM4gBQfr1QgQLXSPF4r7h84qE9mb6iCC')
|
||||
// + "<LI><A HREF=\""+test+"\">rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh</A>"
|
||||
+ "<LI><A HREF=\"/ledger\">ledger</A>"
|
||||
+ "</UL>"
|
||||
+ (opts.body || '')
|
||||
+ '<HR><PRE>'
|
||||
+ (opts.url || '')
|
||||
+ '</PRE>'
|
||||
+ "</BODY>"
|
||||
+ "</HTML>"
|
||||
);
|
||||
};
|
||||
|
||||
var html_link = function (generic) {
|
||||
return '<A HREF="' + build_uri({ type: 'account', account: generic}) + '">' + generic + '</A>';
|
||||
};
|
||||
|
||||
// Build a link to a type.
|
||||
var build_uri = function (params, opts) {
|
||||
var c;
|
||||
|
||||
if (params.type === 'account') {
|
||||
c = {
|
||||
pathname: 'account',
|
||||
query: {
|
||||
l: params.ledger,
|
||||
a: params.account,
|
||||
},
|
||||
};
|
||||
|
||||
} else if (params.type === 'ledger') {
|
||||
c = {
|
||||
pathname: 'ledger',
|
||||
query: {
|
||||
l: params.ledger,
|
||||
},
|
||||
};
|
||||
|
||||
} else if (params.type === 'transaction') {
|
||||
c = {
|
||||
pathname: 'transaction',
|
||||
query: {
|
||||
h: params.hash,
|
||||
},
|
||||
};
|
||||
} else {
|
||||
c = {};
|
||||
}
|
||||
|
||||
opts = opts || {};
|
||||
|
||||
c.protocol = "http";
|
||||
c.hostname = opts.hostname || self.base.hostname;
|
||||
c.port = opts.port || self.base.port;
|
||||
|
||||
return url.format(c);
|
||||
};
|
||||
|
||||
var build_link = function (item, link) {
|
||||
console.log(link);
|
||||
return "<A HREF=" + link + ">" + item + "</A>";
|
||||
};
|
||||
|
||||
var rewrite_field = function (type, obj, field, opts) {
|
||||
if (field in obj) {
|
||||
obj[field] = rewrite_type(type, obj[field], opts);
|
||||
}
|
||||
};
|
||||
|
||||
var rewrite_type = function (type, obj, opts) {
|
||||
if ('amount' === type) {
if ('string' === typeof obj) {
// XRP amounts are plain strings (drops), so just emphasize them.
return '<B>' + obj + '</B>';

} else {
// IOU amounts carry an issuer address; turn it into a link.
rewrite_field('address', obj, 'issuer', opts);

return obj;
}
}
|
||||
if ('address' === type) {
|
||||
return build_link(
|
||||
obj,
|
||||
build_uri({
|
||||
type: 'account',
|
||||
account: obj
|
||||
}, opts)
|
||||
);
|
||||
}
|
||||
else if ('ledger' === type) {
|
||||
return build_link(
|
||||
obj,
|
||||
build_uri({
|
||||
type: 'ledger',
|
||||
ledger: obj,
|
||||
}, opts)
|
||||
);
|
||||
}
|
||||
else if ('node' === type) {
|
||||
// A node
|
||||
if ('PreviousTxnID' in obj)
|
||||
obj.PreviousTxnID = rewrite_type('transaction', obj.PreviousTxnID, opts);
|
||||
|
||||
if ('Offer' === obj.LedgerEntryType) {
|
||||
if ('NewFields' in obj) {
|
||||
if ('TakerGets' in obj.NewFields)
|
||||
obj.NewFields.TakerGets = rewrite_type('amount', obj.NewFields.TakerGets, opts);
|
||||
|
||||
if ('TakerPays' in obj.NewFields)
|
||||
obj.NewFields.TakerPays = rewrite_type('amount', obj.NewFields.TakerPays, opts);
|
||||
}
|
||||
}
|
||||
|
||||
obj.LedgerEntryType = '<B>' + obj.LedgerEntryType + '</B>';
|
||||
|
||||
return obj;
|
||||
}
|
||||
else if ('transaction' === type) {
|
||||
// Reference to a transaction.
|
||||
return build_link(
|
||||
obj,
|
||||
build_uri({
|
||||
type: 'transaction',
|
||||
hash: obj
|
||||
}, opts)
|
||||
);
|
||||
}
|
||||
|
||||
return 'ERROR: ' + type;
|
||||
};
|
||||
|
||||
var rewrite_object = function (obj, opts) {
|
||||
var out = extend({}, obj);
|
||||
|
||||
rewrite_field('address', out, 'Account', opts);
|
||||
|
||||
rewrite_field('ledger', out, 'parent_hash', opts);
|
||||
rewrite_field('ledger', out, 'ledger_index', opts);
|
||||
rewrite_field('ledger', out, 'ledger_current_index', opts);
|
||||
rewrite_field('ledger', out, 'ledger_hash', opts);
|
||||
|
||||
if ('ledger' in obj) {
|
||||
// It's a ledger header.
|
||||
out.ledger = rewrite_object(out.ledger, opts);
|
||||
|
||||
if ('ledger_hash' in out.ledger)
|
||||
out.ledger.ledger_hash = '<B>' + out.ledger.ledger_hash + '</B>';
|
||||
|
||||
delete out.ledger.hash;
|
||||
delete out.ledger.totalCoins;
|
||||
}
|
||||
|
||||
if ('TransactionType' in obj) {
|
||||
// It's a transaction.
|
||||
out.TransactionType = '<B>' + obj.TransactionType + '</B>';
|
||||
|
||||
rewrite_field('amount', out, 'TakerGets', opts);
|
||||
rewrite_field('amount', out, 'TakerPays', opts);
|
||||
rewrite_field('ledger', out, 'inLedger', opts);
|
||||
|
||||
out.meta.AffectedNodes = out.meta.AffectedNodes.map(function (node) {
|
||||
var kind = 'CreatedNode' in node
|
||||
? 'CreatedNode'
|
||||
: 'ModifiedNode' in node
|
||||
? 'ModifiedNode'
|
||||
: 'DeletedNode' in node
|
||||
? 'DeletedNode'
|
||||
: undefined;
|
||||
|
||||
if (kind) {
|
||||
node[kind] = rewrite_type('node', node[kind], opts);
|
||||
}
|
||||
return node;
|
||||
});
|
||||
}
|
||||
else if ('node' in obj && 'LedgerEntryType' in obj.node) {
|
||||
// It's a ledger entry.
|
||||
|
||||
if (obj.node.LedgerEntryType === 'AccountRoot') {
|
||||
rewrite_field('address', out.node, 'Account', opts);
|
||||
rewrite_field('transaction', out.node, 'PreviousTxnID', opts);
|
||||
rewrite_field('ledger', out.node, 'PreviousTxnLgrSeq', opts);
|
||||
}
|
||||
|
||||
out.node.LedgerEntryType = '<B>' + out.node.LedgerEntryType + '</B>';
|
||||
}
|
||||
|
||||
return out;
|
||||
};
|
||||
|
||||
var augment_object = function (obj, opts, done) {
|
||||
if (obj.node.LedgerEntryType == 'AccountRoot') {
|
||||
var tx_hash = obj.node.PreviousTxnID;
|
||||
var tx_ledger = obj.node.PreviousTxnLgrSeq;
|
||||
|
||||
obj.history = [];
|
||||
|
||||
async.whilst(
|
||||
function () { return tx_hash; },
|
||||
function (callback) {
|
||||
// console.log("augment_object: request: %s %s", tx_hash, tx_ledger);
|
||||
opts.remote.request_tx(tx_hash)
|
||||
.on('success', function (m) {
|
||||
tx_hash = undefined;
|
||||
tx_ledger = undefined;
|
||||
|
||||
//console.log("augment_object: ", JSON.stringify(m));
|
||||
m.meta.AffectedNodes.filter(function(n) {
|
||||
// console.log("augment_object: ", JSON.stringify(n));
|
||||
// if (n.ModifiedNode)
|
||||
// console.log("augment_object: %s %s %s %s %s %s/%s", 'ModifiedNode' in n, n.ModifiedNode && (n.ModifiedNode.LedgerEntryType === 'AccountRoot'), n.ModifiedNode && n.ModifiedNode.FinalFields && (n.ModifiedNode.FinalFields.Account === obj.node.Account), Object.keys(n)[0], n.ModifiedNode && (n.ModifiedNode.LedgerEntryType), obj.node.Account, n.ModifiedNode && n.ModifiedNode.FinalFields && n.ModifiedNode.FinalFields.Account);
|
||||
// if ('ModifiedNode' in n && n.ModifiedNode.LedgerEntryType === 'AccountRoot')
|
||||
// {
|
||||
// console.log("***: ", JSON.stringify(m));
|
||||
// console.log("***: ", JSON.stringify(n));
|
||||
// }
|
||||
return 'ModifiedNode' in n
|
||||
&& n.ModifiedNode.LedgerEntryType === 'AccountRoot'
|
||||
&& n.ModifiedNode.FinalFields
|
||||
&& n.ModifiedNode.FinalFields.Account === obj.node.Account;
|
||||
})
|
||||
.forEach(function (n) {
|
||||
tx_hash = n.ModifiedNode.PreviousTxnID;
|
||||
tx_ledger = n.ModifiedNode.PreviousTxnLgrSeq;
|
||||
|
||||
obj.history.push({
|
||||
tx_hash: tx_hash,
|
||||
tx_ledger: tx_ledger
|
||||
});
|
||||
console.log("augment_object: next: %s %s", tx_hash, tx_ledger);
|
||||
});
|
||||
|
||||
callback();
|
||||
})
|
||||
.on('error', function (m) {
|
||||
callback(m);
|
||||
})
|
||||
.request();
|
||||
},
|
||||
function (err) {
|
||||
if (err) {
|
||||
done();
|
||||
}
|
||||
else {
|
||||
async.forEach(obj.history, function (o, callback) {
|
||||
opts.remote.request_account_info(obj.node.Account)
|
||||
.ledger_index(o.tx_ledger)
|
||||
.on('success', function (m) {
|
||||
//console.log("augment_object: ", JSON.stringify(m));
|
||||
o.Balance = m.account_data.Balance;
|
||||
// o.account_data = m.account_data;
|
||||
callback();
|
||||
})
|
||||
.on('error', function (m) {
|
||||
o.error = m;
|
||||
callback();
|
||||
})
|
||||
.request();
|
||||
},
|
||||
function (err) {
|
||||
done(err);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
else {
|
||||
done();
|
||||
}
|
||||
};
|
||||
|
||||
if (process.argv.length < 4 || process.argv.length > 7) {
|
||||
console.log("Usage: %s ws_ip ws_port [<ip> [<port> [<start>]]]", program);
|
||||
}
|
||||
else {
|
||||
var ws_ip = process.argv[2];
|
||||
var ws_port = process.argv[3];
|
||||
var ip = process.argv.length > 4 ? process.argv[4] : "127.0.0.1";
|
||||
var port = process.argv.length > 5 ? process.argv[5] : "8080";
|
||||
|
||||
// console.log("START");
|
||||
var self = this;
|
||||
|
||||
var remote = (new Remote({
|
||||
websocket_ip: ws_ip,
|
||||
websocket_port: ws_port,
|
||||
trace: false
|
||||
}))
|
||||
.on('state', function (m) {
|
||||
console.log("STATE: %s", m);
|
||||
|
||||
self.state = m;
|
||||
})
|
||||
// .once('ledger_closed', callback)
|
||||
.connect()
|
||||
;
|
||||
|
||||
self.base = {
|
||||
hostname: ip,
|
||||
port: port,
|
||||
remote: remote,
|
||||
};
|
||||
|
||||
// console.log("SERVE");
|
||||
var server = http.createServer(function (req, res) {
|
||||
var input = "";
|
||||
|
||||
req.setEncoding();
|
||||
|
||||
req.on('data', function (buffer) {
|
||||
// console.log("DATA: %s", buffer);
|
||||
input = input + buffer;
|
||||
});
|
||||
|
||||
req.on('end', function () {
|
||||
// console.log("URL: %s", req.url);
|
||||
// console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));
|
||||
|
||||
var _parsed = url.parse(req.url, true);
|
||||
var _url = JSON.stringify(_parsed, undefined, 2);
|
||||
|
||||
// console.log("HEADERS: %s", JSON.stringify(_parsed, undefined, 2));
|
||||
if (_parsed.pathname === "/account") {
|
||||
var request = remote
|
||||
.request_ledger_entry('account_root')
|
||||
.ledger_index(-1)
|
||||
.account_root(_parsed.query.a)
|
||||
.on('success', function (m) {
|
||||
// console.log("account_root: %s", JSON.stringify(m, undefined, 2));
|
||||
|
||||
augment_object(m, self.base, function() {
|
||||
httpd_response(res,
|
||||
{
|
||||
statusCode: 200,
|
||||
url: _url,
|
||||
body: "<PRE>"
|
||||
+ JSON.stringify(rewrite_object(m, self.base), undefined, 2)
|
||||
+ "</PRE>"
|
||||
});
|
||||
});
|
||||
})
|
||||
.request();
|
||||
|
||||
} else if (_parsed.pathname === "/ledger") {
|
||||
var request = remote
|
||||
.request_ledger(undefined, { expand: true, transactions: true })
|
||||
.on('success', function (m) {
|
||||
// console.log("Ledger: %s", JSON.stringify(m, undefined, 2));
|
||||
|
||||
httpd_response(res,
|
||||
{
|
||||
statusCode: 200,
|
||||
url: _url,
|
||||
body: "<PRE>"
|
||||
+ JSON.stringify(rewrite_object(m, self.base), undefined, 2)
|
||||
+"</PRE>"
|
||||
});
|
||||
})
|
||||
|
||||
if (_parsed.query.l && _parsed.query.l.length === 64) {
|
||||
request.ledger_hash(_parsed.query.l);
|
||||
}
|
||||
else if (_parsed.query.l) {
|
||||
request.ledger_index(Number(_parsed.query.l));
|
||||
}
|
||||
else {
|
||||
request.ledger_index(-1);
|
||||
}
|
||||
|
||||
request.request();
|
||||
|
||||
} else if (_parsed.pathname === "/transaction") {
|
||||
var request = remote
|
||||
.request_tx(_parsed.query.h)
|
||||
// .request_transaction_entry(_parsed.query.h)
|
||||
// .ledger_select(_parsed.query.l)
|
||||
.on('success', function (m) {
|
||||
// console.log("transaction: %s", JSON.stringify(m, undefined, 2));
|
||||
|
||||
httpd_response(res,
|
||||
{
|
||||
statusCode: 200,
|
||||
url: _url,
|
||||
body: "<PRE>"
|
||||
+ JSON.stringify(rewrite_object(m, self.base), undefined, 2)
|
||||
+"</PRE>"
|
||||
});
|
||||
})
|
||||
.on('error', function (m) {
|
||||
httpd_response(res,
|
||||
{
|
||||
statusCode: 200,
|
||||
url: _url,
|
||||
body: "<PRE>"
|
||||
+ 'ERROR: ' + JSON.stringify(m, undefined, 2)
|
||||
+"</PRE>"
|
||||
});
|
||||
})
|
||||
.request();
|
||||
|
||||
} else {
|
||||
var test = build_uri({
|
||||
type: 'account',
|
||||
ledger: 'closed',
|
||||
account: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
|
||||
}, self.base);
|
||||
|
||||
httpd_response(res,
|
||||
{
|
||||
statusCode: req.url === "/" ? 200 : 404,
|
||||
url: _url,
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
server.listen(port, ip, undefined,
|
||||
function () {
|
||||
console.log("Listening at: http://%s:%s", ip, port);
|
||||
});
|
||||
}
|
||||
|
||||
// vim:sw=2:sts=2:ts=8:et
|
||||
92
bin/ci/ubuntu/build-and-test.sh
Executable file
@@ -0,0 +1,92 @@
|
||||
#!/bin/bash -u
|
||||
# We use set -e and bash with -u to bail on the first non-zero exit code of any
# process launched or upon any unbound variable.
|
||||
# We use set -x to print commands before running them to help with
|
||||
# debugging.
|
||||
set -ex
|
||||
__dirname=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
|
||||
echo "using CC: $CC"
|
||||
echo "using TARGET: $TARGET"
|
||||
|
||||
# Ensure APP defaults to rippled if it's not set.
|
||||
: ${APP:=rippled}
|
||||
if [[ ${BUILD:-scons} == "cmake" ]]; then
|
||||
echo "cmake building ${APP}"
|
||||
CMAKE_TARGET=$CC.$TARGET
|
||||
if [[ ${CI:-} == true ]]; then
|
||||
CMAKE_TARGET=$CMAKE_TARGET.ci
|
||||
fi
|
||||
mkdir -p "build/${CMAKE_TARGET}"
|
||||
pushd "build/${CMAKE_TARGET}"
|
||||
cmake ../.. -Dtarget=$CMAKE_TARGET
|
||||
cmake --build . -- -j${NUM_PROCESSORS:-2}
|
||||
popd
|
||||
export APP_PATH="$PWD/build/${CMAKE_TARGET}/${APP}"
|
||||
echo "using APP_PATH: $APP_PATH"
|
||||
|
||||
else
|
||||
export APP_PATH="$PWD/build/$CC.$TARGET/${APP}"
|
||||
echo "using APP_PATH: $APP_PATH"
|
||||
# Make sure vcxproj is up to date
|
||||
scons vcxproj
|
||||
git diff --exit-code
|
||||
# $CC will be either `clang` or `gcc`
|
||||
# http://docs.travis-ci.com/user/migrating-from-legacy/?utm_source=legacy-notice&utm_medium=banner&utm_campaign=legacy-upgrade
|
||||
# indicates that 2 cores are available to containers.
|
||||
scons -j${NUM_PROCESSORS:-2} $CC.$TARGET
|
||||
fi
|
||||
# We can be sure we're using the build/$CC.$TARGET variant
|
||||
# (-f so never err)
|
||||
rm -f build/${APP}
|
||||
|
||||
# See what we've actually built
|
||||
ldd $APP_PATH
|
||||
|
||||
if [[ ${APP} == "rippled" ]]; then
|
||||
export APP_ARGS="--unittest"
|
||||
# Only report on src/ripple files
|
||||
export LCOV_FILES="*/src/ripple/*"
|
||||
# Nothing to explicitly exclude
|
||||
export LCOV_EXCLUDE_FILES="LCOV_NO_EXCLUDE"
|
||||
else
|
||||
: ${APP_ARGS:=}
|
||||
: ${LCOV_FILES:="*/src/*"}
|
||||
# Don't exclude anything
|
||||
: ${LCOV_EXCLUDE_FILES:="LCOV_NO_EXCLUDE"}
|
||||
fi
|
||||
|
||||
if [[ $TARGET == "coverage" ]]; then
|
||||
export PATH=$PATH:$LCOV_ROOT/usr/bin
|
||||
|
||||
# Create baseline coverage data file
|
||||
lcov --no-external -c -i -d . -o baseline.info
|
||||
fi
|
||||
|
||||
# Execute unit tests under gdb, printing a call stack
|
||||
# if we get a crash.
|
||||
gdb -return-child-result -quiet -batch \
|
||||
-ex "set env MALLOC_CHECK_=3" \
|
||||
-ex "set print thread-events off" \
|
||||
-ex run \
|
||||
-ex "thread apply all backtrace full" \
|
||||
-ex "quit" \
|
||||
--args $APP_PATH --unittest
|
||||
|
||||
if [[ $TARGET == "coverage" ]]; then
|
||||
# Create test coverage data file
|
||||
lcov --no-external -c -d . -o tests.info
|
||||
|
||||
# Combine baseline and test coverage data
|
||||
lcov -a baseline.info -a tests.info -o lcov-all.info
|
||||
|
||||
# Included files
|
||||
lcov -e "lcov-all.info" "${LCOV_FILES}" -o lcov.pre.info
|
||||
|
||||
# Excluded files
|
||||
lcov --remove lcov.pre.info "${LCOV_EXCLUDE_FILES}" -o lcov.info
|
||||
|
||||
# Push the results (lcov.info) to codecov
|
||||
codecov -X gcov # don't even try and look for .gcov files ;)
|
||||
fi
|
||||
|
||||
|
||||
67
bin/ci/ubuntu/install-dependencies.sh
Executable file
@@ -0,0 +1,67 @@
|
||||
#!/bin/bash -u
|
||||
# Exit if anything fails. Echo commands to aid debugging.
|
||||
set -ex
|
||||
|
||||
# Target working dir - defaults to current dir.
|
||||
# Can be set from caller, or in the first parameter
|
||||
TWD=$( cd ${TWD:-${1:-${PWD:-$( pwd )}}}; pwd )
|
||||
echo "Target path is: $TWD"
|
||||
# Override gcc version to $GCC_VER.
|
||||
# Put an appropriate symlink at the front of the path.
|
||||
mkdir -v $HOME/bin
|
||||
for g in gcc g++ gcov gcc-ar gcc-nm gcc-ranlib
|
||||
do
|
||||
test -x $( type -p ${g}-$GCC_VER )
|
||||
ln -sv $(type -p ${g}-$GCC_VER) $HOME/bin/${g}
|
||||
done
|
||||
|
||||
if [[ -n ${CLANG_VER:-} ]]; then
|
||||
# There are cases where the directory exists, but the exe is not available.
|
||||
# Use this workaround for now.
|
||||
if [[ ! -x ${TWD}/llvm-${LLVM_VERSION}/bin/llvm-config && -d ${TWD}/llvm-${LLVM_VERSION} ]]; then
|
||||
rm -fr ${TWD}/llvm-${LLVM_VERSION}
|
||||
fi
|
||||
if [[ ! -d ${TWD}/llvm-${LLVM_VERSION} ]]; then
|
||||
mkdir ${TWD}/llvm-${LLVM_VERSION}
|
||||
LLVM_URL="http://llvm.org/releases/${LLVM_VERSION}/clang+llvm-${LLVM_VERSION}-x86_64-linux-gnu-ubuntu-14.04.tar.xz"
|
||||
wget -O - ${LLVM_URL} | tar -Jxvf - --strip 1 -C ${TWD}/llvm-${LLVM_VERSION}
|
||||
fi
|
||||
${TWD}/llvm-${LLVM_VERSION}/bin/llvm-config --version;
|
||||
export LLVM_CONFIG="${TWD}/llvm-${LLVM_VERSION}/bin/llvm-config";
|
||||
fi
|
||||
|
||||
if [[ ${BUILD:-} == cmake ]]; then
|
||||
# There are cases where the directory exists, but the exe is not available.
|
||||
# Use this workaround for now.
|
||||
if [[ ! -x ${TWD}/cmake/bin/cmake && -d ${TWD}/cmake ]]; then
|
||||
rm -fr ${TWD}/cmake
|
||||
fi
|
||||
if [[ ! -d ${TWD}/cmake ]]; then
|
||||
CMAKE_URL="https://www.cmake.org/files/v3.6/cmake-3.6.1-Linux-x86_64.tar.gz"
|
||||
wget --version
|
||||
# wget version 1.13.4 thinks this certificate is invalid, even though it's fine.
|
||||
# "ERROR: no certificate subject alternative name matches"
|
||||
# See also: https://github.com/travis-ci/travis-ci/issues/5059
|
||||
mkdir ${TWD}/cmake &&
|
||||
wget -O - --no-check-certificate ${CMAKE_URL} | tar --strip-components=1 -xz -C ${TWD}/cmake
|
||||
cmake --version
|
||||
fi
|
||||
fi
|
||||
|
||||
# What versions are we ACTUALLY running?
|
||||
if [ -x $HOME/bin/g++ ]; then
|
||||
$HOME/bin/g++ -v
|
||||
fi
|
||||
|
||||
pip install --user https://github.com/codecov/codecov-python/archive/master.zip
|
||||
|
||||
bash bin/sh/install-boost.sh
|
||||
|
||||
# Install lcov
|
||||
# Download the archive
|
||||
wget https://github.com/linux-test-project/lcov/releases/download/v1.12/lcov-1.12.tar.gz
|
||||
# Extract to ~/lcov-1.12
|
||||
tar xfvz lcov-1.12.tar.gz -C $HOME
|
||||
# Set install path
|
||||
mkdir -p $LCOV_ROOT
|
||||
cd $HOME/lcov-1.12 && make install PREFIX=$LCOV_ROOT
|
||||
13
bin/ci/windows/install-dependencies.bat
Normal file
@@ -0,0 +1,13 @@
|
||||
if "%build%" == "scons" (
|
||||
rem Installing pip will install setuptools/easy_install.
|
||||
python "%PIP_PATH%"
|
||||
|
||||
rem Pip has some problems installing scons on windows so we use easy install.
|
||||
rem - easy_install scons
|
||||
rem Workaround
|
||||
easy_install https://pypi.python.org/packages/source/S/SCons/scons-2.5.0.tar.gz#md5=bda5530a70a41a7831d83c8b191c021e
|
||||
|
||||
rem Scons has problems with parallel builds on windows without pywin32.
|
||||
easy_install "%PYWIN32_PATH%"
|
||||
rem (easy_install can do headless installs of .exe wizards)
|
||||
)
|
||||
64
bin/debug_local_sign.js
Normal file
@@ -0,0 +1,64 @@
|
||||
var ripple = require('ripple-lib');
|
||||
|
||||
var v = {
|
||||
seed: "snoPBrXtMeMyMHUVTgbuqAfg1SUTb",
|
||||
addr: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh"
|
||||
};
|
||||
|
||||
var remote = ripple.Remote.from_config({
|
||||
"trusted" : true,
|
||||
"websocket_ip" : "127.0.0.1",
|
||||
"websocket_port" : 5006,
|
||||
"websocket_ssl" : false,
|
||||
"local_signing" : true
|
||||
});
|
||||
|
||||
var tx_json = {
|
||||
"Account" : v.addr,
|
||||
"Amount" : "10000000",
|
||||
"Destination" : "rEu2ULPiEQm1BAL8pYzmXnNX1aFX9sCks",
|
||||
"Fee" : "10",
|
||||
"Flags" : 0,
|
||||
"Sequence" : 3,
|
||||
"TransactionType" : "Payment"
|
||||
|
||||
//"SigningPubKey": '0396941B22791A448E5877A44CE98434DB217D6FB97D63F0DAD23BE49ED45173C9'
|
||||
};
|
||||
|
||||
remote.on('connected', function () {
|
||||
var req = remote.request_sign(v.seed, tx_json);
|
||||
req.message.debug_signing = true;
|
||||
req.on('success', function (result) {
|
||||
console.log("SERVER RESULT");
|
||||
console.log(result);
|
||||
|
||||
var sim = {};
|
||||
var tx = remote.transaction();
|
||||
tx.tx_json = tx_json;
|
||||
tx._secret = v.seed;
|
||||
tx.complete();
|
||||
var unsigned = tx.serialize().to_hex();
|
||||
tx.sign();
|
||||
|
||||
sim.tx_blob = tx.serialize().to_hex();
|
||||
sim.tx_json = tx.tx_json;
|
||||
sim.tx_signing_hash = tx.signing_hash().to_hex();
|
||||
sim.tx_unsigned = unsigned;
|
||||
|
||||
console.log("\nLOCAL RESULT");
|
||||
console.log(sim);
|
||||
|
||||
remote.connect(false);
|
||||
});
|
||||
req.on('error', function (err) {
|
||||
if (err.error === "remoteError" && err.remote.error === "srcActNotFound") {
|
||||
console.log("Please fund account "+v.addr+" to run this test.");
|
||||
} else {
|
||||
console.log('error', err);
|
||||
}
|
||||
remote.connect(false);
|
||||
});
|
||||
req.request();
|
||||
|
||||
});
|
||||
remote.connect();
|
||||
18
bin/email_hash.js
Executable file
@@ -0,0 +1,18 @@
|
||||
#!/usr/bin/node
|
||||
//
|
||||
// Returns a Gravatar style hash as per: http://en.gravatar.com/site/implement/hash/
|
||||
//
|
||||
|
||||
if (3 != process.argv.length) {
|
||||
process.stderr.write("Usage: " + process.argv[1] + " email_address\n\nReturns gravatar style hash.\n");
|
||||
process.exit(1);
|
||||
|
||||
} else {
|
||||
var md5 = require('crypto').createHash('md5');
|
||||
|
||||
md5.update(process.argv[2].trim().toLowerCase());
|
||||
|
||||
process.stdout.write(md5.digest('hex') + "\n");
|
||||
}
|
||||
|
||||
// vim:sw=2:sts=2:ts=8:et
|
||||
31
bin/flash_policy.js
Executable file
@@ -0,0 +1,31 @@
|
||||
#!/usr/bin/node
|
||||
//
|
||||
// This program allows IE 9 ripple-clients to make websocket connections to
|
||||
// rippled using flash. As IE 9 does not have websocket support, this is required
|
||||
// if you wish to support IE 9 ripple-clients.
|
||||
//
|
||||
// http://www.lightsphere.com/dev/articles/flash_socket_policy.html
|
||||
//
|
||||
// For better security, be sure to set the Port below to the port of your
|
||||
// [websocket_public_port].
|
||||
//
|
||||
|
||||
var net = require("net"),
|
||||
port = "*",
|
||||
domains = ["*:"+port]; // Domain:Port
|
||||
|
||||
net.createServer(
|
||||
function(socket) {
|
||||
socket.write("<?xml version='1.0' ?>\n");
|
||||
socket.write("<!DOCTYPE cross-domain-policy SYSTEM 'http://www.macromedia.com/xml/dtds/cross-domain-policy.dtd'>\n");
|
||||
socket.write("<cross-domain-policy>\n");
|
||||
domains.forEach(
|
||||
function(domain) {
|
||||
var parts = domain.split(':');
|
||||
socket.write("\t<allow-access-from domain='" + parts[0] + "' to-ports='" + parts[1] + "' />\n");
|
||||
}
|
||||
);
|
||||
socket.write("</cross-domain-policy>\n");
|
||||
socket.end();
|
||||
}
|
||||
).listen(843);
|
||||
84
bin/getInfoRippled.sh
Normal file
@@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
rippled_exe=/opt/ripple/bin/rippled
|
||||
conf_file=/etc/opt/ripple/rippled.cfg
|
||||
|
||||
while getopts ":e:c:" opt; do
|
||||
case $opt in
|
||||
e)
|
||||
rippled_exe=${OPTARG}
|
||||
;;
|
||||
c)
|
||||
conf_file=${OPTARG}
|
||||
;;
|
||||
\?)
|
||||
echo "Invalid option: -$OPTARG"
|
||||
esac
|
||||
done
|
||||
|
||||
tmp_loc=$(mktemp -d --tmpdir ripple_info.XXXX)
|
||||
cd /tmp
|
||||
chmod 751 ripple_info.*
|
||||
cd ~
|
||||
echo ${tmp_loc}
|
||||
|
||||
cleaned_conf=${tmp_loc}/cleaned_rippled_cfg.txt
|
||||
|
||||
if [[ -f ${conf_file} ]]
|
||||
then
|
||||
db=$(sed -r -e 's/\<s[a-zA-Z0-9]{28}\>/secretsecretsecretsecretmaybe/g' ${conf_file} |\
|
||||
awk -v OUT_FILE=${cleaned_conf} '
|
||||
BEGIN {skip=0; db_path="";print > OUT_FILE}
|
||||
/^\[validation_seed\]/ {skip=1; next}
|
||||
/^\[node_seed\]/ {skip=1; next}
|
||||
/^\[.*\]/ {skip=0}
|
||||
skip==1 {next}
|
||||
save==1 {save=0;db_path=$0}
|
||||
/^\[database_path\]/ {save=1}
|
||||
{print >> OUT_FILE}
|
||||
END {print db_path}
|
||||
')
|
||||
fi
|
||||
|
||||
echo "database_path: ${db}"
|
||||
df ${db} > ${tmp_loc}/db_path_df.txt
|
||||
echo
|
||||
|
||||
# Send output from this script to a log file
|
||||
## this captures any messages
|
||||
## or errors from the script itself
|
||||
|
||||
log_file=${tmp_loc}/get_info.log
|
||||
exec 3>&1 1>>${log_file} 2>&1
|
||||
|
||||
## Send all stdout files to /tmp
|
||||
|
||||
if [[ -x ${rippled_exe} ]]
|
||||
then
|
||||
pgrep rippled && \
|
||||
${rippled_exe} --conf ${conf_file} \
|
||||
-- server_info > ${tmp_loc}/server_info.txt
|
||||
fi
|
||||
|
||||
df -h > ${tmp_loc}/free_disk_space.txt
|
||||
cat /proc/meminfo > ${tmp_loc}/amount_mem.txt
|
||||
cat /proc/swaps > ${tmp_loc}/swap_space.txt
|
||||
ulimit -a > ${tmp_loc}/reported_current_limits.txt
|
||||
|
||||
for dev_path in $(df | awk '$1 ~ /^\/dev\// {print $1}'); do
|
||||
# strip numbers from end and remove '/dev/'
|
||||
dev=$(basename ${dev_path%%[0-9]})
|
||||
if [[ "$(cat /sys/block/${dev}/queue/rotational)" = 0 ]]
|
||||
then
|
||||
echo "${dev} : SSD" >> ${tmp_loc}/is_ssd.txt
|
||||
else
|
||||
echo "${dev} : NO SSD" >> ${tmp_loc}/is_ssd.txt
|
||||
fi
|
||||
done
|
||||
|
||||
pushd ${tmp_loc}
|
||||
tar -czvf info-package.tar.gz *.txt *.log
|
||||
popd
|
||||
|
||||
echo "Use the following command on your local machine to download from your rippled instance: scp <remote_rippled_username>@<remote_host>:${tmp_loc}/info-package.tar.gz <path/to/local_machine/directory>"| tee /dev/fd/3
|
||||
|
||||
@@ -1,85 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
if [[ $# -ne 1 || "$1" == "--help" || "$1" == "-h" ]]
|
||||
then
|
||||
name=$( basename $0 )
|
||||
cat <<- USAGE
|
||||
Usage: $name <username>
|
||||
|
||||
Where <username> is the Github username of the upstream repo. e.g. XRPLF
|
||||
USAGE
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Create upstream remotes based on origin
|
||||
user="$1"
shift
|
||||
# Get the origin URL. Expect it be an SSH-style URL
|
||||
origin=$( git remote get-url origin )
|
||||
if [[ "${origin}" == "" ]]
|
||||
then
|
||||
echo Invalid origin remote >&2
|
||||
exit 1
|
||||
fi
|
||||
# echo "Origin: ${origin}"
|
||||
# Parse the origin
|
||||
ifs_orig="${IFS}"
|
||||
IFS=':' read remote originpath <<< "${origin}"
|
||||
# echo "Remote: ${remote}, Originpath: ${originpath}"
|
||||
IFS='@' read sshuser server <<< "${remote}"
|
||||
# echo "SSHUser: ${sshuser}, Server: ${server}"
|
||||
IFS='/' read originuser repo <<< "${originpath}"
|
||||
# echo "Originuser: ${originuser}, Repo: ${repo}"
|
||||
if [[ "${sshuser}" == "" || "${server}" == "" || "${originuser}" == ""
|
||||
|| "${repo}" == "" ]]
|
||||
then
|
||||
echo "Can't parse origin URL: ${origin}" >&2
|
||||
exit 1
|
||||
fi
|
||||
upstream="https://${server}/${user}/${repo}"
|
||||
upstreampush="${remote}:${user}/${repo}"
|
||||
upstreamgroup="upstream upstream-push"
|
||||
current=$( git remote get-url upstream 2>/dev/null )
|
||||
currentpush=$( git remote get-url upstream-push 2>/dev/null )
|
||||
currentgroup=$( git config remotes.upstreams )
|
||||
if [[ "${current}" == "${upstream}" ]]
|
||||
then
|
||||
echo "Upstream already set up correctly. Skip"
|
||||
elif [[ -n "${current}" && "${current}" != "${upstream}" &&
|
||||
"${current}" != "${upstreampush}" ]]
|
||||
then
|
||||
echo "Upstream already set up as: ${current}. Skip"
|
||||
else
|
||||
if [[ "${current}" == "${upstreampush}" ]]
|
||||
then
|
||||
echo "Upstream set to dangerous push URL. Update."
|
||||
_run git remote rename upstream upstream-push || \
|
||||
_run git remote remove upstream
|
||||
currentpush=$( git remote get-url upstream-push 2>/dev/null )
|
||||
fi
|
||||
_run git remote add upstream "${upstream}"
|
||||
fi
|
||||
|
||||
if [[ "${currentpush}" == "${upstreampush}" ]]
|
||||
then
|
||||
echo "upstream-push already set up correctly. Skip"
|
||||
elif [[ -n "${currentpush}" && "${currentpush}" != "${upstreampush}" ]]
|
||||
then
|
||||
echo "upstream-push already set up as: ${currentpush}. Skip"
|
||||
else
|
||||
_run git remote add upstream-push "${upstreampush}"
|
||||
fi
|
||||
|
||||
if [[ "${currentgroup}" == "${upstreamgroup}" ]]
|
||||
then
|
||||
echo "Upstreams group already set up correctly. Skip"
|
||||
elif [[ -n "${currentgroup}" && "${currentgroup}" != "${upstreamgroup}" ]]
|
||||
then
|
||||
echo "Upstreams group already set up as: ${currentgroup}. Skip"
|
||||
else
|
||||
_run git config --add remotes.upstreams "${upstreamgroup}"
|
||||
fi
|
||||
|
||||
_run git fetch --jobs=$(nproc) upstreams
|
||||
|
||||
exit 0
|
||||
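The remote URL parsing above is a chain of IFS-scoped reads. A hypothetical trace of the same splits in Python, for an illustrative origin of git@github.com:alice/rippled.git and upstream user XRPLF:

origin = "git@github.com:alice/rippled.git"
remote, originpath = origin.split(":", 1)    # "git@github.com", "alice/rippled.git"
sshuser, server = remote.split("@", 1)       # "git", "github.com"
originuser, repo = originpath.split("/", 1)  # "alice", "rippled.git"
user = "XRPLF"
print("https://%s/%s/%s" % (server, user, repo))  # upstream:      https://github.com/XRPLF/rippled.git
print("%s:%s/%s" % (remote, user, repo))          # upstream-push: git@github.com:XRPLF/rippled.git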
@@ -1,68 +0,0 @@
#!/bin/bash

if [[ $# -lt 3 || "$1" == "--help" || "$1" == "-h" ]]
then
    name=$( basename $0 )
    cat <<- USAGE
Usage: $name workbranch base/branch user/branch [user/branch [...]]

* workbranch will be created locally from base/branch
* base/branch and user/branch may be specified as user:branch to allow
  easy copying from Github PRs
* Remotes for each user must already be set up
USAGE
    exit 0
fi

work="$1"
shift

branches=( $( echo "${@}" | sed "s/:/\//g" ) )
base="${branches[0]}"
unset branches[0]

set -e

users=()
for b in "${branches[@]}"
do
    users+=( $( echo $b | cut -d/ -f1 ) )
done

users=( $( printf '%s\n' "${users[@]}" | sort -u ) )

git fetch --multiple upstreams "${users[@]}"
git checkout -B "$work" --no-track "$base"

for b in "${branches[@]}"
do
    git merge --squash "${b}"
    git commit -S # Use the commit message provided on the PR
done

# Make sure the commits look right
git log --show-signature "$base..HEAD"

parts=( $( echo $base | sed "s/\// /" ) )
repo="${parts[0]}"
b="${parts[1]}"
push=$repo
if [[ "$push" == "upstream" ]]
then
    push="upstream-push"
fi
if [[ "$repo" == "upstream" ]]
then
    repo="upstreams"
fi
cat << PUSH

-------------------------------------------------------------------
This script will not push. Verify everything is correct, then push
to your repo, and create a PR if necessary. Once the PR is approved,
run:

git push $push HEAD:$b
git fetch $repo
-------------------------------------------------------------------
PUSH
@@ -1,58 +0,0 @@
#!/bin/bash

if [[ $# -ne 3 || "$1" == "--help" || "$1" == "-h" ]]
then
    name=$( basename $0 )
    cat <<- USAGE
Usage: $name workbranch base/branch version

* workbranch will be created locally from base/branch. If it exists,
  it will be reused, so make sure you don't overwrite any work.
* base/branch may be specified as user:branch to allow easy copying
  from Github PRs.
USAGE
    exit 0
fi

work="$1"
shift

base=$( echo "$1" | sed "s/:/\//" )
shift

version=$1
shift

set -e

git fetch upstreams

git checkout -B "${work}" --no-track "${base}"

push=$( git rev-parse --abbrev-ref --symbolic-full-name '@{push}' \
    2>/dev/null ) || true
if [[ "${push}" != "" ]]
then
    echo "Warning: ${push} may already exist."
fi

build=$( find -name BuildInfo.cpp )
# If sed changes nothing, diff exits 0 and the script aborts: either the
# version was already set, or the versionString pattern was not found.
sed 's/\(^.*versionString =\).*$/\1 "'${version}'"/' ${build} > version.cpp && \
    diff "${build}" version.cpp && exit 1 || \
    mv -vi version.cpp ${build}

git diff

git add ${build}

git commit -S -m "Set version to ${version}"

git log --oneline --first-parent ${base}^..

cat << PUSH

-------------------------------------------------------------------
This script will not push. Verify everything is correct, then push
to your repo, and create a PR as described in CONTRIBUTING.md.
-------------------------------------------------------------------
PUSH
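A hypothetical restatement of the sed-and-diff step in Python, to make the abort condition explicit; the file name and version string below are illustrative only:

import re, sys

src = open("BuildInfo.cpp").read()
new = re.sub(r'(^.*versionString =).*$', r'\1 "0.50.3"', src, flags=re.M)
if new == src:
    # nothing matched, or the version was already set: stop, like `exit 1` above
    sys.exit("version not updated")
open("BuildInfo.cpp", "w").write(new)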
23
bin/hexify.js
Executable file
@@ -0,0 +1,23 @@
#!/usr/bin/node
//
// Returns the hex of a lowercased string.
//

var stringToHex = function (s) {
  return Array.prototype.map.call(s, function (c) {
    var b = c.charCodeAt(0);

    return b < 16 ? "0" + b.toString(16) : b.toString(16);
  }).join("");
};

if (3 != process.argv.length) {
  process.stderr.write("Usage: " + process.argv[1] + " string\n\nReturns hex of lowercasing string.\n");
  process.exit(1);

} else {

  process.stdout.write(stringToHex(process.argv[2].toLowerCase()) + "\n");
}

// vim:sw=2:sts=2:ts=8:et
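For example, bin/hexify.js USD prints 757364. The same transform in Python, for reference only (not part of the tree):

def string_to_hex(s):
    # two hex digits per character code, zero-padded
    return "".join("%02x" % ord(c) for c in s)

print(string_to_hex("USD".lower()))  # -> 757364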
42
bin/jsonrpc_request.js
Executable file
@@ -0,0 +1,42 @@
#!/usr/bin/node
//
// This is a tool to issue JSON-RPC requests from the command line.
//
// This can be used to test a JSON-RPC server.
//
// Requires: npm simple-jsonrpc
//

var jsonrpc = require('simple-jsonrpc');

var program = process.argv[1];

if (5 !== process.argv.length) {
  console.log("Usage: %s <URL> <method> <json>", program);
}
else {
  var url = process.argv[2];
  var method = process.argv[3];
  var json_raw = process.argv[4];
  var json;

  try {
    json = JSON.parse(json_raw);
  }
  catch (e) {
    console.log("JSON parse error: %s", e.message);
    throw e;
  }

  var client = jsonrpc.client(url);

  client.call(method, json,
    function (result) {
      console.log(JSON.stringify(result, undefined, 2));
    },
    function (error) {
      console.log(JSON.stringify(error, undefined, 2));
    });
}

// vim:sw=2:sts=2:ts=8:et
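If npm simple-jsonrpc is unavailable, here is a dependency-free sketch of the same client using only Python's standard library; the URL, method, and params are placeholders:

import json
import urllib.request

def jsonrpc_call(url, method, params):
    # Build a JSON-RPC 2.0 request and return the decoded response body.
    payload = json.dumps({"jsonrpc": "2.0", "id": 1,
                          "method": method, "params": params}).encode()
    req = urllib.request.Request(url, data=payload,
                                 headers={"Content-Type": "application/json"})
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read())

# e.g. jsonrpc_call("http://127.0.0.1:5005", "server_info", [{}])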
68
bin/jsonrpc_server.js
Executable file
@@ -0,0 +1,68 @@
#!/usr/bin/node
//
// This is a tool to listen for JSON-RPC requests at an IP and port.
//
// This will report the request to console and echo back the request as the response.
//

var http = require("http");

var program = process.argv[1];

if (4 !== process.argv.length) {
  console.log("Usage: %s <ip> <port>", program);
}
else {
  var ip = process.argv[2];
  var port = process.argv[3];

  var server = http.createServer(function (req, res) {
    console.log("CONNECT");
    var input = "";

    req.setEncoding('utf8');

    req.on('data', function (buffer) {
      // console.log("DATA: %s", buffer);
      input = input + buffer;
    });

    req.on('end', function () {
      // console.log("END");

      var json_req;

      console.log("URL: %s", req.url);
      console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));

      try {
        json_req = JSON.parse(input);

        console.log("REQ: %s", JSON.stringify(json_req, undefined, 2));
      }
      catch (e) {
        console.log("BAD JSON: %s", e.message);

        json_req = { error: e.message };
      }

      res.statusCode = 200;
      res.end(JSON.stringify({
        jsonrpc: "2.0",
        result: { request: json_req },
        id: json_req.id  // echo the caller's request id, per JSON-RPC 2.0
      }));
    });

    req.on('close', function () {
      console.log("CLOSE");
    });
  });

  server.listen(port, ip, undefined,
    function () {
      console.log("Listening at: %s:%s", ip, port);
    });
}

// vim:sw=2:sts=2:ts=8:et
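A quick smoke test for the echo server, assuming it was started as bin/jsonrpc_server.js 127.0.0.1 8000 (address and port illustrative):

import json
import urllib.request

req = urllib.request.Request(
    "http://127.0.0.1:8000/",
    data=json.dumps({"jsonrpc": "2.0", "id": 1, "method": "ping"}).encode(),
    headers={"Content-Type": "application/json"})
# The server echoes the parsed request back under result.request.
print(urllib.request.urlopen(req).read().decode())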
1
bin/manifest
Symbolic link
@@ -0,0 +1 @@
python/Manifest.py
24
bin/python/LedgerTool.py
Executable file
@@ -0,0 +1,24 @@
#!/usr/bin/env python

from __future__ import absolute_import, division, print_function, unicode_literals

import sys
import traceback

from ripple.ledger import Server
from ripple.ledger.commands import Cache, Info, Print
from ripple.ledger.Args import ARGS
from ripple.util import Log
from ripple.util.CommandList import CommandList

_COMMANDS = CommandList(Cache, Info, Print)

if __name__ == '__main__':
    try:
        server = Server.Server()
        args = list(ARGS.command)
        _COMMANDS.run_safe(args.pop(0), server, *args)
    except Exception as e:
        if ARGS.verbose:
            print(traceback.format_exc(), file=sys.stderr)
        Log.error(e)
7
bin/python/Manifest.py
Executable file
@@ -0,0 +1,7 @@
#!/usr/bin/env python

import sys
from ripple.util import Sign

result = Sign.run_command(sys.argv[1:])
sys.exit(0 if result else -1)
15
bin/python/README.md
Normal file
@@ -0,0 +1,15 @@
Unit Tests
==========

To run the Python unit tests, execute:

    python -m unittest discover

from this directory.

To run Python unit tests from a particular file (such as
`ripple/util/test_Sign.py`), execute:

    python -m unittest ripple.util.test_Sign

Add `-v` to run tests in verbose mode.
251
bin/python/decorator.py
Normal file
@@ -0,0 +1,251 @@
########################## LICENCE ###############################

# Copyright (c) 2005-2012, Michele Simionato
# All rights reserved.

# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:

#   Redistributions of source code must retain the above copyright
#   notice, this list of conditions and the following disclaimer.
#   Redistributions in bytecode form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in
#   the documentation and/or other materials provided with the
#   distribution.

# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.

"""
Decorator module, see http://pypi.python.org/pypi/decorator
for the documentation.
"""

__version__ = '3.4.0'

__all__ = ["decorator", "FunctionMaker", "contextmanager"]

import sys, re, inspect

if sys.version >= '3':
    from inspect import getfullargspec

    def get_init(cls):
        return cls.__init__
else:
    class getfullargspec(object):
        "A quick and dirty replacement for getfullargspec for Python 2.X"
        def __init__(self, f):
            self.args, self.varargs, self.varkw, self.defaults = \
                inspect.getargspec(f)
            self.kwonlyargs = []
            self.kwonlydefaults = None

        def __iter__(self):
            yield self.args
            yield self.varargs
            yield self.varkw
            yield self.defaults

    def get_init(cls):
        return cls.__init__.im_func

DEF = re.compile('\s*def\s*([_\w][_\w\d]*)\s*\(')


# basic functionality
class FunctionMaker(object):
    """
    An object with the ability to create functions with a given signature.
    It has attributes name, doc, module, signature, defaults, dict and
    methods update and make.
    """
    def __init__(self, func=None, name=None, signature=None,
                 defaults=None, doc=None, module=None, funcdict=None):
        self.shortsignature = signature
        if func:
            # func can be a class or a callable, but not an instance method
            self.name = func.__name__
            if self.name == '<lambda>':  # small hack for lambda functions
                self.name = '_lambda_'
            self.doc = func.__doc__
            self.module = func.__module__
            if inspect.isfunction(func):
                argspec = getfullargspec(func)
                self.annotations = getattr(func, '__annotations__', {})
                for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
                          'kwonlydefaults'):
                    setattr(self, a, getattr(argspec, a))
                for i, arg in enumerate(self.args):
                    setattr(self, 'arg%d' % i, arg)
                if sys.version < '3':  # easy way
                    self.shortsignature = self.signature = \
                        inspect.formatargspec(
                            formatvalue=lambda val: "", *argspec)[1:-1]
                else:  # Python 3 way
                    allargs = list(self.args)
                    allshortargs = list(self.args)
                    if self.varargs:
                        allargs.append('*' + self.varargs)
                        allshortargs.append('*' + self.varargs)
                    elif self.kwonlyargs:
                        allargs.append('*')  # single star syntax
                    for a in self.kwonlyargs:
                        allargs.append('%s=None' % a)
                        allshortargs.append('%s=%s' % (a, a))
                    if self.varkw:
                        allargs.append('**' + self.varkw)
                        allshortargs.append('**' + self.varkw)
                    self.signature = ', '.join(allargs)
                    self.shortsignature = ', '.join(allshortargs)
                self.dict = func.__dict__.copy()
        # func=None happens when decorating a caller
        if name:
            self.name = name
        if signature is not None:
            self.signature = signature
        if defaults:
            self.defaults = defaults
        if doc:
            self.doc = doc
        if module:
            self.module = module
        if funcdict:
            self.dict = funcdict
        # check existence required attributes
        assert hasattr(self, 'name')
        if not hasattr(self, 'signature'):
            raise TypeError('You are decorating a non function: %s' % func)

    def update(self, func, **kw):
        "Update the signature of func with the data in self"
        func.__name__ = self.name
        func.__doc__ = getattr(self, 'doc', None)
        func.__dict__ = getattr(self, 'dict', {})
        func.func_defaults = getattr(self, 'defaults', ())
        func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
        func.__annotations__ = getattr(self, 'annotations', None)
        callermodule = sys._getframe(3).f_globals.get('__name__', '?')
        func.__module__ = getattr(self, 'module', callermodule)
        func.__dict__.update(kw)

    def make(self, src_templ, evaldict=None, addsource=False, **attrs):
        "Make a new function from a given template and update the signature"
        src = src_templ % vars(self)  # expand name and signature
        evaldict = evaldict or {}
        mo = DEF.match(src)
        if mo is None:
            raise SyntaxError('not a valid function template\n%s' % src)
        name = mo.group(1)  # extract the function name
        names = set([name] + [arg.strip(' *') for arg in
                              self.shortsignature.split(',')])
        for n in names:
            if n in ('_func_', '_call_'):
                raise NameError('%s is overridden in\n%s' % (n, src))
        if not src.endswith('\n'):  # add a newline just for safety
            src += '\n'  # this is needed in old versions of Python
        try:
            code = compile(src, '<string>', 'single')
            # print >> sys.stderr, 'Compiling %s' % src
            exec code in evaldict
        except:
            print >> sys.stderr, 'Error in generated code:'
            print >> sys.stderr, src
            raise
        func = evaldict[name]
        if addsource:
            attrs['__source__'] = src
        self.update(func, **attrs)
        return func

    @classmethod
    def create(cls, obj, body, evaldict, defaults=None,
               doc=None, module=None, addsource=True, **attrs):
        """
        Create a function from the strings name, signature and body.
        evaldict is the evaluation dictionary. If addsource is true an attribute
        __source__ is added to the result. The attributes attrs are added,
        if any.
        """
        if isinstance(obj, str):  # "name(signature)"
            name, rest = obj.strip().split('(', 1)
            signature = rest[:-1]  # strip a right parens
            func = None
        else:  # a function
            name = None
            signature = None
            func = obj
        self = cls(func, name, signature, defaults, doc, module)
        ibody = '\n'.join('    ' + line for line in body.splitlines())
        return self.make('def %(name)s(%(signature)s):\n' + ibody,
                         evaldict, addsource, **attrs)

def decorator(caller, func=None):
    """
    decorator(caller) converts a caller function into a decorator;
    decorator(caller, func) decorates a function using a caller.
    """
    if func is not None:  # returns a decorated function
        evaldict = func.func_globals.copy()
        evaldict['_call_'] = caller
        evaldict['_func_'] = func
        return FunctionMaker.create(
            func, "return _call_(_func_, %(shortsignature)s)",
            evaldict, undecorated=func, __wrapped__=func)
    else:  # returns a decorator
        if inspect.isclass(caller):
            name = caller.__name__.lower()
            callerfunc = get_init(caller)
            doc = 'decorator(%s) converts functions/generators into ' \
                  'factories of %s objects' % (caller.__name__, caller.__name__)
            fun = getfullargspec(callerfunc).args[1]  # second arg
        elif inspect.isfunction(caller):
            name = '_lambda_' if caller.__name__ == '<lambda>' \
                else caller.__name__
            callerfunc = caller
            doc = caller.__doc__
            fun = getfullargspec(callerfunc).args[0]  # first arg
        else:  # assume caller is an object with a __call__ method
            name = caller.__class__.__name__.lower()
            callerfunc = caller.__call__.im_func
            doc = caller.__call__.__doc__
            fun = getfullargspec(callerfunc).args[1]  # second arg
        evaldict = callerfunc.func_globals.copy()
        evaldict['_call_'] = caller
        evaldict['decorator'] = decorator
        return FunctionMaker.create(
            '%s(%s)' % (name, fun),
            'return decorator(_call_, %s)' % fun,
            evaldict, undecorated=caller, __wrapped__=caller,
            doc=doc, module=caller.__module__)


######################### contextmanager ########################

def __call__(self, func):
    'Context manager decorator'
    return FunctionMaker.create(
        func, "with _self_: return _func_(%(shortsignature)s)",
        dict(_self_=self, _func_=func), __wrapped__=func)

try:  # Python >= 3.2

    from contextlib import _GeneratorContextManager
    ContextManager = type(
        'ContextManager', (_GeneratorContextManager,), dict(__call__=__call__))

except ImportError:  # Python >= 2.5

    from contextlib import GeneratorContextManager
    def __init__(self, f, *a, **k):
        return GeneratorContextManager.__init__(self, f(*a, **k))
    ContextManager = type(
        'ContextManager', (GeneratorContextManager,),
        dict(__call__=__call__, __init__=__init__))

contextmanager = decorator(ContextManager)
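Typical use of the module above: decorator() turns a caller function into a signature-preserving decorator. A hypothetical example, Python 2 only since this vendored copy relies on func_globals and the exec statement:

from decorator import decorator

@decorator
def trace(f, *args, **kw):
    # the caller runs in place of f, with f and its arguments made explicit
    print('calling %s with args %s, %s' % (f.__name__, args, kw))
    return f(*args, **kw)

@trace
def add(x, y):
    return x + y

print(add(1, 2))  # prints the trace line, then 3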
14
bin/python/ecdsa/__init__.py
Normal file
@@ -0,0 +1,14 @@
__all__ = ["curves", "der", "ecdsa", "ellipticcurve", "keys", "numbertheory",
           "test_pyecdsa", "util", "six"]
from .keys import SigningKey, VerifyingKey, BadSignatureError, BadDigestError
from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1

_hush_pyflakes = [SigningKey, VerifyingKey, BadSignatureError, BadDigestError,
                  NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1]
del _hush_pyflakes

# This code comes from http://github.com/warner/python-ecdsa

from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
183
bin/python/ecdsa/_version.py
Normal file
@@ -0,0 +1,183 @@

# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.

# This file is released into the public domain. Generated by
# versioneer-0.12 (https://github.com/warner/python-versioneer)

# these strings will be replaced by git during git-archive
git_refnames = " (HEAD, master)"
git_full = "e7a6daff51221b8edd888cff404596ef90432869"

# these strings are filled in when 'setup.py versioneer' creates _version.py
tag_prefix = "python-ecdsa-"
parentdir_prefix = "ecdsa-"
versionfile_source = "ecdsa/_version.py"

import os, sys, re, subprocess, errno

def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                continue
            if verbose:
                print("unable to run %s" % args[0])
                print(e)
            return None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = p.communicate()[0].strip()
    if sys.version >= '3':
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % args[0])
        return None
    return stdout


def versions_from_parentdir(parentdir_prefix, root, verbose=False):
    # Source tarballs conventionally unpack into a directory that includes
    # both the project name and a version string.
    dirname = os.path.basename(root)
    if not dirname.startswith(parentdir_prefix):
        if verbose:
            print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
                  (root, dirname, parentdir_prefix))
        return None
    return {"version": dirname[len(parentdir_prefix):], "full": ""}

def git_get_keywords(versionfile_abs):
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        f = open(versionfile_abs, "r")
        for line in f.readlines():
            if line.strip().startswith("git_refnames ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["refnames"] = mo.group(1)
            if line.strip().startswith("git_full ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["full"] = mo.group(1)
        f.close()
    except EnvironmentError:
        pass
    return keywords

def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
    if not keywords:
        return {}  # keyword-finding function failed to find keywords
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        return {}  # unexpanded, so not in an unpacked git-archive tarball
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full": keywords["full"].strip()}
    # no suitable tags, so we use the full revision id
    if verbose:
        print("no suitable tags, using full revision id")
    return {"version": keywords["full"].strip(),
            "full": keywords["full"].strip()}


def git_versions_from_vcs(tag_prefix, root, verbose=False):
    # this runs 'git' from the root of the source tree. This only gets called
    # if the git-archive 'subst' keywords were *not* expanded, and
    # _version.py hasn't already been rewritten with a short version string,
    # meaning we're inside a checked out source tree.

    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        return {}

    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    stdout = run_command(GITS, ["describe", "--tags", "--dirty", "--always"],
                         cwd=root)
    if stdout is None:
        return {}
    if not stdout.startswith(tag_prefix):
        if verbose:
            print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
        return {}
    tag = stdout[len(tag_prefix):]
    stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if stdout is None:
        return {}
    full = stdout.strip()
    if tag.endswith("-dirty"):
        full += "-dirty"
    return {"version": tag, "full": full}


def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    keywords = {"refnames": git_refnames, "full": git_full}
    ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
    if ver:
        return ver

    try:
        root = os.path.abspath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in range(len(versionfile_source.split(os.sep))):
            root = os.path.dirname(root)
    except NameError:
        return default

    return (git_versions_from_vcs(tag_prefix, root, verbose)
            or versions_from_parentdir(parentdir_prefix, root, verbose)
            or default)
53
bin/python/ecdsa/curves.py
Normal file
@@ -0,0 +1,53 @@
from __future__ import division

from . import der, ecdsa

class UnknownCurveError(Exception):
    pass

def orderlen(order):
    return (1 + len("%x" % order)) // 2  # bytes

# the NIST curves
class Curve:
    def __init__(self, name, openssl_name,
                 curve, generator, oid):
        self.name = name
        self.openssl_name = openssl_name  # maybe None
        self.curve = curve
        self.generator = generator
        self.order = generator.order()
        self.baselen = orderlen(self.order)
        self.verifying_key_length = 2 * self.baselen
        self.signature_length = 2 * self.baselen
        self.oid = oid
        self.encoded_oid = der.encode_oid(*oid)

NIST192p = Curve("NIST192p", "prime192v1",
                 ecdsa.curve_192, ecdsa.generator_192,
                 (1, 2, 840, 10045, 3, 1, 1))
NIST224p = Curve("NIST224p", "secp224r1",
                 ecdsa.curve_224, ecdsa.generator_224,
                 (1, 3, 132, 0, 33))
NIST256p = Curve("NIST256p", "prime256v1",
                 ecdsa.curve_256, ecdsa.generator_256,
                 (1, 2, 840, 10045, 3, 1, 7))
NIST384p = Curve("NIST384p", "secp384r1",
                 ecdsa.curve_384, ecdsa.generator_384,
                 (1, 3, 132, 0, 34))
NIST521p = Curve("NIST521p", "secp521r1",
                 ecdsa.curve_521, ecdsa.generator_521,
                 (1, 3, 132, 0, 35))
SECP256k1 = Curve("SECP256k1", "secp256k1",
                  ecdsa.curve_secp256k1, ecdsa.generator_secp256k1,
                  (1, 3, 132, 0, 10))

curves = [NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1]

def find_curve(oid_curve):
    for c in curves:
        if c.oid == oid_curve:
            return c
    raise UnknownCurveError("I don't know about the curve with oid %s."
                            "I only know about these: %s" %
                            (oid_curve, [c.name for c in curves]))
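A hypothetical sanity check of orderlen() and the lengths derived from it, run from bin/python where the vendored package is importable:

from ecdsa.curves import NIST192p, SECP256k1

# orderlen() is the byte length of the group order, i.e. ceil(bits / 8).
assert NIST192p.baselen == 24                # 192-bit order
assert NIST192p.signature_length == 48       # r and s, 24 bytes each
assert SECP256k1.verifying_key_length == 64  # x and y, 32 bytes each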
199
bin/python/ecdsa/der.py
Normal file
@@ -0,0 +1,199 @@
from __future__ import division

import binascii
import base64
from .six import int2byte, b, integer_types, text_type

class UnexpectedDER(Exception):
    pass

def encode_constructed(tag, value):
    return int2byte(0xa0 + tag) + encode_length(len(value)) + value

def encode_integer(r):
    assert r >= 0  # can't support negative numbers yet
    h = ("%x" % r).encode()
    if len(h) % 2:
        h = b("0") + h
    s = binascii.unhexlify(h)
    num = s[0] if isinstance(s[0], integer_types) else ord(s[0])
    if num <= 0x7f:
        return b("\x02") + int2byte(len(s)) + s
    else:
        # DER integers are two's complement, so if the first byte is
        # 0x80-0xff then we need an extra 0x00 byte to prevent it from
        # looking negative.
        return b("\x02") + int2byte(len(s) + 1) + b("\x00") + s

def encode_bitstring(s):
    return b("\x03") + encode_length(len(s)) + s

def encode_octet_string(s):
    return b("\x04") + encode_length(len(s)) + s

def encode_oid(first, second, *pieces):
    assert first <= 2
    assert second <= 39
    encoded_pieces = [int2byte(40 * first + second)] + [encode_number(p)
                                                        for p in pieces]
    body = b('').join(encoded_pieces)
    return b('\x06') + encode_length(len(body)) + body

def encode_sequence(*encoded_pieces):
    total_len = sum([len(p) for p in encoded_pieces])
    return b('\x30') + encode_length(total_len) + b('').join(encoded_pieces)

def encode_number(n):
    b128_digits = []
    while n:
        b128_digits.insert(0, (n & 0x7f) | 0x80)
        n = n >> 7
    if not b128_digits:
        b128_digits.append(0)
    b128_digits[-1] &= 0x7f
    return b('').join([int2byte(d) for d in b128_digits])

def remove_constructed(string):
    s0 = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if (s0 & 0xe0) != 0xa0:
        raise UnexpectedDER("wanted constructed tag (0xa0-0xbf), got 0x%02x"
                            % s0)
    tag = s0 & 0x1f
    length, llen = read_length(string[1:])
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return tag, body, rest

def remove_sequence(string):
    if not string.startswith(b("\x30")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted sequence (0x30), got 0x%02x" % n)
    length, lengthlength = read_length(string[1:])
    endseq = 1 + lengthlength + length
    return string[1+lengthlength:endseq], string[endseq:]

def remove_octet_string(string):
    if not string.startswith(b("\x04")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted octetstring (0x04), got 0x%02x" % n)
    length, llen = read_length(string[1:])
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return body, rest

def remove_object(string):
    if not string.startswith(b("\x06")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted object (0x06), got 0x%02x" % n)
    length, lengthlength = read_length(string[1:])
    body = string[1+lengthlength:1+lengthlength+length]
    rest = string[1+lengthlength+length:]
    numbers = []
    while body:
        n, ll = read_number(body)
        numbers.append(n)
        body = body[ll:]
    n0 = numbers.pop(0)
    first = n0 // 40
    second = n0 - (40 * first)
    numbers.insert(0, first)
    numbers.insert(1, second)
    return tuple(numbers), rest

def remove_integer(string):
    if not string.startswith(b("\x02")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted integer (0x02), got 0x%02x" % n)
    length, llen = read_length(string[1:])
    numberbytes = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    nbytes = numberbytes[0] if isinstance(numberbytes[0], integer_types) else ord(numberbytes[0])
    assert nbytes < 0x80  # can't support negative numbers yet
    return int(binascii.hexlify(numberbytes), 16), rest

def read_number(string):
    number = 0
    llen = 0
    # base-128 big endian, with b7 set in all but the last byte
    while True:
        if llen > len(string):
            raise UnexpectedDER("ran out of length bytes")
        number = number << 7
        d = string[llen] if isinstance(string[llen], integer_types) else ord(string[llen])
        number += (d & 0x7f)
        llen += 1
        if not d & 0x80:
            break
    return number, llen

def encode_length(l):
    assert l >= 0
    if l < 0x80:
        return int2byte(l)
    s = ("%x" % l).encode()
    if len(s) % 2:
        s = b("0") + s
    s = binascii.unhexlify(s)
    llen = len(s)
    return int2byte(0x80 | llen) + s

def read_length(string):
    num = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if not (num & 0x80):
        # short form
        return (num & 0x7f), 1
    # else long-form: b0&0x7f is number of additional base256 length bytes,
    # big-endian
    llen = num & 0x7f
    if llen > len(string) - 1:
        raise UnexpectedDER("ran out of length bytes")
    return int(binascii.hexlify(string[1:1+llen]), 16), 1 + llen

def remove_bitstring(string):
    num = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if not string.startswith(b("\x03")):
        raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num)
    length, llen = read_length(string[1:])
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return body, rest

# SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BINTSTRING)


# signatures: (from RFC3279)
#  ansi-X9-62  OBJECT IDENTIFIER ::= {
#       iso(1) member-body(2) us(840) 10045 }
#
#  id-ecSigType OBJECT IDENTIFIER ::= {
#       ansi-X9-62 signatures(4) }
#  ecdsa-with-SHA1 OBJECT IDENTIFIER ::= {
#       id-ecSigType 1 }
## so 1,2,840,10045,4,1
## so 0x42, .. ..

#  Ecdsa-Sig-Value ::= SEQUENCE {
#       r INTEGER,
#       s INTEGER }

# id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 }
#
# id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 }

# I think the secp224r1 identifier is (t=06,l=05,v=2b81040021)
#  secp224r1 OBJECT IDENTIFIER ::= {
#       iso(1) identified-organization(3) certicom(132) curve(0) 33 }
# and the secp384r1 is (t=06,l=05,v=2b81040022)
#  secp384r1 OBJECT IDENTIFIER ::= {
#       iso(1) identified-organization(3) certicom(132) curve(0) 34 }

def unpem(pem):
    if isinstance(pem, text_type):
        pem = pem.encode()

    d = b("").join([l.strip() for l in pem.split(b("\n"))
                    if l and not l.startswith(b("-----"))])
    return base64.b64decode(d)

def topem(der, name):
    b64 = base64.b64encode(der)
    lines = [("-----BEGIN %s-----\n" % name).encode()]
    lines.extend([b64[start:start+64] + b("\n")
                  for start in range(0, len(b64), 64)])
    lines.append(("-----END %s-----\n" % name).encode())
    return b("").join(lines)
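A hypothetical worked example of the length and integer encoding rules above, run from bin/python:

from ecdsa import der

assert der.encode_length(5) == b'\x05'            # short form, length < 0x80
assert der.encode_length(300) == b'\x82\x01\x2c'  # long form: 0x80|2, then 0x012c
assert der.read_length(b'\x82\x01\x2c') == (300, 3)
# An integer whose first byte is >= 0x80 gets a 0x00 pad so it stays positive:
assert der.encode_integer(0x80) == b'\x02\x02\x00\x80'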
576
bin/python/ecdsa/ecdsa.py
Normal file
@@ -0,0 +1,576 @@
#! /usr/bin/env python

"""
Implementation of Elliptic-Curve Digital Signatures.

Classes and methods for elliptic-curve signatures:
private keys, public keys, signatures,
NIST prime-modulus curves with modulus lengths of
192, 224, 256, 384, and 521 bits.

Example:

  # (In real-life applications, you would probably want to
  # protect against defects in SystemRandom.)
  from random import SystemRandom
  randrange = SystemRandom().randrange

  # Generate a public/private key pair using the NIST Curve P-192:

  g = generator_192
  n = g.order()
  secret = randrange( 1, n )
  pubkey = Public_key( g, g * secret )
  privkey = Private_key( pubkey, secret )

  # Signing a hash value:

  hash = randrange( 1, n )
  signature = privkey.sign( hash, randrange( 1, n ) )

  # Verifying a signature for a hash value:

  if pubkey.verifies( hash, signature ):
    print_("Demo verification succeeded.")
  else:
    print_("*** Demo verification failed.")

  # Verification fails if the hash value is modified:

  if pubkey.verifies( hash-1, signature ):
    print_("**** Demo verification failed to reject tampered hash.")
  else:
    print_("Demo verification correctly rejected tampered hash.")

Version of 2009.05.16.

Revision history:
      2005.12.31 - Initial version.
      2008.11.25 - Substantial revisions introducing new classes.
      2009.05.16 - Warn against using random.randrange in real applications.
      2009.05.17 - Use random.SystemRandom by default.

Written in 2005 by Peter Pearson and placed in the public domain.
"""

from .six import int2byte, b, print_
from . import ellipticcurve
from . import numbertheory
import random



class Signature( object ):
  """ECDSA signature.
  """
  def __init__( self, r, s ):
    self.r = r
    self.s = s



class Public_key( object ):
  """Public key for ECDSA.
  """

  def __init__( self, generator, point ):
    """generator is the Point that generates the group,
    point is the Point that defines the public key.
    """

    self.curve = generator.curve()
    self.generator = generator
    self.point = point
    n = generator.order()
    if not n:
      raise RuntimeError("Generator point must have order.")
    if not n * point == ellipticcurve.INFINITY:
      raise RuntimeError("Generator point order is bad.")
    if point.x() < 0 or n <= point.x() or point.y() < 0 or n <= point.y():
      raise RuntimeError("Generator point has x or y out of range.")


  def verifies( self, hash, signature ):
    """Verify that signature is a valid signature of hash.
    Return True if the signature is valid.
    """

    # From X9.62 J.3.1.

    G = self.generator
    n = G.order()
    r = signature.r
    s = signature.s
    if r < 1 or r > n-1: return False
    if s < 1 or s > n-1: return False
    c = numbertheory.inverse_mod( s, n )
    u1 = ( hash * c ) % n
    u2 = ( r * c ) % n
    xy = u1 * G + u2 * self.point
    v = xy.x() % n
    return v == r



class Private_key( object ):
  """Private key for ECDSA.
  """

  def __init__( self, public_key, secret_multiplier ):
    """public_key is of class Public_key;
    secret_multiplier is a large integer.
    """

    self.public_key = public_key
    self.secret_multiplier = secret_multiplier

  def sign( self, hash, random_k ):
    """Return a signature for the provided hash, using the provided
    random nonce. It is absolutely vital that random_k be an unpredictable
    number in the range [1, self.public_key.point.order()-1]. If
    an attacker can guess random_k, he can compute our private key from a
    single signature. Also, if an attacker knows a few high-order
    bits (or a few low-order bits) of random_k, he can compute our private
    key from many signatures. The generation of nonces with adequate
    cryptographic strength is very difficult and far beyond the scope
    of this comment.

    May raise RuntimeError, in which case retrying with a new
    random value k is in order.
    """

    G = self.public_key.generator
    n = G.order()
    k = random_k % n
    p1 = k * G
    r = p1.x()
    if r == 0: raise RuntimeError("amazingly unlucky random number r")
    s = ( numbertheory.inverse_mod( k, n ) * \
          ( hash + ( self.secret_multiplier * r ) % n ) ) % n
    if s == 0: raise RuntimeError("amazingly unlucky random number s")
    return Signature( r, s )



def int_to_string( x ):
  """Convert integer x into a string of bytes, as per X9.62."""
  assert x >= 0
  if x == 0: return b('\0')
  result = []
  while x:
    ordinal = x & 0xFF
    result.append(int2byte(ordinal))
    x >>= 8

  result.reverse()
  return b('').join(result)


def string_to_int( s ):
  """Convert a string of bytes into an integer, as per X9.62."""
  result = 0
  for c in s:
    if not isinstance(c, int): c = ord( c )
    result = 256 * result + c
  return result


def digest_integer( m ):
  """Convert an integer into a string of bytes, compute
     its SHA-1 hash, and convert the result to an integer."""
  #
  # I don't expect this function to be used much. I wrote
  # it in order to be able to duplicate the examples
  # in ECDSAVS.
  #
  from hashlib import sha1
  return string_to_int( sha1( int_to_string( m ) ).digest() )


def point_is_valid( generator, x, y ):
  """Is (x,y) a valid public key based on the specified generator?"""

  # These are the tests specified in X9.62.

  n = generator.order()
  curve = generator.curve()
  if x < 0 or n <= x or y < 0 or n <= y:
    return False
  if not curve.contains_point( x, y ):
    return False
  if not n*ellipticcurve.Point( curve, x, y ) == \
     ellipticcurve.INFINITY:
    return False
  return True



# NIST Curve P-192:
_p = 6277101735386680763835789423207666416083908700390324961279
_r = 6277101735386680763835789423176059013767194773182842284081
# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L
_b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
_Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
_Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811

curve_192 = ellipticcurve.CurveFp( _p, -3, _b )
generator_192 = ellipticcurve.Point( curve_192, _Gx, _Gy, _r )


# NIST Curve P-224:
_p = 26959946667150639794667015087019630673557916260026308143510066298881
_r = 26959946667150639794667015087019625940457807714424391721682722368061
# s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L
# c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL
_b = 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4
_Gx = 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21
_Gy = 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34

curve_224 = ellipticcurve.CurveFp( _p, -3, _b )
generator_224 = ellipticcurve.Point( curve_224, _Gx, _Gy, _r )

# NIST Curve P-256:
_p = 115792089210356248762697446949407573530086143415290314195533631308867097853951
_r = 115792089210356248762697446949407573529996955224135760342422259061068512044369
# s = 0xc49d360886e704936a6678e1139d26b7819f7e90L
# c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL
_b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b
_Gx = 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296
_Gy = 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5

curve_256 = ellipticcurve.CurveFp( _p, -3, _b )
generator_256 = ellipticcurve.Point( curve_256, _Gx, _Gy, _r )

# NIST Curve P-384:
_p = 39402006196394479212279040100143613805079739270465446667948293404245721771496870329047266088258938001861606973112319
_r = 39402006196394479212279040100143613805079739270465446667946905279627659399113263569398956308152294913554433653942643
# s = 0xa335926aa319a27a1d00896a6773a4827acdac73L
# c = 0x79d1e655f868f02fff48dcdee14151ddb80643c1406d0ca10dfe6fc52009540a495e8042ea5f744f6e184667cc722483L
_b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef
_Gx = 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7
_Gy = 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f

curve_384 = ellipticcurve.CurveFp( _p, -3, _b )
generator_384 = ellipticcurve.Point( curve_384, _Gx, _Gy, _r )

# NIST Curve P-521:
_p = 6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151
_r = 6864797660130609714981900799081393217269435300143305409394463459185543183397655394245057746333217197532963996371363321113864768612440380340372808892707005449
# s = 0xd09e8800291cb85396cc6717393284aaa0da64baL
# c = 0x0b48bfa5f420a34949539d2bdfc264eeeeb077688e44fbf0ad8f6d0edb37bd6b533281000518e19f1b9ffbe0fe9ed8a3c2200b8f875e523868c70c1e5bf55bad637L
_b = 0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00
_Gx = 0xc6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66
_Gy = 0x11839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650

curve_521 = ellipticcurve.CurveFp( _p, -3, _b )
generator_521 = ellipticcurve.Point( curve_521, _Gx, _Gy, _r )

# Certicom secp256-k1
_a = 0x0000000000000000000000000000000000000000000000000000000000000000
_b = 0x0000000000000000000000000000000000000000000000000000000000000007
_p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f
_Gx = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
_Gy = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
_r = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141

curve_secp256k1 = ellipticcurve.CurveFp( _p, _a, _b)
generator_secp256k1 = ellipticcurve.Point( curve_secp256k1, _Gx, _Gy, _r)



def __main__():
  class TestFailure(Exception): pass

  def test_point_validity( generator, x, y, expected ):
    """generator defines the curve; is (x,y) a point on
       this curve? "expected" is True if the right answer is Yes."""
    if point_is_valid( generator, x, y ) == expected:
      print_("Point validity tested as expected.")
    else:
      raise TestFailure("*** Point validity test gave wrong result.")

  def test_signature_validity( Msg, Qx, Qy, R, S, expected ):
    """Msg = message, Qx and Qy represent the base point on
       elliptic curve c192, R and S are the signature, and
       "expected" is True iff the signature is expected to be valid."""
    pubk = Public_key( generator_192,
                       ellipticcurve.Point( curve_192, Qx, Qy ) )
    got = pubk.verifies( digest_integer( Msg ), Signature( R, S ) )
    if got == expected:
      print_("Signature tested as expected: got %s, expected %s." % \
             ( got, expected ))
    else:
      raise TestFailure("*** Signature test failed: got %s, expected %s." % \
                        ( got, expected ))

  print_("NIST Curve P-192:")

  p192 = generator_192

  # From X9.62:

  d = 651056770906015076056810763456358567190100156695615665659
  Q = d * p192
  if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
    raise TestFailure("*** p192 * d came out wrong.")
  else:
    print_("p192 * d came out right.")

  k = 6140507067065001063065065565667405560006161556565665656654
  R = k * p192
  if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise TestFailure("*** k * p192 came out wrong.")
  else:
    print_("k * p192 came out right.")

  u1 = 2563697409189434185194736134579731015366492496392189760599
  u2 = 6266643813348617967186477710235785849136406323338782220568
  temp = u1 * p192 + u2 * Q
  if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise TestFailure("*** u1 * p192 + u2 * Q came out wrong.")
  else:
    print_("u1 * p192 + u2 * Q came out right.")

  e = 968236873715988614170569073515315707566766479517
  pubk = Public_key( generator_192, generator_192 * d )
  privk = Private_key( pubk, d )
  sig = privk.sign( e, k )
  r, s = sig.r, sig.s
  if r != 3342403536405981729393488334694600415596881826869351677613 \
     or s != 5735822328888155254683894997897571951568553642892029982342:
    raise TestFailure("*** r or s came out wrong.")
  else:
    print_("r and s came out right.")

  valid = pubk.verifies( e, sig )
  if valid: print_("Signature verified OK.")
  else: raise TestFailure("*** Signature failed verification.")

  valid = pubk.verifies( e-1, sig )
  if not valid: print_("Forgery was correctly rejected.")
  else: raise TestFailure("*** Forgery was erroneously accepted.")

  print_("Testing point validity, as per ECDSAVS.pdf B.2.2:")

  test_point_validity( \
      p192, \
      0xcd6d0f029a023e9aaca429615b8f577abee685d8257cc83a, \
      0x00019c410987680e9fb6c0b6ecc01d9a2647c8bae27721bacdfc, \
      False )

  test_point_validity(
      p192, \
      0x00017f2fce203639e9eaf9fb50b81fc32776b30e3b02af16c73b, \
      0x95da95c5e72dd48e229d4748d4eee658a9a54111b23b2adb, \
      False )

  test_point_validity(
      p192, \
      0x4f77f8bc7fccbadd5760f4938746d5f253ee2168c1cf2792, \
      0x000147156ff824d131629739817edb197717c41aab5c2a70f0f6, \
      False )

  test_point_validity(
      p192, \
      0xc58d61f88d905293bcd4cd0080bcb1b7f811f2ffa41979f6, \
      0x8804dc7a7c4c7f8b5d437f5156f3312ca7d6de8a0e11867f, \
      True )

  test_point_validity(
      p192, \
      0xcdf56c1aa3d8afc53c521adf3ffb96734a6a630a4a5b5a70, \
      0x97c1c44a5fb229007b5ec5d25f7413d170068ffd023caa4e, \
      True )

  test_point_validity(
      p192, \
      0x89009c0dc361c81e99280c8e91df578df88cdf4b0cdedced, \
      0x27be44a529b7513e727251f128b34262a0fd4d8ec82377b9, \
      True )

  test_point_validity(
      p192, \
      0x6a223d00bd22c52833409a163e057e5b5da1def2a197dd15, \
      0x7b482604199367f1f303f9ef627f922f97023e90eae08abf, \
      True )

  test_point_validity(
      p192, \
      0x6dccbde75c0948c98dab32ea0bc59fe125cf0fb1a3798eda, \
      0x0001171a3e0fa60cf3096f4e116b556198de430e1fbd330c8835, \
      False )

  test_point_validity(
      p192, \
      0xd266b39e1f491fc4acbbbc7d098430931cfa66d55015af12, \
      0x193782eb909e391a3148b7764e6b234aa94e48d30a16dbb2, \
      False )

  test_point_validity(
      p192, \
      0x9d6ddbcd439baa0c6b80a654091680e462a7d1d3f1ffeb43, \
      0x6ad8efc4d133ccf167c44eb4691c80abffb9f82b932b8caa, \
      False )

  test_point_validity(
      p192, \
      0x146479d944e6bda87e5b35818aa666a4c998a71f4e95edbc, \
      0xa86d6fe62bc8fbd88139693f842635f687f132255858e7f6, \
      False )

  test_point_validity(
      p192, \
      0xe594d4a598046f3598243f50fd2c7bd7d380edb055802253, \
      0x509014c0c4d6b536e3ca750ec09066af39b4c8616a53a923, \
      False )

  print_("Trying signature-verification tests from ECDSAVS.pdf B.2.4:")
  print_("P-192:")
  Msg = 0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff798cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d158
  Qx = 0xd9dbfb332aa8e5ff091e8ce535857c37c73f6250ffb2e7ac
  Qy = 0x282102e364feded3ad15ddf968f88d8321aa268dd483ebc4
  R = 0x64dca58a20787c488d11d6dd96313f1b766f2d8efe122916
  S = 0x1ecba28141e84ab4ecad92f56720e2cc83eb3d22dec72479
  test_signature_validity( Msg, Qx, Qy, R, S, True )

  Msg = 0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db12e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db326ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63f4
  Qx = 0x3e53ef8d3112af3285c0e74842090712cd324832d4277ae7
  Qy = 0xcc75f8952d30aec2cbb719fc6aa9934590b5d0ff5a83adb7
  R = 0x8285261607283ba18f335026130bab31840dcfd9c3e555af
  S = 0x356d89e1b04541afc9704a45e9c535ce4a50929e33d7e06c
  test_signature_validity( Msg, Qx, Qy, R, S, True )

  Msg = 0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cdd41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d303f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42dd
  Qx = 0x16335dbe95f8e8254a4e04575d736befb258b8657f773cb7
  Qy = 0x421b13379c59bc9dce38a1099ca79bbd06d647c7f6242336
  R = 0x4141bd5d64ea36c5b0bd21ef28c02da216ed9d04522b1e91
  S = 0x159a6aa852bcc579e821b7bb0994c0861fb08280c38daa09
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b563097ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd8a
  Qx = 0xfd14cdf1607f5efb7b1793037b15bdf4baa6f7c16341ab0b
  Qy = 0x83fa0795cc6c4795b9016dac928fd6bac32f3229a96312c4
  R = 0x8dfdb832951e0167c5d762a473c0416c5c15bc1195667dc1
  S = 0x1720288a2dc13fa1ec78f763f8fe2ff7354a7e6fdde44520
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d39192e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bceae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072fb
  Qx = 0x674f941dc1a1f8b763c9334d726172d527b90ca324db8828
  Qy = 0x65adfa32e8b236cb33a3e84cf59bfb9417ae7e8ede57a7ff
  R = 0x9508b9fdd7daf0d8126f9e2bc5a35e4c6d800b5b804d7796
  S = 0x36f2bf6b21b987c77b53bb801b3435a577e3d493744bfab0
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397ce15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aae98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca6d
  Qx = 0x10ecca1aad7220b56a62008b35170bfd5e35885c4014a19f
  Qy = 0x04eb61984c6c12ade3bc47f3c629ece7aa0a033b9948d686
  R = 0x82bfa4e82c0dfe9274169b86694e76ce993fd83b5c60f325
  S = 0xa97685676c59a65dbde002fe9d613431fb183e8006d05633
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a278461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76e1
  Qx = 0x6636653cb5b894ca65c448277b29da3ad101c4c2300f7c04
  Qy = 0xfdf1cbb3fc3fd6a4f890b59e554544175fa77dbdbeb656c1
  R = 0xeac2ddecddfb79931a9c3d49c08de0645c783a24cb365e1c
  S = 0x3549fee3cfa7e5f93bc47d92d8ba100e881a2a93c22f8d50
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6ba2
  Qx = 0xa82bd718d01d354001148cd5f69b9ebf38ff6f21898f8aaa
  Qy = 0xe67ceede07fc2ebfafd62462a51e4b6c6b3d5b537b7caf3e
  R = 0x4d292486c620c3de20856e57d3bb72fcde4a73ad26376955
  S = 0xa85289591a6081d5728825520e62ff1c64f94235c04c7f95
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc910250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb658
  Qx = 0x7d3b016b57758b160c4fca73d48df07ae3b6b30225126c2f
  Qy = 0x4af3790d9775742bde46f8da876711be1b65244b2b39e7ec
  R = 0x95f778f5f656511a5ab49a5d69ddd0929563c29cbc3a9e62
  S = 0x75c87fc358c251b4c83d2dd979faad496b539f9f2ee7a289
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e10288acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c90a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9ea387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c97a
  Qx = 0x9362f28c4ef96453d8a2f849f21e881cd7566887da8beb4a
|
||||
Qy = 0xe64d26d8d74c48a024ae85d982ee74cd16046f4ee5333905
|
||||
R = 0xf3923476a296c88287e8de914b0b324ad5a963319a4fe73b
|
||||
S = 0xf0baeed7624ed00d15244d8ba2aede085517dbdec8ac65f5
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, True )
|
||||
|
||||
Msg = 0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f6450d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d9064e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8ce1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd0456d
|
||||
Qx = 0xcc6fc032a846aaac25533eb033522824f94e670fa997ecef
|
||||
Qy = 0xe25463ef77a029eccda8b294fd63dd694e38d223d30862f1
|
||||
R = 0x066b1d07f3a40e679b620eda7f550842a35c18b80c5ebe06
|
||||
S = 0xa0b0fb201e8f2df65e2c4508ef303bdc90d934016f16b2dc
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214eed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c440341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839d7
|
||||
Qx = 0x955c908fe900a996f7e2089bee2f6376830f76a19135e753
|
||||
Qy = 0xba0c42a91d3847de4a592a46dc3fdaf45a7cc709b90de520
|
||||
R = 0x1f58ad77fc04c782815a1405b0925e72095d906cbf52a668
|
||||
S = 0xf2e93758b3af75edf784f05a6761c9b9a6043c66b845b599
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf9986670a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b41269bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160cef3
|
||||
Qx = 0x31f7fa05576d78a949b24812d4383107a9a45bb5fccdd835
|
||||
Qy = 0x8dc0eb65994a90f02b5e19bd18b32d61150746c09107e76b
|
||||
R = 0xbe26d59e4e883dde7c286614a767b31e49ad88789d3a78ff
|
||||
S = 0x8762ca831c1ce42df77893c9b03119428e7a9b819b619068
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add5023572720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1
|
||||
Qx = 0x66aa8edbbdb5cf8e28ceb51b5bda891cae2df84819fe25c0
|
||||
Qy = 0x0c6bc2f69030a7ce58d4a00e3b3349844784a13b8936f8da
|
||||
R = 0xa4661e69b1734f4a71b788410a464b71e7ffe42334484f23
|
||||
S = 0x738421cf5e049159d69c57a915143e226cac8355e149afe9
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5afa261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6db377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb
|
||||
Qx = 0xbcfacf45139b6f5f690a4c35a5fffa498794136a2353fc77
|
||||
Qy = 0x6f4a6c906316a6afc6d98fe1f0399d056f128fe0270b0f22
|
||||
R = 0x9db679a3dafe48f7ccad122933acfe9da0970b71c94c21c1
|
||||
S = 0x984c2db99827576c0a41a5da41e07d8cc768bc82f18c9da9
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
|
||||
|
||||
print_("Testing the example code:")
|
||||
|
||||
# Building a public/private key pair from the NIST Curve P-192:
|
||||
|
||||
g = generator_192
|
||||
n = g.order()
|
||||
|
||||
# (random.SystemRandom is supposed to provide
|
||||
# crypto-quality random numbers, but as Debian recently
|
||||
# illustrated, a systems programmer can accidentally
|
||||
# demolish this security, so in serious applications
|
||||
# further precautions are appropriate.)
|
||||
|
||||
randrange = random.SystemRandom().randrange
|
||||
|
||||
secret = randrange( 1, n )
|
||||
pubkey = Public_key( g, g * secret )
|
||||
privkey = Private_key( pubkey, secret )
|
||||
|
||||
# Signing a hash value:
|
||||
|
||||
hash = randrange( 1, n )
|
||||
signature = privkey.sign( hash, randrange( 1, n ) )
|
||||
|
||||
# Verifying a signature for a hash value:
|
||||
|
||||
if pubkey.verifies( hash, signature ):
|
||||
print_("Demo verification succeeded.")
|
||||
else:
|
||||
raise TestFailure("*** Demo verification failed.")
|
||||
|
||||
if pubkey.verifies( hash-1, signature ):
|
||||
raise TestFailure( "**** Demo verification failed to reject tampered hash.")
|
||||
else:
|
||||
print_("Demo verification correctly rejected tampered hash.")
|
||||
|
||||
if __name__ == "__main__":
|
||||
__main__()
|
||||
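The RNG caveat flagged in the demo above is what the deterministic-signature path in keys.py (added later in this diff) addresses: k is derived from the private key and the digest per RFC 6979, so no per-signature randomness is needed. A minimal sketch of that higher-level API, assuming the vendored package is importable as `ecdsa`:

import hashlib
from ecdsa import SigningKey, NIST192p

sk = SigningKey.generate(curve=NIST192p)   # secret exponent from randrange()
vk = sk.get_verifying_key()

msg = b"message"
# k comes from RFC 6979 (key + digest), not the platform RNG:
sig = sk.sign_deterministic(msg, hashfunc=hashlib.sha1)
assert vk.verify(sig, msg, hashfunc=hashlib.sha1)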
293
bin/python/ecdsa/ellipticcurve.py
Normal file
@@ -0,0 +1,293 @@
#! /usr/bin/env python
#
# Implementation of elliptic curves, for cryptographic applications.
#
# This module doesn't provide any way to choose a random elliptic
# curve, nor to verify that an elliptic curve was chosen randomly,
# because one can simply use NIST's standard curves.
#
# Notes from X9.62-1998 (draft):
#   Nomenclature:
#     - Q is a public key.
#     The "Elliptic Curve Domain Parameters" include:
#     - q is the "field size", which in our case equals p.
#     - p is a big prime.
#     - G is a point of prime order (5.1.1.1).
#     - n is the order of G (5.1.1.1).
#   Public-key validation (5.2.2):
#     - Verify that Q is not the point at infinity.
#     - Verify that X_Q and Y_Q are in [0,p-1].
#     - Verify that Q is on the curve.
#     - Verify that nQ is the point at infinity.
#   Signature generation (5.3):
#     - Pick random k from [1,n-1].
#   Signature checking (5.4.2):
#     - Verify that r and s are in [1,n-1].
#
# Version of 2008.11.25.
#
# Revision history:
#   2005.12.31 - Initial version.
#   2008.11.25 - Change CurveFp.is_on to contains_point.
#
# Written in 2005 by Peter Pearson and placed in the public domain.

from __future__ import division

from .six import print_
from . import numbertheory

class CurveFp( object ):
  """Elliptic Curve over the field of integers modulo a prime."""
  def __init__( self, p, a, b ):
    """The curve of points satisfying y^2 = x^3 + a*x + b (mod p)."""
    self.__p = p
    self.__a = a
    self.__b = b

  def p( self ):
    return self.__p

  def a( self ):
    return self.__a

  def b( self ):
    return self.__b

  def contains_point( self, x, y ):
    """Is the point (x,y) on this curve?"""
    return ( y * y - ( x * x * x + self.__a * x + self.__b ) ) % self.__p == 0



class Point( object ):
  """A point on an elliptic curve. Altering x and y is forbidden,
     but they can be read by the x() and y() methods."""
  def __init__( self, curve, x, y, order = None ):
    """curve, x, y, order; order (optional) is the order of this point."""
    self.__curve = curve
    self.__x = x
    self.__y = y
    self.__order = order
    # self.curve is allowed to be None only for INFINITY:
    if self.__curve: assert self.__curve.contains_point( x, y )
    if order: assert self * order == INFINITY

  def __eq__( self, other ):
    """Return True if the points are identical, False otherwise."""
    if self.__curve == other.__curve \
       and self.__x == other.__x \
       and self.__y == other.__y:
      return True
    else:
      return False

  def __add__( self, other ):
    """Add one point to another point."""

    # X9.62 B.3:

    if other == INFINITY: return self
    if self == INFINITY: return other
    assert self.__curve == other.__curve
    if self.__x == other.__x:
      if ( self.__y + other.__y ) % self.__curve.p() == 0:
        return INFINITY
      else:
        return self.double()

    p = self.__curve.p()

    l = ( ( other.__y - self.__y ) * \
          numbertheory.inverse_mod( other.__x - self.__x, p ) ) % p

    x3 = ( l * l - self.__x - other.__x ) % p
    y3 = ( l * ( self.__x - x3 ) - self.__y ) % p

    return Point( self.__curve, x3, y3 )

  def __mul__( self, other ):
    """Multiply a point by an integer."""

    def leftmost_bit( x ):
      assert x > 0
      result = 1
      while result <= x: result = 2 * result
      return result // 2

    e = other
    if self.__order: e = e % self.__order
    if e == 0: return INFINITY
    if self == INFINITY: return INFINITY
    assert e > 0

    # From X9.62 D.3.2:

    e3 = 3 * e
    negative_self = Point( self.__curve, self.__x, -self.__y, self.__order )
    i = leftmost_bit( e3 ) // 2
    result = self
    # print_("Multiplying %s by %d (e3 = %d):" % ( self, other, e3 ))
    while i > 1:
      result = result.double()
      if ( e3 & i ) != 0 and ( e & i ) == 0: result = result + self
      if ( e3 & i ) == 0 and ( e & i ) != 0: result = result + negative_self
      # print_(". . . i = %d, result = %s" % ( i, result ))
      i = i // 2

    return result

  def __rmul__( self, other ):
    """Multiply a point by an integer."""

    return self * other

  def __str__( self ):
    if self == INFINITY: return "infinity"
    return "(%d,%d)" % ( self.__x, self.__y )

  def double( self ):
    """Return a new point that is twice the old."""

    if self == INFINITY:
      return INFINITY

    # X9.62 B.3:

    p = self.__curve.p()
    a = self.__curve.a()

    l = ( ( 3 * self.__x * self.__x + a ) * \
          numbertheory.inverse_mod( 2 * self.__y, p ) ) % p

    x3 = ( l * l - 2 * self.__x ) % p
    y3 = ( l * ( self.__x - x3 ) - self.__y ) % p

    return Point( self.__curve, x3, y3 )

  def x( self ):
    return self.__x

  def y( self ):
    return self.__y

  def curve( self ):
    return self.__curve

  def order( self ):
    return self.__order


# This one point is the Point At Infinity for all purposes:
INFINITY = Point( None, None, None )

def __main__():

  class FailedTest(Exception): pass
  def test_add( c, x1, y1, x2, y2, x3, y3 ):
    """We expect that on curve c, (x1,y1) + (x2,y2) = (x3,y3)."""
    p1 = Point( c, x1, y1 )
    p2 = Point( c, x2, y2 )
    p3 = p1 + p2
    print_("%s + %s = %s" % ( p1, p2, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")

  def test_double( c, x1, y1, x3, y3 ):
    """We expect that on curve c, 2*(x1,y1) = (x3,y3)."""
    p1 = Point( c, x1, y1 )
    p3 = p1.double()
    print_("%s doubled = %s" % ( p1, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")

  def test_double_infinity( c ):
    """We expect that on curve c, 2*INFINITY = INFINITY."""
    p1 = INFINITY
    p3 = p1.double()
    print_("%s doubled = %s" % ( p1, p3 ), end=' ')
    if p3.x() != INFINITY.x() or p3.y() != INFINITY.y():
      raise FailedTest("Failure: should give (%s,%s)." % ( INFINITY.x(), INFINITY.y() ))
    else:
      print_(" Good.")

  def test_multiply( c, x1, y1, m, x3, y3 ):
    """We expect that on curve c, m*(x1,y1) = (x3,y3)."""
    p1 = Point( c, x1, y1 )
    p3 = p1 * m
    print_("%s * %d = %s" % ( p1, m, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")


  # A few tests from X9.62 B.3:

  c = CurveFp( 23, 1, 1 )
  test_add( c, 3, 10, 9, 7, 17, 20 )
  test_double( c, 3, 10, 7, 12 )
  test_add( c, 3, 10, 3, 10, 7, 12 )  # (Should just invoke double.)
  test_multiply( c, 3, 10, 2, 7, 12 )

  test_double_infinity(c)

  # From X9.62 I.1 (p. 96):

  g = Point( c, 13, 7, 7 )

  check = INFINITY
  for i in range( 7 + 1 ):
    p = ( i % 7 ) * g
    print_("%s * %d = %s, expected %s . . ." % ( g, i, p, check ), end=' ')
    if p == check:
      print_(" Good.")
    else:
      raise FailedTest("Bad.")
    check = check + g

  # NIST Curve P-192:
  p = 6277101735386680763835789423207666416083908700390324961279
  r = 6277101735386680763835789423176059013767194773182842284081
  #s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
  c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
  b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
  Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
  Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811

  c192 = CurveFp( p, -3, b )
  p192 = Point( c192, Gx, Gy, r )

  # Checking against some sample computations presented
  # in X9.62:

  d = 651056770906015076056810763456358567190100156695615665659
  Q = d * p192
  if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
    raise FailedTest("p192 * d came out wrong.")
  else:
    print_("p192 * d came out right.")

  k = 6140507067065001063065065565667405560006161556565665656654
  R = k * p192
  if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise FailedTest("k * p192 came out wrong.")
  else:
    print_("k * p192 came out right.")

  u1 = 2563697409189434185194736134579731015366492496392189760599
  u2 = 6266643813348617967186477710235785849136406323338782220568
  temp = u1 * p192 + u2 * Q
  if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise FailedTest("u1 * p192 + u2 * Q came out wrong.")
  else:
    print_("u1 * p192 + u2 * Q came out right.")

if __name__ == "__main__":
  __main__()
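The Point class just added implements the X9.62 group law: B.3 addition and doubling, plus the D.3.2 double-and-add ladder in __mul__. A short sketch of how it behaves on the same toy curve the self-test uses, y^2 = x^3 + x + 1 over GF(23), assuming the vendored package layout:

from ecdsa.ellipticcurve import CurveFp, Point, INFINITY

c = CurveFp(23, 1, 1)
p = Point(c, 3, 10)
q = Point(c, 9, 7)

assert p + q == Point(c, 17, 20)      # X9.62 B.3 addition
assert p.double() == Point(c, 7, 12)  # doubling
assert 2 * p == p + p                 # scalar multiply via __rmul__/__mul__
assert p + INFINITY == p              # INFINITY is the identity element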
283
bin/python/ecdsa/keys.py
Normal file
@@ -0,0 +1,283 @@
import binascii

from . import ecdsa
from . import der
from . import rfc6979
from .curves import NIST192p, find_curve
from .util import string_to_number, number_to_string, randrange
from .util import sigencode_string, sigdecode_string
from .util import oid_ecPublicKey, encoded_oid_ecPublicKey
from .six import PY3, b
from hashlib import sha1

class BadSignatureError(Exception):
    pass
class BadDigestError(Exception):
    pass

class VerifyingKey:
    def __init__(self, _error__please_use_generate=None):
        if not _error__please_use_generate:
            raise TypeError("Please use SigningKey.generate() to construct me")

    @classmethod
    def from_public_point(klass, point, curve=NIST192p, hashfunc=sha1):
        self = klass(_error__please_use_generate=True)
        self.curve = curve
        self.default_hashfunc = hashfunc
        self.pubkey = ecdsa.Public_key(curve.generator, point)
        self.pubkey.order = curve.order
        return self

    @classmethod
    def from_string(klass, string, curve=NIST192p, hashfunc=sha1,
                    validate_point=True):
        order = curve.order
        assert len(string) == curve.verifying_key_length, \
               (len(string), curve.verifying_key_length)
        xs = string[:curve.baselen]
        ys = string[curve.baselen:]
        assert len(xs) == curve.baselen, (len(xs), curve.baselen)
        assert len(ys) == curve.baselen, (len(ys), curve.baselen)
        x = string_to_number(xs)
        y = string_to_number(ys)
        if validate_point:
            assert ecdsa.point_is_valid(curve.generator, x, y)
        from . import ellipticcurve
        point = ellipticcurve.Point(curve.curve, x, y, order)
        return klass.from_public_point(point, curve, hashfunc)

    @classmethod
    def from_pem(klass, string):
        return klass.from_der(der.unpem(string))

    @classmethod
    def from_der(klass, string):
        # [[oid_ecPublicKey,oid_curve], point_str_bitstring]
        s1, empty = der.remove_sequence(string)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER pubkey: %s" %
                                    binascii.hexlify(empty))
        s2, point_str_bitstring = der.remove_sequence(s1)
        # s2 = oid_ecPublicKey,oid_curve
        oid_pk, rest = der.remove_object(s2)
        oid_curve, empty = der.remove_object(rest)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER pubkey objects: %s" %
                                    binascii.hexlify(empty))
        assert oid_pk == oid_ecPublicKey, (oid_pk, oid_ecPublicKey)
        curve = find_curve(oid_curve)
        point_str, empty = der.remove_bitstring(point_str_bitstring)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after pubkey pointstring: %s" %
                                    binascii.hexlify(empty))
        assert point_str.startswith(b("\x00\x04"))
        return klass.from_string(point_str[2:], curve)

    def to_string(self):
        # VerifyingKey.from_string(vk.to_string()) == vk as long as the
        # curves are the same: the curve itself is not included in the
        # serialized form
        order = self.pubkey.order
        x_str = number_to_string(self.pubkey.point.x(), order)
        y_str = number_to_string(self.pubkey.point.y(), order)
        return x_str + y_str

    def to_pem(self):
        return der.topem(self.to_der(), "PUBLIC KEY")

    def to_der(self):
        order = self.pubkey.order
        x_str = number_to_string(self.pubkey.point.x(), order)
        y_str = number_to_string(self.pubkey.point.y(), order)
        point_str = b("\x00\x04") + x_str + y_str
        return der.encode_sequence(der.encode_sequence(encoded_oid_ecPublicKey,
                                                       self.curve.encoded_oid),
                                   der.encode_bitstring(point_str))

    def verify(self, signature, data, hashfunc=None, sigdecode=sigdecode_string):
        hashfunc = hashfunc or self.default_hashfunc
        digest = hashfunc(data).digest()
        return self.verify_digest(signature, digest, sigdecode)

    def verify_digest(self, signature, digest, sigdecode=sigdecode_string):
        if len(digest) > self.curve.baselen:
            raise BadDigestError("this curve (%s) is too short "
                                 "for your digest (%d)" % (self.curve.name,
                                                           8*len(digest)))
        number = string_to_number(digest)
        r, s = sigdecode(signature, self.pubkey.order)
        sig = ecdsa.Signature(r, s)
        if self.pubkey.verifies(number, sig):
            return True
        raise BadSignatureError

class SigningKey:
    def __init__(self, _error__please_use_generate=None):
        if not _error__please_use_generate:
            raise TypeError("Please use SigningKey.generate() to construct me")

    @classmethod
    def generate(klass, curve=NIST192p, entropy=None, hashfunc=sha1):
        secexp = randrange(curve.order, entropy)
        return klass.from_secret_exponent(secexp, curve, hashfunc)

    # to create a signing key from a short (arbitrary-length) seed, convert
    # that seed into an integer with something like
    # secexp=util.randrange_from_seed__X(seed, curve.order), and then pass
    # that integer into SigningKey.from_secret_exponent(secexp, curve)

    @classmethod
    def from_secret_exponent(klass, secexp, curve=NIST192p, hashfunc=sha1):
        self = klass(_error__please_use_generate=True)
        self.curve = curve
        self.default_hashfunc = hashfunc
        self.baselen = curve.baselen
        n = curve.order
        assert 1 <= secexp < n
        pubkey_point = curve.generator*secexp
        pubkey = ecdsa.Public_key(curve.generator, pubkey_point)
        pubkey.order = n
        self.verifying_key = VerifyingKey.from_public_point(pubkey_point, curve,
                                                            hashfunc)
        self.privkey = ecdsa.Private_key(pubkey, secexp)
        self.privkey.order = n
        return self

    @classmethod
    def from_string(klass, string, curve=NIST192p, hashfunc=sha1):
        assert len(string) == curve.baselen, (len(string), curve.baselen)
        secexp = string_to_number(string)
        return klass.from_secret_exponent(secexp, curve, hashfunc)

    @classmethod
    def from_pem(klass, string, hashfunc=sha1):
        # the privkey pem file has two sections: "EC PARAMETERS" and "EC
        # PRIVATE KEY". The first is redundant.
        if PY3 and isinstance(string, str):
            string = string.encode()
        privkey_pem = string[string.index(b("-----BEGIN EC PRIVATE KEY-----")):]
        return klass.from_der(der.unpem(privkey_pem), hashfunc)

    @classmethod
    def from_der(klass, string, hashfunc=sha1):
        # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
        #      cont[1],bitstring])
        s, empty = der.remove_sequence(string)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER privkey: %s" %
                                    binascii.hexlify(empty))
        one, s = der.remove_integer(s)
        if one != 1:
            raise der.UnexpectedDER("expected '1' at start of DER privkey,"
                                    " got %d" % one)
        privkey_str, s = der.remove_octet_string(s)
        tag, curve_oid_str, s = der.remove_constructed(s)
        if tag != 0:
            raise der.UnexpectedDER("expected tag 0 in DER privkey,"
                                    " got %d" % tag)
        curve_oid, empty = der.remove_object(curve_oid_str)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER privkey "
                                    "curve_oid: %s" % binascii.hexlify(empty))
        curve = find_curve(curve_oid)

        # we don't actually care about the following fields
        #
        #tag, pubkey_bitstring, s = der.remove_constructed(s)
        #if tag != 1:
        #    raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d"
        #                            % tag)
        #pubkey_str = der.remove_bitstring(pubkey_bitstring)
        #if empty != "":
        #    raise der.UnexpectedDER("trailing junk after DER privkey "
        #                            "pubkeystr: %s" % binascii.hexlify(empty))

        # our from_string method likes fixed-length privkey strings
        if len(privkey_str) < curve.baselen:
            privkey_str = b("\x00")*(curve.baselen-len(privkey_str)) + privkey_str
        return klass.from_string(privkey_str, curve, hashfunc)

    def to_string(self):
        secexp = self.privkey.secret_multiplier
        s = number_to_string(secexp, self.privkey.order)
        return s

    def to_pem(self):
        # TODO: "BEGIN ECPARAMETERS"
        return der.topem(self.to_der(), "EC PRIVATE KEY")

    def to_der(self):
        # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
        #      cont[1],bitstring])
        encoded_vk = b("\x00\x04") + self.get_verifying_key().to_string()
        return der.encode_sequence(der.encode_integer(1),
                                   der.encode_octet_string(self.to_string()),
                                   der.encode_constructed(0, self.curve.encoded_oid),
                                   der.encode_constructed(1, der.encode_bitstring(encoded_vk)),
                                   )

    def get_verifying_key(self):
        return self.verifying_key

    def sign_deterministic(self, data, hashfunc=None, sigencode=sigencode_string):
        hashfunc = hashfunc or self.default_hashfunc
        digest = hashfunc(data).digest()

        return self.sign_digest_deterministic(digest, hashfunc=hashfunc, sigencode=sigencode)

    def sign_digest_deterministic(self, digest, hashfunc=None, sigencode=sigencode_string):
        """
        Calculates 'k' from the data itself, removing the need for a strong
        random generator and producing deterministic (reproducible) signatures.
        See RFC 6979 for more details.
        """
        secexp = self.privkey.secret_multiplier
        k = rfc6979.generate_k(
            self.curve.generator.order(), secexp, hashfunc, digest)

        return self.sign_digest(digest, sigencode=sigencode, k=k)

    def sign(self, data, entropy=None, hashfunc=None, sigencode=sigencode_string, k=None):
        """
        hashfunc= should behave like hashlib.sha1. The output length of the
        hash (in bytes) must not be longer than the length of the curve order
        (rounded up to the nearest byte), so using SHA256 with nist256p is
        ok, but SHA256 with nist192p is not. (In the 2**-96ish unlikely event
        of a hash output larger than the curve order, the hash will
        effectively be wrapped mod n).

        Use hashfunc=hashlib.sha1 to match openssl's -ecdsa-with-SHA1 mode,
        or hashfunc=hashlib.sha256 for openssl-1.0.0's -ecdsa-with-SHA256.
        """

        hashfunc = hashfunc or self.default_hashfunc
        h = hashfunc(data).digest()
        return self.sign_digest(h, entropy, sigencode, k)

    def sign_digest(self, digest, entropy=None, sigencode=sigencode_string, k=None):
        if len(digest) > self.curve.baselen:
            raise BadDigestError("this curve (%s) is too short "
                                 "for your digest (%d)" % (self.curve.name,
                                                           8*len(digest)))
        number = string_to_number(digest)
        r, s = self.sign_number(number, entropy, k)
        return sigencode(r, s, self.privkey.order)

    def sign_number(self, number, entropy=None, k=None):
        # returns a pair of numbers
        order = self.privkey.order
        # privkey.sign() may raise RuntimeError in the amazingly unlikely
        # (2**-192) event that r=0 or s=0, because that would leak the key.
        # We could re-try with a different 'k', but we couldn't test that
        # code, so I choose to allow the signature to fail instead.

        # If k is set, it is used directly. Otherwise it is
        # generated using the entropy function.
        if k is not None:
            _k = k
        else:
            _k = randrange(order, entropy)

        assert 1 <= _k < order
        sig = self.privkey.sign(number, _k)
        return sig.r, sig.s
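A usage note for the serialization methods above (a sketch, assuming the upstream python-ecdsa API these files vendor): to_string() deliberately omits the curve, so the caller must pass the same curve back to from_string(), while the PEM/DER forms carry the curve OID and recover it via find_curve():

from ecdsa import SigningKey, VerifyingKey, NIST192p

sk = SigningKey.generate(curve=NIST192p)
sk2 = SigningKey.from_string(sk.to_string(), curve=NIST192p)  # curve re-supplied
assert sk.to_string() == sk2.to_string()

vk = sk.get_verifying_key()
vk2 = VerifyingKey.from_pem(vk.to_pem())   # curve recovered from the DER OID
assert vk.to_string() == vk2.to_string()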