Compare commits


1 Commit

Author SHA1 Message Date
Nik Bougalis
b277c353a8 Eliminate dead bytes from nodestore encoding:
The legacy nodestore encoding for blobs had an 8 byte prefix. Originally
the first 4 bytes stored the sequence number of the ledger to which the
encoded object belonged; the next 4 bytes were never specified rigidly
and some versions stored a redundant copy of the ledger sequence while
others simply set the bytes to 0.

Eventually that scheme was abandoned, and the first 8 bytes were just
set to 0 and ignored when reloading a blob.

This commit removes the unused 8 byte legacy prefix. The change
makes it impossible to load old databases with this code and for old
versions to load databases created with this code.
2023-01-07 15:36:34 -08:00
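The change described in the commit message only moves the position of the type byte within a stored blob. Below is a minimal sketch of the two layouts, assuming [8 dead bytes][type byte][payload] for the legacy encoding and [type byte][payload] for the new one; ParsedObject, decodeLegacy, and decodeCompact are hypothetical names chosen for illustration, not rippled's actual DecodedBlob/EncodedBlob interface.

// Illustrative only: ParsedObject, decodeLegacy, and decodeCompact are
// hypothetical names, not rippled's DecodedBlob/EncodedBlob interface.
#include <cstddef>
#include <cstdint>
#include <optional>
#include <vector>

struct ParsedObject
{
    std::uint8_t type;               // node object type byte
    std::vector<std::uint8_t> data;  // payload that follows the header
};

// Legacy layout: 8 dead bytes (formerly the ledger sequence), then the
// type byte, then the payload.
std::optional<ParsedObject>
decodeLegacy(std::uint8_t const* p, std::size_t n)
{
    if (n < 9)
        return std::nullopt;
    return ParsedObject{p[8], std::vector<std::uint8_t>(p + 9, p + n)};
}

// Compact layout: the type byte comes first, saving 8 bytes per object.
std::optional<ParsedObject>
decodeCompact(std::uint8_t const* p, std::size_t n)
{
    if (n < 1)
        return std::nullopt;
    return ParsedObject{p[0], std::vector<std::uint8_t>(p + 1, p + n)};
}

Because the two layouts disagree on where the type byte sits, a node built from this commit cannot read a database written by earlier versions, and older versions cannot read databases it writes, which is exactly the incompatibility the commit message calls out.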
654 changed files with 46719 additions and 126787 deletions

5
.codecov.yml Normal file
View File

@@ -0,0 +1,5 @@
codecov:
ci:
- !appveyor
- travis

View File

@@ -1,7 +0,0 @@
# Ignore everything
*
# Allow files and directories
!/build-core.sh
!/build-full.sh
!/release-builder.sh

View File

@@ -33,27 +33,10 @@ Please check [x] relevant options, delete irrelevant ones.
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Refactor (non-breaking change that only restructures code)
- [ ] Tests (you added tests for code that already exists, or your new feature included in this PR)
- [ ] Documentation update
- [ ] Chore (no impact to binary, e.g. `.gitignore`, formatting, dropping support for older tooling)
- [ ] Tests (You added tests for code that already exists, or your new feature included in this PR)
- [ ] Documentation Updates
- [ ] Release
### API Impact
<!--
Please check [x] relevant options, delete irrelevant ones.
* If there is any impact to the public API methods (HTTP / WebSocket), please update https://github.com/xrplf/rippled/blob/develop/API-CHANGELOG.md
* Update API-CHANGELOG.md and add the change directly in this PR by pushing to your PR branch.
* libxrpl: See https://github.com/XRPLF/rippled/blob/develop/docs/build/depend.md
* Peer Protocol: See https://xrpl.org/peer-protocol.html
-->
- [ ] Public API: New feature (new methods and/or new fields)
- [ ] Public API: Breaking change (in general, breaking changes should only impact the next api_version)
- [ ] `libxrpl` change (any change that may affect `libxrpl` or dependents of `libxrpl`)
- [ ] Peer protocol change (must be backward compatible or bump the peer protocol version)
<!--
## Before / After
If relevant, use this section for an English description of the change at a technical level.
@@ -69,4 +52,4 @@ This section may not be needed if your change includes thoroughly commented unit
<!--
## Future Tasks
For future tasks related to PR.
-->
-->

View File

@@ -1,105 +0,0 @@
name: Build using Docker
on:
push:
branches: ["dev", "candidate", "release", "jshooks"]
pull_request:
branches: ["dev", "candidate", "release", "jshooks"]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP: 1
jobs:
checkout:
runs-on: [self-hosted, vanity]
outputs:
checkout_path: ${{ steps.vars.outputs.checkout_path }}
steps:
- name: Prepare checkout path
id: vars
run: |
SAFE_BRANCH=$(echo "${{ github.ref_name }}" | sed -e 's/[^a-zA-Z0-9._-]/-/g')
CHECKOUT_PATH="${SAFE_BRANCH}-${{ github.sha }}"
echo "checkout_path=${CHECKOUT_PATH}" >> "$GITHUB_OUTPUT"
- uses: actions/checkout@v4
with:
path: ${{ steps.vars.outputs.checkout_path }}
clean: true
fetch-depth: 2 # Only get the last 2 commits, to avoid fetching all history
checkpatterns:
runs-on: [self-hosted, vanity]
needs: checkout
defaults:
run:
working-directory: ${{ needs.checkout.outputs.checkout_path }}
steps:
- name: Check for suspicious patterns
run: /bin/bash suspicious_patterns.sh
build:
runs-on: [self-hosted, vanity]
needs: [checkpatterns, checkout]
defaults:
run:
working-directory: ${{ needs.checkout.outputs.checkout_path }}
steps:
- name: Set Cleanup Script Path
run: |
echo "JOB_CLEANUP_SCRIPT=$(mktemp)" >> $GITHUB_ENV
- name: Build using Docker
run: /bin/bash release-builder.sh
- name: Stop Container (Cleanup)
if: always()
run: |
echo "Running cleanup script: $JOB_CLEANUP_SCRIPT"
/bin/bash -e -x "$JOB_CLEANUP_SCRIPT"
CLEANUP_EXIT_CODE=$?
if [[ "$CLEANUP_EXIT_CODE" -eq 0 ]]; then
echo "Cleanup script succeeded."
rm -f "$JOB_CLEANUP_SCRIPT"
echo "Cleanup script removed."
else
echo "⚠️ Cleanup script failed! Keeping for debugging: $JOB_CLEANUP_SCRIPT"
fi
if [[ "${DEBUG_BUILD_CONTAINERS_AFTER_CLEANUP}" == "1" ]]; then
echo "🔍 Checking for leftover containers..."
BUILD_CONTAINERS=$(docker ps --format '{{.Names}}' | grep '^xahaud_cached_builder' || echo "")
if [[ -n "$BUILD_CONTAINERS" ]]; then
echo "⚠️ WARNING: Some build containers are still running"
echo "$BUILD_CONTAINERS"
else
echo "✅ No build containers found"
fi
fi
tests:
runs-on: [self-hosted, vanity]
needs: [build, checkout]
defaults:
run:
working-directory: ${{ needs.checkout.outputs.checkout_path }}
steps:
- name: Unit tests
run: /bin/bash docker-unit-tests.sh
cleanup:
runs-on: [self-hosted, vanity]
needs: [tests, checkout]
if: always()
steps:
- name: Cleanup workspace
run: |
CHECKOUT_PATH="${{ needs.checkout.outputs.checkout_path }}"
echo "Cleaning workspace for ${CHECKOUT_PATH}"
rm -rf "${{ github.workspace }}/${CHECKOUT_PATH}"

View File

@@ -4,32 +4,20 @@ on: [push, pull_request]
jobs:
check:
runs-on: ubuntu-22.04
runs-on: ubuntu-18.04
env:
CLANG_VERSION: 10
steps:
- uses: actions/checkout@v3
# - name: Install clang-format
# run: |
# codename=$( lsb_release --codename --short )
# sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
# deb http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
# deb-src http://apt.llvm.org/${codename}/ llvm-toolchain-${codename}-${CLANG_VERSION} main
# EOF
# wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
# sudo apt-get update -y
# sudo apt-get install -y clang-format-${CLANG_VERSION}
# Temporary fix until this commit is merged
# https://github.com/XRPLF/rippled/commit/552377c76f55b403a1c876df873a23d780fcc81c
- name: Download and install clang-format
- uses: actions/checkout@v2
- name: Install clang-format
run: |
sudo apt-get update -y
sudo apt-get install -y libtinfo5
curl -LO https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
tar -xf clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
sudo mv clang+llvm-10.0.1-x86_64-linux-gnu-ubuntu-16.04 /opt/clang-10
sudo ln -s /opt/clang-10/bin/clang-format /usr/local/bin/clang-format-10
sudo tee /etc/apt/sources.list.d/llvm.list >/dev/null <<EOF
deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-${CLANG_VERSION} main
deb-src http://apt.llvm.org/bionic/ llvm-toolchain-bionic-${CLANG_VERSION} main
EOF
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add
sudo apt-get update
sudo apt-get install clang-format-${CLANG_VERSION}
- name: Format src/ripple
run: find src/ripple -type f \( -name '*.cpp' -o -name '*.h' -o -name '*.ipp' \) -print0 | xargs -0 clang-format-${CLANG_VERSION} -i
- name: Format src/test
@@ -41,7 +29,7 @@ jobs:
git diff --exit-code | tee "clang-format.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
continue-on-error: true
with:
name: clang-format.patch
@@ -63,7 +51,7 @@ jobs:
To fix it, you can do one of two things:
1. Download and apply the patch generated as an artifact of this
job to your repo, commit, and push.
2. Run 'git-clang-format --extensions c,cpp,h,cxx,ipp dev'
2. Run 'git-clang-format --extensions c,cpp,h,cxx,ipp develop'
in your repo, commit, and push.
run: |
echo "${PREAMBLE}"

View File

@@ -2,11 +2,11 @@ name: Build and publish Doxygen documentation
on:
push:
branches:
- dev
- develop
jobs:
job:
runs-on: ubuntu-latest
runs-on: ubuntu-18.04
container:
image: docker://rippleci/rippled-ci-builder:2944b78d22db
steps:

View File

@@ -4,11 +4,11 @@ on: [push, pull_request]
jobs:
check:
runs-on: ubuntu-latest
runs-on: ubuntu-18.04
env:
CLANG_VERSION: 10
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Check levelization
run: Builds/levelization/levelization.sh
- name: Check for differences
@@ -18,7 +18,7 @@ jobs:
git diff --exit-code | tee "levelization.patch"
- name: Upload patch
if: failure() && steps.assert.outcome == 'failure'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
continue-on-error: true
with:
name: levelization.patch

5
.gitignore vendored
View File

@@ -111,8 +111,3 @@ compile_commands.json
.clangd
packages
pkg_out
pkg
CMakeUserPresets.json
bld.rippled/
generated

169
.gitlab-ci.yml Normal file
View File

@@ -0,0 +1,169 @@
# I don't know what the minimum size is, but we cannot build on t3.micro.
# TODO: Factor common builds between different tests.
# The parameters for our job matrix:
#
# 1. Generator (Make, Ninja, MSBuild)
# 2. Compiler (GCC, Clang, MSVC)
# 3. Build type (Debug, Release)
# 4. Definitions (-Dunity=OFF, -Dassert=ON, ...)
.job_linux_build_test:
only:
variables:
- $CI_PROJECT_URL =~ /^https?:\/\/gitlab.com\//
stage: build
tags:
- linux
- c5.2xlarge
image: thejohnfreeman/rippled-build-ubuntu:4b73694e07f0
script:
- bin/ci/build.sh
- bin/ci/test.sh
cache:
# Use a different key for each unique combination of (generator, compiler,
# build type). Caches are stored as `.zip` files; they are not merged.
# Generate a new key whenever you want to bust the cache, e.g. when the
# dependency versions have been bumped.
# By default, jobs pull the cache. Only a few specially chosen jobs update
# the cache (with policy `pull-push`); one for each unique combination of
# (generator, compiler, build type).
policy: pull
paths:
- .nih_c/
'build+test Make GCC Debug':
extends: .job_linux_build_test
variables:
GENERATOR: Unix Makefiles
COMPILER: gcc
BUILD_TYPE: Debug
cache:
key: 62ada41c-fc9e-4949-9533-736d4d6512b6
policy: pull-push
'build+test Ninja GCC Debug':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: gcc
BUILD_TYPE: Debug
cache:
key: 1665d3eb-6233-4eef-9f57-172636899faa
policy: pull-push
'build+test Ninja GCC Debug -Dstatic=OFF':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: gcc
BUILD_TYPE: Debug
CMAKE_ARGS: '-Dstatic=OFF'
cache:
key: 1665d3eb-6233-4eef-9f57-172636899faa
'build+test Ninja GCC Debug -Dstatic=OFF -DBUILD_SHARED_LIBS=ON':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: gcc
BUILD_TYPE: Debug
CMAKE_ARGS: '-Dstatic=OFF -DBUILD_SHARED_LIBS=ON'
cache:
key: 1665d3eb-6233-4eef-9f57-172636899faa
'build+test Ninja GCC Debug -Dunity=OFF':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: gcc
BUILD_TYPE: Debug
CMAKE_ARGS: '-Dunity=OFF'
cache:
key: 1665d3eb-6233-4eef-9f57-172636899faa
'build+test Ninja GCC Release -Dassert=ON':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: gcc
BUILD_TYPE: Release
CMAKE_ARGS: '-Dassert=ON'
cache:
key: c45ec125-9625-4c19-acf7-4e889d5f90bd
policy: pull-push
'build+test(manual) Ninja GCC Release -Dassert=ON':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: gcc
BUILD_TYPE: Release
CMAKE_ARGS: '-Dassert=ON'
MANUAL_TEST: 'true'
cache:
key: c45ec125-9625-4c19-acf7-4e889d5f90bd
'build+test Make clang Debug':
extends: .job_linux_build_test
variables:
GENERATOR: Unix Makefiles
COMPILER: clang
BUILD_TYPE: Debug
cache:
key: bf578dc2-5277-4580-8de5-6b9523118b19
policy: pull-push
'build+test Ninja clang Debug':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: clang
BUILD_TYPE: Debug
cache:
key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe
policy: pull-push
'build+test Ninja clang Debug -Dunity=OFF':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: clang
BUILD_TYPE: Debug
CMAKE_ARGS: '-Dunity=OFF'
cache:
key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe
'build+test Ninja clang Debug -Dunity=OFF -Dsan=address':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: clang
BUILD_TYPE: Debug
CMAKE_ARGS: '-Dunity=OFF -Dsan=address'
CONCURRENT_TESTS: 1
cache:
key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe
'build+test Ninja clang Debug -Dunity=OFF -Dsan=undefined':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: clang
BUILD_TYPE: Debug
CMAKE_ARGS: '-Dunity=OFF -Dsan=undefined'
cache:
key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe
'build+test Ninja clang Release -Dassert=ON':
extends: .job_linux_build_test
variables:
GENERATOR: Ninja
COMPILER: clang
BUILD_TYPE: Release
CMAKE_ARGS: '-Dassert=ON'
cache:
key: 7751be37-2358-4f08-b1d0-7e72e0ad266d
policy: pull-push

View File

@@ -1,6 +0,0 @@
# .pre-commit-config.yaml
repos:
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v10.0.1
hooks:
- id: clang-format

460
.travis.yml Normal file
View File

@@ -0,0 +1,460 @@
# There is a known issue where Travis will have trouble fetching the cache,
# particularly on non-linux builds. Try restarting the individual build
# (probably will not be necessary in the "windep" stages) if the end of the
# log looks like:
#
#---------------------------------------
# attempting to download cache archive
# fetching travisorder/cache--windows-1809-containers-f2bf1c76c7fb4095c897a4999bd7c9b3fb830414dfe91f33d665443b52416d39--compiler-gpp.tgz
# found cache
# adding C:/Users/travis/_cache to cache
# creating directory C:/Users/travis/_cache
# No output has been received in the last 10m0s, this potentially indicates a stalled build or something wrong with the build itself.
# Check the details on how to adjust your build configuration on: https://docs.travis-ci.com/user/common-build-problems/#build-times-out-because-no-output-was-received
# The build has been terminated
#---------------------------------------
language: cpp
dist: bionic
services:
- docker
stages:
- windep-vcpkg
- windep-boost
- build
env:
global:
- DOCKER_IMAGE="rippleci/rippled-ci-builder:2020-01-08"
- CMAKE_EXTRA_ARGS="-Dwerr=ON -Dwextra=ON"
- NINJA_BUILD=true
# change this if we get more VM capacity
- MAX_TIME_MIN=80
- CACHE_DIR=${TRAVIS_HOME}/_cache
- NIH_CACHE_ROOT=${CACHE_DIR}/nih_c
- PARALLEL_TESTS=true
# this is NOT used by linux container based builds (which already have boost installed)
- BOOST_URL='https://boostorg.jfrog.io/artifactory/main/release/1.75.0/source/boost_1_75_0.tar.gz'
# Alternate download location
- BOOST_URL2='https://downloads.sourceforge.net/project/boost/boost/1.75.0/boost_1_75_0.tar.bz2?r=&ts=1594393912&use_mirror=newcontinuum'
# Travis downloader doesn't seem to have updated certs. Using this option
# introduces obvious security risks, but they're Travis's risks.
# Note that this option is only used if the "normal" build fails.
- BOOST_WGET_OPTIONS='--no-check-certificate'
- VCPKG_DIR=${CACHE_DIR}/vcpkg
- USE_CCACHE=true
- CCACHE_BASEDIR=${TRAVIS_HOME}"
- CCACHE_NOHASHDIR=true
- CCACHE_DIR=${CACHE_DIR}/ccache
before_install:
- export NUM_PROCESSORS=$(nproc)
- echo "NUM PROC is ${NUM_PROCESSORS}"
- if [ "$(uname)" = "Linux" ] ; then docker pull ${DOCKER_IMAGE}; fi
- if [ "${MATRIX_EVAL}" != "" ] ; then eval "${MATRIX_EVAL}"; fi
- if [ "${CMAKE_ADD}" != "" ] ; then export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} ${CMAKE_ADD}"; fi
- bin/ci/ubuntu/travis-cache-start.sh
matrix:
fast_finish: true
allow_failures:
# TODO these need more investigation
#
# there are a number of UBs caught currently that need triage
- name: ubsan, clang-8
# this one often runs out of memory:
- name: manual tests, gcc-8, release
# The Windows build may fail if any of the dependencies fail, but
# allow the rest of the builds to continue. They may succeed if the
# dependency is already cached. These do not need to be retried if
# _any_ of the Windows builds succeed.
- stage: windep-vcpkg
- stage: windep-boost
# https://docs.travis-ci.com/user/build-config-yaml#usage-of-yaml-anchors-and-aliases
include:
# debug builds
- &linux
stage: build
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/
compiler: gcc-8
name: gcc-8, debug
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
script:
- sudo chmod -R a+rw ${CACHE_DIR}
- ccache -s
- travis_wait ${MAX_TIME_MIN} bin/ci/ubuntu/build-in-docker.sh
- ccache -s
- <<: *linux
compiler: clang-8
name: clang-8, debug
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Debug
- <<: *linux
compiler: clang-8
name: reporting, clang-8, debug
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dreporting=ON"
# coverage builds
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_cov/
compiler: gcc-8
name: coverage, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dcoverage=ON"
- TARGET=coverage_report
- SKIP_TESTS=true
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_cov/
compiler: clang-8
name: coverage, clang-8
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dcoverage=ON"
- TARGET=coverage_report
- SKIP_TESTS=true
# test-free builds
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/
compiler: gcc-8
name: no-tests-unity, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dtests=OFF"
- SKIP_TESTS=true
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/
compiler: clang-8
name: no-tests-non-unity, clang-8
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dtests=OFF -Dunity=OFF"
- SKIP_TESTS=true
# nounity
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_nounity/
compiler: gcc-8
name: non-unity, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dunity=OFF"
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_nounity/
compiler: clang-8
name: non-unity, clang-8
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dunity=OFF"
# manual tests
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_man/
compiler: gcc-8
name: manual tests, gcc-8, debug
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- MANUAL_TESTS=true
# manual tests
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_man/
compiler: gcc-8
name: manual tests, gcc-8, release
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dassert=ON -Dunity=OFF"
- MANUAL_TESTS=true
# release builds
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_release/
compiler: gcc-8
name: gcc-8, release
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dassert=ON -Dunity=OFF"
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_release/
compiler: clang-8
name: clang-8, release
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dassert=ON"
# asan
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_san/
compiler: clang-8
name: asan, clang-8
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dsan=address"
- ASAN_OPTIONS="print_stats=true:atexit=true"
#- LSAN_OPTIONS="verbosity=1:log_threads=1"
- PARALLEL_TESTS=false
# ubsan
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_san/
compiler: clang-8
name: ubsan, clang-8
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dsan=undefined"
# once we can run clean under ubsan, add halt_on_error=1 to options below
- UBSAN_OPTIONS="print_stacktrace=1:report_error_type=1"
- PARALLEL_TESTS=false
# tsan
# current tsan failure *might* be related to:
# https://github.com/google/sanitizers/issues/1104
# but we can't get it to run, so leave it disabled for now
# - <<: *linux
# if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_san/
# compiler: clang-8
# name: tsan, clang-8
# env:
# - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
# - BUILD_TYPE=Release
# - CMAKE_ADD="-Dsan=thread"
# - TSAN_OPTIONS="history_size=3 external_symbolizer_path=/usr/bin/llvm-symbolizer verbosity=1"
# - PARALLEL_TESTS=false
# dynamic lib builds
- <<: *linux
compiler: gcc-8
name: non-static, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dstatic=OFF"
- <<: *linux
compiler: gcc-8
name: non-static + BUILD_SHARED_LIBS, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dstatic=OFF -DBUILD_SHARED_LIBS=ON"
# makefile
- <<: *linux
compiler: gcc-8
name: makefile generator, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- NINJA_BUILD=false
# misc alternative compilers
- <<: *linux
compiler: gcc-9
name: gcc-9
env:
- MATRIX_EVAL="CC=gcc-9 && CXX=g++-9"
- BUILD_TYPE=Debug
- <<: *linux
compiler: clang-9
name: clang-9, debug
env:
- MATRIX_EVAL="CC=clang-9 && CXX=clang++-9"
- BUILD_TYPE=Debug
- <<: *linux
compiler: clang-9
name: clang-9, release
env:
- MATRIX_EVAL="CC=clang-9 && CXX=clang++-9"
- BUILD_TYPE=Release
# verify build with min version of cmake
- <<: *linux
compiler: gcc-8
name: min cmake version
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_EXE=/opt/local/cmake/bin/cmake
- SKIP_TESTS=true
# validator keys project as subproj of rippled
- <<: *linux
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_vkeys/
compiler: gcc-8
name: validator-keys
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dvalidator_keys=ON"
- TARGET=validator-keys
# macos
- &macos
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_mac/
stage: build
os: osx
osx_image: xcode13.1
name: xcode13.1, debug
env:
# put NIH in non-cache location since it seems to
# cause failures when homebrew updates
- NIH_CACHE_ROOT=${TRAVIS_BUILD_DIR}/nih_c
- BLD_CONFIG=Debug
- TEST_EXTRA_ARGS=""
- BOOST_ROOT=${CACHE_DIR}/boost_1_75_0
- >-
CMAKE_ADD="
-DBOOST_ROOT=${BOOST_ROOT}/_INSTALLED_
-DBoost_ARCHITECTURE=-x64
-DBoost_NO_SYSTEM_PATHS=ON
-DCMAKE_VERBOSE_MAKEFILE=ON"
addons:
homebrew:
packages:
- protobuf
- grpc
- pkg-config
- bash
- ninja
- cmake
- wget
- zstd
- libarchive
- openssl@1.1
update: true
install:
- export OPENSSL_ROOT=$(brew --prefix openssl@1.1)
- travis_wait ${MAX_TIME_MIN} Builds/containers/shared/install_boost.sh
- brew uninstall --ignore-dependencies boost
script:
- mkdir -p build.macos && cd build.macos
- cmake -G Ninja ${CMAKE_EXTRA_ARGS} -DCMAKE_BUILD_TYPE=${BLD_CONFIG} ..
- travis_wait ${MAX_TIME_MIN} cmake --build . --parallel --verbose
- ./rippled --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS} ${TEST_EXTRA_ARGS}
- <<: *macos
name: xcode13.1, release
before_script:
- export BLD_CONFIG=Release
- export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} -Dassert=ON"
- <<: *macos
name: ipv6 (macos)
before_script:
- export TEST_EXTRA_ARGS="--unittest-ipv6"
- <<: *macos
osx_image: xcode13.1
name: xcode13.1, debug
# windows
- &windows
if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_win/
os: windows
env:
# put NIH in a non-cached location until
# we come up with a way to stabilize that
# cache on windows (minimize incremental changes)
- CACHE_NAME=win_01
- NIH_CACHE_ROOT=${TRAVIS_BUILD_DIR}/nih_c
- VCPKG_DEFAULT_TRIPLET="x64-windows-static"
- MATRIX_EVAL="CC=cl.exe && CXX=cl.exe"
- BOOST_ROOT=${CACHE_DIR}/boost_1_75
- >-
CMAKE_ADD="
-DCMAKE_PREFIX_PATH=${BOOST_ROOT}/_INSTALLED_
-DBOOST_ROOT=${BOOST_ROOT}/_INSTALLED_
-DBoost_ROOT=${BOOST_ROOT}/_INSTALLED_
-DBoost_DIR=${BOOST_ROOT}/_INSTALLED_/lib/cmake/Boost-1.75.0
-DBoost_COMPILER=vc141
-DCMAKE_VERBOSE_MAKEFILE=ON
-DCMAKE_TOOLCHAIN_FILE=${VCPKG_DIR}/scripts/buildsystems/vcpkg.cmake
-DVCPKG_TARGET_TRIPLET=x64-windows-static"
stage: windep-vcpkg
name: prereq-vcpkg
install:
- choco upgrade cmake.install
- choco install ninja visualstudio2017-workload-vctools -y
script:
- df -h
- env
- travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh openssl
- travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh grpc
- travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh libarchive[lz4]
# TBD consider rocksdb via vcpkg if/when we can build with the
# vcpkg version
# - travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh rocksdb[snappy,lz4,zlib]
- <<: *windows
stage: windep-boost
name: prereq-keep-boost
install:
- choco upgrade cmake.install
- choco install ninja visualstudio2017-workload-vctools -y
- choco install visualstudio2019buildtools visualstudio2019community visualstudio2019-workload-vctools -y
script:
- export BOOST_TOOLSET=msvc-14.1
- travis_wait ${MAX_TIME_MIN} Builds/containers/shared/install_boost.sh
- &windows-bld
<<: *windows
stage: build
name: windows, debug
before_script:
- export BLD_CONFIG=Debug
script:
- df -h
- . ./bin/sh/setup-msvc.sh
- mkdir -p build.ms && cd build.ms
- cmake -G Ninja ${CMAKE_EXTRA_ARGS} -DCMAKE_BUILD_TYPE=${BLD_CONFIG} ..
- travis_wait ${MAX_TIME_MIN} cmake --build . --parallel --verbose
# override num procs to force fewer unit test jobs
- export NUM_PROCESSORS=2
- travis_wait ${MAX_TIME_MIN} ./rippled.exe --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS}
- <<: *windows-bld
name: windows, release
before_script:
- export BLD_CONFIG=Release
- <<: *windows-bld
name: windows, visual studio, debug
script:
- mkdir -p build.ms && cd build.ms
- export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} -DCMAKE_GENERATOR_TOOLSET=host=x64"
- cmake -G "Visual Studio 15 2017 Win64" ${CMAKE_EXTRA_ARGS} ..
- export DESTDIR=${PWD}/_installed_
- travis_wait ${MAX_TIME_MIN} cmake --build . --parallel --verbose --config ${BLD_CONFIG} --target install
# override num procs to force fewer unit test jobs
- export NUM_PROCESSORS=2
- >-
travis_wait ${MAX_TIME_MIN} "./_installed_/Program Files/rippled/bin/rippled.exe" --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS}
- <<: *windows-bld
name: windows, vc2019
install:
- choco upgrade cmake.install
- choco install ninja -y
- choco install visualstudio2019buildtools visualstudio2019community visualstudio2019-workload-vctools -y
before_script:
- export BLD_CONFIG=Release
# we want to use the boost build from cache, which was built using the
# vs2017 compiler so we need to specify the Boost_COMPILER. BUT, we
# can't use the cmake config files generated by boost b/c they are
# broken for Boost_COMPILER override, so we need to specify both
# Boost_NO_BOOST_CMAKE and a slightly different Boost_COMPILER string
# to make the legacy find module work for us. If the cmake configs are
# fixed in the future, it should be possible to remove these
# workarounds.
- export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} -DBoost_NO_BOOST_CMAKE=ON -DBoost_COMPILER=-vc141"
before_cache:
- if [ $(uname) = "Linux" ] ; then SUDO="sudo"; else SUDO=""; fi
- cd ${TRAVIS_HOME}
- if [ -f cache_ignore.tar ] ; then $SUDO tar xvf cache_ignore.tar; fi
- cd ${TRAVIS_BUILD_DIR}
cache:
timeout: 900
directories:
- $CACHE_DIR
notifications:
email: false

13
.vscode/settings.json vendored
View File

@@ -1,13 +0,0 @@
{
"C_Cpp.formatting": "clangFormat",
"C_Cpp.clang_format_path": ".clang-format",
"C_Cpp.clang_format_fallbackStyle": "{ ColumnLimit: 0 }",
"[cpp]":{
"editor.wordBasedSuggestions": "off",
"editor.suggest.insertMode": "replace",
"editor.semanticHighlighting.enabled": true,
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
"editor.formatOnSave": false
}
}

View File

@@ -124,10 +124,7 @@ else ()
# * static option set and
# * NOT APPLE (AppleClang does not support static libc/c++) and
# * NOT san (sanitizers typically don't work with static libc/c++)
$<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${san}>>>:
-static-libstdc++
-static-libgcc
>)
$<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${san}>>>:-static-libstdc++>)
endif ()
if (use_gold AND is_gcc)

View File

@@ -13,15 +13,6 @@ if (unity)
set_target_properties(xrpl_core PROPERTIES UNITY_BUILD ON)
endif ()
# Try to find the ACL library
find_library(ACL_LIBRARY NAMES acl)
# Check if ACL was found
if(ACL_LIBRARY)
message(STATUS "Found ACL: ${ACL_LIBRARY}")
else()
message(STATUS "ACL not found, continuing without ACL support")
endif()
#[===============================[
beast/legacy FILES:
@@ -59,7 +50,6 @@ target_sources (xrpl_core PRIVATE
src/ripple/basics/impl/FileUtilities.cpp
src/ripple/basics/impl/IOUAmount.cpp
src/ripple/basics/impl/Log.cpp
src/ripple/basics/impl/Number.cpp
src/ripple/basics/impl/StringUtilities.cpp
#[===============================[
main sources:
@@ -164,7 +154,6 @@ install (
src/ripple/basics/LocalValue.h
src/ripple/basics/Log.h
src/ripple/basics/MathUtilities.h
src/ripple/basics/Number.h
src/ripple/basics/safe_cast.h
src/ripple/basics/Slice.h
src/ripple/basics/spinlock.h
@@ -392,7 +381,6 @@ target_sources (rippled PRIVATE
src/ripple/app/misc/NegativeUNLVote.cpp
src/ripple/app/misc/NetworkOPs.cpp
src/ripple/app/misc/SHAMapStoreImp.cpp
src/ripple/app/misc/StateAccounting.cpp
src/ripple/app/misc/detail/impl/WorkSSL.cpp
src/ripple/app/misc/impl/AccountTxPaging.cpp
src/ripple/app/misc/impl/AmendmentTable.cpp
@@ -434,17 +422,13 @@ target_sources (rippled PRIVATE
src/ripple/app/tx/impl/CancelOffer.cpp
src/ripple/app/tx/impl/CashCheck.cpp
src/ripple/app/tx/impl/Change.cpp
src/ripple/app/tx/impl/ClaimReward.cpp
src/ripple/app/tx/impl/CreateCheck.cpp
src/ripple/app/tx/impl/CreateOffer.cpp
src/ripple/app/tx/impl/CreateTicket.cpp
src/ripple/app/tx/impl/DeleteAccount.cpp
src/ripple/app/tx/impl/DepositPreauth.cpp
src/ripple/app/tx/impl/Escrow.cpp
src/ripple/app/tx/impl/GenesisMint.cpp
src/ripple/app/tx/impl/Import.cpp
src/ripple/app/tx/impl/InvariantCheck.cpp
src/ripple/app/tx/impl/Invoke.cpp
src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp
src/ripple/app/tx/impl/NFTokenBurn.cpp
src/ripple/app/tx/impl/NFTokenCancelOffer.cpp
@@ -453,17 +437,16 @@ target_sources (rippled PRIVATE
src/ripple/app/tx/impl/OfferStream.cpp
src/ripple/app/tx/impl/PayChan.cpp
src/ripple/app/tx/impl/Payment.cpp
src/ripple/app/tx/impl/Remit.cpp
src/ripple/app/tx/impl/SetAccount.cpp
src/ripple/app/tx/impl/SetHook.cpp
src/ripple/app/tx/impl/SetRemarks.cpp
src/ripple/app/tx/impl/SetRegularKey.cpp
src/ripple/app/tx/impl/SetHook.cpp
src/ripple/app/tx/impl/ClaimReward.cpp
src/ripple/app/tx/impl/Invoke.cpp
src/ripple/app/tx/impl/SetSignerList.cpp
src/ripple/app/tx/impl/SetTrust.cpp
src/ripple/app/tx/impl/SignerEntries.cpp
src/ripple/app/tx/impl/Taker.cpp
src/ripple/app/tx/impl/Transactor.cpp
src/ripple/app/tx/impl/URIToken.cpp
src/ripple/app/tx/impl/apply.cpp
src/ripple/app/tx/impl/applySteps.cpp
src/ripple/app/hook/impl/applyHook.cpp
@@ -540,9 +523,7 @@ target_sources (rippled PRIVATE
subdir: nodestore
#]===============================]
src/ripple/nodestore/backend/CassandraFactory.cpp
src/ripple/nodestore/backend/RWDBFactory.cpp
src/ripple/nodestore/backend/MemoryFactory.cpp
src/ripple/nodestore/backend/FlatmapFactory.cpp
src/ripple/nodestore/backend/NuDBFactory.cpp
src/ripple/nodestore/backend/NullFactory.cpp
src/ripple/nodestore/backend/RocksDBFactory.cpp
@@ -554,6 +535,7 @@ target_sources (rippled PRIVATE
src/ripple/nodestore/impl/DeterministicShard.cpp
src/ripple/nodestore/impl/DecodedBlob.cpp
src/ripple/nodestore/impl/DummyScheduler.cpp
src/ripple/nodestore/impl/EncodedBlob.cpp
src/ripple/nodestore/impl/ManagerImp.cpp
src/ripple/nodestore/impl/NodeObject.cpp
src/ripple/nodestore/impl/Shard.cpp
@@ -607,7 +589,6 @@ target_sources (rippled PRIVATE
src/ripple/rpc/handlers/BlackList.cpp
src/ripple/rpc/handlers/BookOffers.cpp
src/ripple/rpc/handlers/CanDelete.cpp
src/ripple/rpc/handlers/Catalogue.cpp
src/ripple/rpc/handlers/Connect.cpp
src/ripple/rpc/handlers/ConsensusInfo.cpp
src/ripple/rpc/handlers/CrawlShards.cpp
@@ -643,7 +624,6 @@ target_sources (rippled PRIVATE
src/ripple/rpc/handlers/Random.cpp
src/ripple/rpc/handlers/Reservations.cpp
src/ripple/rpc/handlers/RipplePathFind.cpp
src/ripple/rpc/handlers/ServerDefinitions.cpp
src/ripple/rpc/handlers/ServerInfo.cpp
src/ripple/rpc/handlers/ServerState.cpp
src/ripple/rpc/handlers/SignFor.cpp
@@ -663,9 +643,9 @@ target_sources (rippled PRIVATE
src/ripple/rpc/handlers/ValidatorListSites.cpp
src/ripple/rpc/handlers/Validators.cpp
src/ripple/rpc/handlers/WalletPropose.cpp
src/ripple/rpc/handlers/Catalogue.cpp
src/ripple/rpc/impl/DeliveredAmount.cpp
src/ripple/rpc/impl/Handler.cpp
src/ripple/rpc/impl/GRPCHelpers.cpp
src/ripple/rpc/impl/LegacyPathFind.cpp
src/ripple/rpc/impl/RPCHandler.cpp
src/ripple/rpc/impl/RPCHelpers.cpp
@@ -675,9 +655,6 @@ target_sources (rippled PRIVATE
src/ripple/rpc/impl/ShardVerificationScheduler.cpp
src/ripple/rpc/impl/Status.cpp
src/ripple/rpc/impl/TransactionSign.cpp
src/ripple/rpc/impl/NFTokenID.cpp
src/ripple/rpc/impl/NFTokenOfferID.cpp
src/ripple/rpc/impl/NFTSyntheticSerializer.cpp
#[===============================[
main sources:
subdir: perflog
@@ -713,9 +690,7 @@ if (tests)
src/test/app/AccountDelete_test.cpp
src/test/app/AccountTxPaging_test.cpp
src/test/app/AmendmentTable_test.cpp
src/test/app/BaseFee_test.cpp
src/test/app/Check_test.cpp
src/test/app/ClaimReward_test.cpp
src/test/app/CrossingLimits_test.cpp
src/test/app/DeliverMin_test.cpp
src/test/app/DepositAuth_test.cpp
@@ -725,13 +700,9 @@ if (tests)
src/test/app/FeeVote_test.cpp
src/test/app/Flow_test.cpp
src/test/app/Freeze_test.cpp
src/test/app/GenesisMint_test.cpp
src/test/app/HashRouter_test.cpp
src/test/app/Import_test.cpp
src/test/app/Invoke_test.cpp
src/test/app/LedgerHistory_test.cpp
src/test/app/LedgerLoad_test.cpp
src/test/app/LedgerMaster_test.cpp
src/test/app/LedgerReplay_test.cpp
src/test/app/LoadFeeTrack_test.cpp
src/test/app/Manifest_test.cpp
@@ -750,27 +721,20 @@ if (tests)
src/test/app/RCLCensorshipDetector_test.cpp
src/test/app/RCLValidations_test.cpp
src/test/app/Regression_test.cpp
src/test/app/Remit_test.cpp
src/test/app/SHAMapStore_test.cpp
src/test/app/SetAuth_test.cpp
src/test/app/SetHook_test.cpp
src/test/app/SetHookTSH_test.cpp
src/test/app/SetRegularKey_test.cpp
src/test/app/SetRemarks_test.cpp
src/test/app/SetTrust_test.cpp
src/test/app/Taker_test.cpp
src/test/app/TheoreticalQuality_test.cpp
src/test/app/Ticket_test.cpp
src/test/app/Touch_test.cpp
src/test/app/Transaction_ordering_test.cpp
src/test/app/TrustAndBalance_test.cpp
src/test/app/TxQ_test.cpp
src/test/app/URIToken_test.cpp
src/test/app/ValidatorKeys_test.cpp
src/test/app/ValidatorList_test.cpp
src/test/app/ValidatorSite_test.cpp
src/test/app/Wildcard_test.cpp
src/test/app/XahauGenesis_test.cpp
src/test/app/SetHook_test.cpp
src/test/app/tx/apply_test.cpp
#[===============================[
test sources:
@@ -782,7 +746,6 @@ if (tests)
src/test/basics/FileUtilities_test.cpp
src/test/basics/IOUAmount_test.cpp
src/test/basics/KeyCache_test.cpp
src/test/basics/Number_test.cpp
src/test/basics/PerfLog_test.cpp
src/test/basics/RangeSet_test.cpp
src/test/basics/scope_test.cpp
@@ -830,7 +793,6 @@ if (tests)
src/test/consensus/LedgerTrie_test.cpp
src/test/consensus/NegativeUNL_test.cpp
src/test/consensus/ScaleFreeSim_test.cpp
src/test/consensus/UNLReport_test.cpp
src/test/consensus/Validations_test.cpp
#[===============================[
test sources:
@@ -881,29 +843,20 @@ if (tests)
src/test/jtx/impl/delivermin.cpp
src/test/jtx/impl/deposit.cpp
src/test/jtx/impl/envconfig.cpp
src/test/jtx/impl/escrow.cpp
src/test/jtx/impl/fee.cpp
src/test/jtx/impl/flags.cpp
src/test/jtx/impl/genesis.cpp
src/test/jtx/impl/import.cpp
src/test/jtx/impl/invoice_id.cpp
src/test/jtx/impl/invoke.cpp
src/test/jtx/impl/jtx_json.cpp
src/test/jtx/impl/last_ledger_sequence.cpp
src/test/jtx/impl/memo.cpp
src/test/jtx/impl/multisign.cpp
src/test/jtx/impl/network.cpp
src/test/jtx/impl/offer.cpp
src/test/jtx/impl/owners.cpp
src/test/jtx/impl/paths.cpp
src/test/jtx/impl/pay.cpp
src/test/jtx/impl/paychan.cpp
src/test/jtx/impl/quality2.cpp
src/test/jtx/impl/rate.cpp
src/test/jtx/impl/regkey.cpp
src/test/jtx/impl/reward.cpp
src/test/jtx/impl/remarks.cpp
src/test/jtx/impl/remit.cpp
src/test/jtx/impl/sendmax.cpp
src/test/jtx/impl/seq.cpp
src/test/jtx/impl/sig.cpp
@@ -912,8 +865,6 @@ if (tests)
src/test/jtx/impl/token.cpp
src/test/jtx/impl/trust.cpp
src/test/jtx/impl/txflags.cpp
src/test/jtx/impl/unl.cpp
src/test/jtx/impl/uritoken.cpp
src/test/jtx/impl/utility.cpp
#[===============================[
@@ -967,8 +918,8 @@ if (tests)
src/test/protocol/BuildInfo_test.cpp
src/test/protocol/InnerObjectFormats_test.cpp
src/test/protocol/Issue_test.cpp
src/test/protocol/KnownFormatToGRPC_test.cpp
src/test/protocol/Hooks_test.cpp
src/test/protocol/Memo_test.cpp
src/test/protocol/PublicKey_test.cpp
src/test/protocol/Quality_test.cpp
src/test/protocol/STAccount_test.cpp
@@ -995,15 +946,14 @@ if (tests)
src/test/rpc/AccountLinesRPC_test.cpp
src/test/rpc/AccountObjects_test.cpp
src/test/rpc/AccountOffers_test.cpp
src/test/rpc/AccountNamespace_test.cpp
src/test/rpc/AccountSet_test.cpp
src/test/rpc/AccountTx_test.cpp
src/test/rpc/AmendmentBlocked_test.cpp
src/test/rpc/Book_test.cpp
src/test/rpc/Catalogue_test.cpp
src/test/rpc/DepositAuthorized_test.cpp
src/test/rpc/DeliveredAmount_test.cpp
src/test/rpc/Feature_test.cpp
src/test/rpc/Fee_test.cpp
src/test/rpc/GatewayBalances_test.cpp
src/test/rpc/GetCounts_test.cpp
src/test/rpc/JSONRPC_test.cpp
@@ -1023,14 +973,15 @@ if (tests)
src/test/rpc/RPCCall_test.cpp
src/test/rpc/RPCOverload_test.cpp
src/test/rpc/RobustTransaction_test.cpp
src/test/rpc/ServerDefinitions_test.cpp
src/test/rpc/ServerInfo_test.cpp
src/test/rpc/ShardArchiveHandler_test.cpp
src/test/rpc/Status_test.cpp
src/test/rpc/Submit_test.cpp
src/test/rpc/Subscribe_test.cpp
src/test/rpc/Transaction_test.cpp
src/test/rpc/TransactionEntry_test.cpp
src/test/rpc/TransactionHistory_test.cpp
src/test/rpc/Tx_test.cpp
src/test/rpc/ValidatorInfo_test.cpp
src/test/rpc/ValidatorRPC_test.cpp
src/test/rpc/Version_test.cpp
@@ -1082,7 +1033,3 @@ if (tests)
src/test/rpc/ShardArchiveHandler_test.cpp
PROPERTIES SKIP_UNITY_BUILD_INCLUSION TRUE)
endif () #tests
if(ACL_LIBRARY)
target_link_libraries(rippled ${ACL_LIBRARY})
endif()

View File

@@ -0,0 +1,204 @@
#[===================================================================[
package/container targets - (optional)
#]===================================================================]
if (is_root_project)
if (NOT DOCKER)
find_program (DOCKER docker)
endif ()
if (DOCKER)
# if no container label is provided, use current git hash
git_hash (commit_hash)
if (NOT container_label)
set (container_label ${commit_hash})
endif ()
message (STATUS "using [${container_label}] as build container tag...")
file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/packages)
file (MAKE_DIRECTORY ${NIH_CACHE_ROOT}/pkgbuild)
if (is_linux)
execute_process (COMMAND id -u
OUTPUT_VARIABLE DOCKER_USER_ID
OUTPUT_STRIP_TRAILING_WHITESPACE)
message (STATUS "docker local user id: ${DOCKER_USER_ID}")
execute_process (COMMAND id -g
OUTPUT_VARIABLE DOCKER_GROUP_ID
OUTPUT_STRIP_TRAILING_WHITESPACE)
message (STATUS "docker local group id: ${DOCKER_GROUP_ID}")
endif ()
if (DOCKER_USER_ID AND DOCKER_GROUP_ID)
set(map_user TRUE)
endif ()
#[===================================================================[
rpm
#]===================================================================]
add_custom_target (rpm_container
docker build
--pull
--build-arg GIT_COMMIT=${commit_hash}
-t rippled-rpm-builder:${container_label}
$<$<BOOL:${rpm_cache_from}>:--cache-from=${rpm_cache_from}>
-f centos-builder/Dockerfile .
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/Builds/containers
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/centos-builder/Dockerfile
Builds/containers/centos-builder/centos_setup.sh
Builds/containers/centos-builder/extras.sh
Builds/containers/shared/update-rippled.sh
Builds/containers/shared/update_sources.sh
Builds/containers/shared/rippled.service
Builds/containers/shared/rippled-reporting.service
Builds/containers/shared/build_deps.sh
Builds/containers/packaging/rpm/rippled.spec
Builds/containers/packaging/rpm/build_rpm.sh
Builds/containers/packaging/rpm/50-rippled.preset
Builds/containers/packaging/rpm/50-rippled-reporting.preset
bin/getRippledInfo
)
exclude_from_default (rpm_container)
add_custom_target (rpm
docker run
-e NIH_CACHE_ROOT=/opt/rippled_bld/pkg/.nih_c
-v ${NIH_CACHE_ROOT}/pkgbuild:/opt/rippled_bld/pkg/.nih_c
-v ${CMAKE_CURRENT_SOURCE_DIR}:/opt/rippled_bld/pkg/rippled
-v ${CMAKE_CURRENT_BINARY_DIR}/packages:/opt/rippled_bld/pkg/out
"$<$<BOOL:${map_user}>:--volume=/etc/passwd:/etc/passwd;--volume=/etc/group:/etc/group;--user=${DOCKER_USER_ID}:${DOCKER_GROUP_ID}>"
-t rippled-rpm-builder:${container_label}
/bin/bash -c "cp -fpu rippled/Builds/containers/packaging/rpm/build_rpm.sh . && ./build_rpm.sh"
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/packaging/rpm/rippled.spec
)
exclude_from_default (rpm)
if (NOT have_package_container)
add_dependencies(rpm rpm_container)
endif ()
#[===================================================================[
dpkg
#]===================================================================]
# currently use ubuntu 16.04 as a base b/c it has one of
# the lower versions of libc among ubuntu and debian releases.
# we could change this in the future and build with some other deb
# based system.
add_custom_target (dpkg_container
docker build
--pull
--build-arg DIST_TAG=18.04
--build-arg GIT_COMMIT=${commit_hash}
-t rippled-dpkg-builder:${container_label}
$<$<BOOL:${dpkg_cache_from}>:--cache-from=${dpkg_cache_from}>
-f ubuntu-builder/Dockerfile .
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/Builds/containers
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/packaging/dpkg/debian/rippled-reporting.links
Builds/containers/packaging/dpkg/debian/copyright
Builds/containers/packaging/dpkg/debian/rules
Builds/containers/packaging/dpkg/debian/rippled-reporting.install
Builds/containers/packaging/dpkg/debian/rippled-reporting.postinst
Builds/containers/packaging/dpkg/debian/rippled.links
Builds/containers/packaging/dpkg/debian/rippled.prerm
Builds/containers/packaging/dpkg/debian/rippled.postinst
Builds/containers/packaging/dpkg/debian/rippled-dev.install
Builds/containers/packaging/dpkg/debian/dirs
Builds/containers/packaging/dpkg/debian/rippled.postrm
Builds/containers/packaging/dpkg/debian/rippled.conffiles
Builds/containers/packaging/dpkg/debian/compat
Builds/containers/packaging/dpkg/debian/source/format
Builds/containers/packaging/dpkg/debian/source/local-options
Builds/containers/packaging/dpkg/debian/README.Debian
Builds/containers/packaging/dpkg/debian/rippled.install
Builds/containers/packaging/dpkg/debian/rippled.preinst
Builds/containers/packaging/dpkg/debian/docs
Builds/containers/packaging/dpkg/debian/control
Builds/containers/packaging/dpkg/debian/rippled-reporting.dirs
Builds/containers/packaging/dpkg/build_dpkg.sh
Builds/containers/ubuntu-builder/Dockerfile
Builds/containers/ubuntu-builder/ubuntu_setup.sh
bin/getRippledInfo
Builds/containers/shared/install_cmake.sh
Builds/containers/shared/install_boost.sh
Builds/containers/shared/update-rippled.sh
Builds/containers/shared/update_sources.sh
Builds/containers/shared/build_deps.sh
Builds/containers/shared/rippled.service
Builds/containers/shared/rippled-reporting.service
Builds/containers/shared/rippled-logrotate
Builds/containers/shared/update-rippled-cron
)
exclude_from_default (dpkg_container)
add_custom_target (dpkg
docker run
-e NIH_CACHE_ROOT=/opt/rippled_bld/pkg/.nih_c
-v ${NIH_CACHE_ROOT}/pkgbuild:/opt/rippled_bld/pkg/.nih_c
-v ${CMAKE_CURRENT_SOURCE_DIR}:/opt/rippled_bld/pkg/rippled
-v ${CMAKE_CURRENT_BINARY_DIR}/packages:/opt/rippled_bld/pkg/out
"$<$<BOOL:${map_user}>:--volume=/etc/passwd:/etc/passwd;--volume=/etc/group:/etc/group;--user=${DOCKER_USER_ID}:${DOCKER_GROUP_ID}>"
-t rippled-dpkg-builder:${container_label}
/bin/bash -c "cp -fpu rippled/Builds/containers/packaging/dpkg/build_dpkg.sh . && ./build_dpkg.sh"
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/packaging/dpkg/debian/control
)
exclude_from_default (dpkg)
if (NOT have_package_container)
add_dependencies(dpkg dpkg_container)
endif ()
#[===================================================================[
ci container
#]===================================================================]
# now use the same ubuntu image for our travis-ci docker images,
# but we use a newer distro (18.04 vs 16.04).
#
# the following steps assume the github pkg repo, but it's possible to
# adapt these for other docker hub repositories.
#
# steps for publishing a new CI image when you make changes:
#
# mkdir bld.ci && cd bld.ci && cmake -Dpackages_only=ON -Dcontainer_label=CI_LATEST
# cmake --build . --target ci_container --verbose
# docker tag rippled-ci-builder:CI_LATEST <HUB REPO PATH>/rippled-ci-builder:YYYY-MM-DD
# (NOTE: change YYYY-MM-DD to match current date, or use a different
# tag/version scheme if you prefer)
# docker push <HUB REPO PATH>/rippled-ci-builder:YYYY-MM-DD
# (NOTE: <HUB REPO PATH> is probably your user or org name if using
# docker hub, or it might be something like
# docker.pkg.github.com/ripple/rippled if using the github pkg
# registry. for any registry, you will need to be logged-in via
# docker and have push access.)
#
# ...then change the DOCKER_IMAGE line in .travis.yml :
# - DOCKER_IMAGE="<HUB REPO PATH>/rippled-ci-builder:YYYY-MM-DD"
add_custom_target (ci_container
docker build
--pull
--build-arg DIST_TAG=18.04
--build-arg GIT_COMMIT=${commit_hash}
--build-arg CI_USE=true
-t rippled-ci-builder:${container_label}
$<$<BOOL:${ci_cache_from}>:--cache-from=${ci_cache_from}>
-f ubuntu-builder/Dockerfile .
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/Builds/containers
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/ubuntu-builder/Dockerfile
Builds/containers/ubuntu-builder/ubuntu_setup.sh
Builds/containers/shared/build_deps.sh
)
exclude_from_default (ci_container)
else ()
message (STATUS "docker NOT found -- won't be able to build containers for packaging")
endif ()
endif ()

View File

@@ -1,106 +0,0 @@
################################################################################
# SociConfig.cmake - CMake build configuration of SOCI library
################################################################################
# Copyright (C) 2010 Mateusz Loskot <mateusz@loskot.net>
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
################################################################################
include(CheckCXXSymbolExists)
if(WIN32)
check_cxx_symbol_exists("_M_AMD64" "" SOCI_TARGET_ARCH_X64)
if(NOT RTC_ARCH_X64)
check_cxx_symbol_exists("_M_IX86" "" SOCI_TARGET_ARCH_X86)
endif(NOT RTC_ARCH_X64)
# add check for arm here
# see http://msdn.microsoft.com/en-us/library/b0084kay.aspx
else(WIN32)
check_cxx_symbol_exists("__i386__" "" SOCI_TARGET_ARCH_X86)
check_cxx_symbol_exists("__x86_64__" "" SOCI_TARGET_ARCH_X64)
check_cxx_symbol_exists("__arm__" "" SOCI_TARGET_ARCH_ARM)
endif(WIN32)
if(NOT DEFINED LIB_SUFFIX)
if(SOCI_TARGET_ARCH_X64)
set(_lib_suffix "64")
else()
set(_lib_suffix "")
endif()
set(LIB_SUFFIX ${_lib_suffix} CACHE STRING "Specifies suffix for the lib directory")
endif()
#
# C++11 Option
#
if(NOT SOCI_CXX_C11)
set (SOCI_CXX_C11 OFF CACHE BOOL "Build to the C++11 standard")
endif()
#
# Force compilation flags and set desired warnings level
#
if (MSVC)
add_definitions(-D_CRT_SECURE_NO_DEPRECATE)
add_definitions(-D_CRT_SECURE_NO_WARNINGS)
add_definitions(-D_CRT_NONSTDC_NO_WARNING)
add_definitions(-D_SCL_SECURE_NO_WARNINGS)
if(CMAKE_CXX_FLAGS MATCHES "/W[0-4]")
string(REGEX REPLACE "/W[0-4]" "/W4" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4 /we4266")
endif()
else()
set(SOCI_GCC_CLANG_COMMON_FLAGS "")
# "-pedantic -Werror -Wno-error=parentheses -Wall -Wextra -Wpointer-arith -Wcast-align -Wcast-qual -Wfloat-equal -Woverloaded-virtual -Wredundant-decls -Wno-long-long")
if (SOCI_CXX_C11)
set(SOCI_CXX_VERSION_FLAGS "-std=c++11")
else()
set(SOCI_CXX_VERSION_FLAGS "-std=gnu++98")
endif()
if("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang" OR "${CMAKE_CXX_COMPILER}" MATCHES "clang")
if(NOT CMAKE_CXX_COMPILER_VERSION LESS 3.1 AND SOCI_ASAN)
set(SOCI_GCC_CLANG_COMMON_FLAGS "${SOCI_GCC_CLANG_COMMON_FLAGS} -fsanitize=address")
endif()
# enforce C++11 for Clang
set(SOCI_CXX_C11 ON)
set(SOCI_CXX_VERSION_FLAGS "-std=c++11")
add_definitions(-DCATCH_CONFIG_CPP11_NO_IS_ENUM)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SOCI_GCC_CLANG_COMMON_FLAGS} ${SOCI_CXX_VERSION_FLAGS}")
elseif(CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
if(NOT CMAKE_CXX_COMPILER_VERSION LESS 4.8 AND SOCI_ASAN)
set(SOCI_GCC_CLANG_COMMON_FLAGS "${SOCI_GCC_CLANG_COMMON_FLAGS} -fsanitize=address")
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SOCI_GCC_CLANG_COMMON_FLAGS} ${SOCI_CXX_VERSION_FLAGS} ")
if (CMAKE_COMPILER_IS_GNUCXX)
if (CMAKE_SYSTEM_NAME MATCHES "FreeBSD")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-variadic-macros")
endif()
endif()
else()
message(WARNING "Unknown toolset - using default flags to build SOCI")
endif()
endif()
# Set SOCI_HAVE_* variables for soci-config.h generator
set(SOCI_HAVE_CXX_C11 ${SOCI_CXX_C11} CACHE INTERNAL "Enables C++11 support")

View File

@@ -1,16 +1,14 @@
#[===================================================================[
NIH dep: boost
#]===================================================================]
if((NOT DEFINED BOOST_ROOT) AND(DEFINED ENV{BOOST_ROOT}))
set(BOOST_ROOT $ENV{BOOST_ROOT})
endif()
if((NOT DEFINED BOOST_LIBRARYDIR) AND(DEFINED ENV{BOOST_LIBRARYDIR}))
set(BOOST_LIBRARYDIR $ENV{BOOST_LIBRARYDIR})
endif()
file(TO_CMAKE_PATH "${BOOST_ROOT}" BOOST_ROOT)
if(WIN32 OR CYGWIN)
# Workaround for MSVC having two boost versions - x86 and x64 on same PC in stage folders
if((NOT DEFINED BOOST_LIBRARYDIR) AND (DEFINED BOOST_ROOT))
if(DEFINED BOOST_ROOT)
if(IS_DIRECTORY ${BOOST_ROOT}/stage64/lib)
set(BOOST_LIBRARYDIR ${BOOST_ROOT}/stage64/lib)
elseif(IS_DIRECTORY ${BOOST_ROOT}/stage/lib)
@@ -46,7 +44,7 @@ else()
endif()
# TBD:
# Boost_USE_DEBUG_RUNTIME: When ON, uses Boost libraries linked against the
find_package(Boost 1.86 REQUIRED
find_package(Boost 1.70 REQUIRED
COMPONENTS
chrono
container
@@ -57,7 +55,6 @@ find_package(Boost 1.86 REQUIRED
program_options
regex
system
iostreams
thread)
add_library(ripple_boost INTERFACE)
@@ -77,7 +74,6 @@ target_link_libraries(ripple_boost
Boost::coroutine
Boost::date_time
Boost::filesystem
Boost::iostreams
Boost::program_options
Boost::regex
Boost::system

View File

@@ -248,7 +248,6 @@ include(FindPackageHandleStandardArgs)
# Save project's policies
cmake_policy(PUSH)
cmake_policy(SET CMP0057 NEW) # if IN_LIST
#cmake_policy(SET CMP0144 NEW)
#-------------------------------------------------------------------------------
# Before we go searching, check whether a boost cmake package is available, unless
@@ -970,24 +969,7 @@ function(_Boost_COMPONENT_DEPENDENCIES component _ret)
set(_Boost_WAVE_DEPENDENCIES filesystem serialization thread chrono date_time atomic)
set(_Boost_WSERIALIZATION_DEPENDENCIES serialization)
endif()
# Special handling for Boost 1.86.0 and higher
if(NOT Boost_VERSION_STRING VERSION_LESS 1.86.0)
# Explicitly set these for Boost 1.86
set(_Boost_IOSTREAMS_DEPENDENCIES "") # No dependencies for iostreams in 1.86
# Debug output to help diagnose the issue
if(Boost_DEBUG)
message(STATUS "Using special dependency settings for Boost 1.86.0+")
message(STATUS "Component: ${component}, uppercomponent: ${uppercomponent}")
message(STATUS "Boost_VERSION_STRING: ${Boost_VERSION_STRING}")
message(STATUS "BOOST_ROOT: $ENV{BOOST_ROOT}")
message(STATUS "BOOST_LIBRARYDIR: $ENV{BOOST_LIBRARYDIR}")
endif()
endif()
# Only show warning for versions beyond what we've defined
if(NOT Boost_VERSION_STRING VERSION_LESS 1.87.0)
if(NOT Boost_VERSION_STRING VERSION_LESS 1.77.0)
message(WARNING "New Boost version may have incorrect or missing dependencies and imported targets")
endif()
endif()
@@ -1897,18 +1879,6 @@ foreach(COMPONENT ${Boost_FIND_COMPONENTS})
list(INSERT _boost_LIBRARY_SEARCH_DIRS_RELEASE 0 ${Boost_LIBRARY_DIR_DEBUG})
endif()
if(NOT Boost_VERSION_STRING VERSION_LESS 1.86.0)
if(BOOST_LIBRARYDIR AND EXISTS "${BOOST_LIBRARYDIR}")
# Clear existing search paths and use only BOOST_LIBRARYDIR
set(_boost_LIBRARY_SEARCH_DIRS_RELEASE "${BOOST_LIBRARYDIR}" NO_DEFAULT_PATH)
set(_boost_LIBRARY_SEARCH_DIRS_DEBUG "${BOOST_LIBRARYDIR}" NO_DEFAULT_PATH)
if(Boost_DEBUG)
message(STATUS "Boost 1.86: Setting library search dirs to BOOST_LIBRARYDIR: ${BOOST_LIBRARYDIR}")
endif()
endif()
endif()
# Avoid passing backslashes to _Boost_FIND_LIBRARY due to macro re-parsing.
string(REPLACE "\\" "/" _boost_LIBRARY_SEARCH_DIRS_tmp "${_boost_LIBRARY_SEARCH_DIRS_RELEASE}")

View File

@@ -15,5 +15,3 @@ find_library (soci
find_path (SOCI_INCLUDE_DIR
NAMES soci/soci.h)
message("SOCI FOUND AT: ${SOCI_LIB}")

View File

@@ -13,7 +13,7 @@ if(reporting)
ExternalProject_Add(postgres_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/postgres/postgres.git
GIT_TAG REL_14_5
GIT_TAG master
CONFIGURE_COMMAND ./configure --without-readline > /dev/null
BUILD_COMMAND ${CMAKE_COMMAND} -E env --unset=MAKELEVEL make
UPDATE_COMMAND ""

View File

@@ -51,8 +51,7 @@ else()
# This patch process is likely fragile and should be reviewed carefully
# whenever we update the GIT_TAG above.
PATCH_COMMAND
${CMAKE_COMMAND} -D RIPPLED_SOURCE=${CMAKE_CURRENT_SOURCE_DIR}
-P ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/soci_patch.cmake
${CMAKE_COMMAND} -P ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/soci_patch.cmake
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}

View File

@@ -2,15 +2,6 @@
NIH dep: wasmedge: web assembly runtime for hooks.
#]===================================================================]
find_package(Curses)
if(CURSES_FOUND)
include_directories(${CURSES_INCLUDE_DIR})
target_link_libraries(ripple_libs INTERFACE ${CURSES_LIBRARY})
else()
message(WARNING "CURSES library not found... (only important for mac builds)")
endif()
find_package(LLVM REQUIRED CONFIG)
message(STATUS "Found LLVM ${LLVM_PACKAGE_VERSION}")
message(STATUS "Using LLVMConfig.cmake in: ${LLVM_DIR}")
@@ -33,7 +24,6 @@ ExternalProject_Add (wasmedge_src
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DLLVM_DIR=${LLVM_DIR}
-DLLVM_LIBRARY_DIR=${LLVM_LIBRARY_DIR}
-DLLVM_ENABLE_TERMINFO=OFF
$<$<NOT:$<BOOL:${is_multiconfig}>>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}>
$<$<BOOL:${MSVC}>:
"-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP -march=native"
@@ -73,12 +63,4 @@ set_target_properties (wasmedge PROPERTIES
"${wasmedge_src_BINARY_DIR}/include/api/"
)
target_link_libraries (ripple_libs INTERFACE wasmedge)
#RH NOTE: some compilers / versions of some libraries need these, most don't
find_library(XAR_LIBRARY NAMES xar)
if(XAR_LIBRARY)
target_link_libraries(ripple_libs INTERFACE ${XAR_LIBRARY})
else()
message(WARNING "xar library not found... (only important for mac builds)")
endif()
add_library (NIH::WasmEdge ALIAS wasmedge)

View File

@@ -11,7 +11,7 @@ if(reporting)
ExternalProject_Add(zlib_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/madler/zlib.git
GIT_TAG v1.2.12
GIT_TAG master
INSTALL_COMMAND ""
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}z.a
LOG_BUILD TRUE
@@ -45,7 +45,7 @@ if(reporting)
ExternalProject_Add(krb5_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/krb5/krb5.git
GIT_TAG krb5-1.20-final
GIT_TAG master
UPDATE_COMMAND ""
CONFIGURE_COMMAND autoreconf src && CFLAGS=-fcommon ./src/configure --enable-static --disable-shared > /dev/null
BUILD_IN_SOURCE 1
@@ -80,7 +80,7 @@ if(reporting)
ExternalProject_Add(libuv_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/libuv/libuv.git
GIT_TAG v1.44.2
GIT_TAG v1.x
INSTALL_COMMAND ""
BUILD_BYPRODUCTS <BINARY_DIR>/${ep_lib_prefix}uv_a.a
LOG_BUILD TRUE
@@ -106,7 +106,7 @@ if(reporting)
ExternalProject_Add(cassandra_src
PREFIX ${nih_cache_path}
GIT_REPOSITORY https://github.com/datastax/cpp-driver.git
GIT_TAG 2.16.2
GIT_TAG master
CMAKE_ARGS
-DLIBUV_ROOT_DIR=${BINARY_DIR}
-DLIBUV_LIBARY=${BINARY_DIR}/libuv_a.a

View File

@@ -74,11 +74,7 @@ else ()
if (NOT _location)
message (FATAL_ERROR "using pkg-config for grpc, can't find c-ares")
endif ()
if(${_location} MATCHES "\\.a$")
add_library(c-ares::cares STATIC IMPORTED GLOBAL)
else()
add_library(c-ares::cares SHARED IMPORTED GLOBAL)
endif()
add_library (c-ares::cares ${_static} IMPORTED GLOBAL)
set_target_properties (c-ares::cares PROPERTIES
IMPORTED_LOCATION ${_location}
INTERFACE_INCLUDE_DIRECTORIES "${${_prefix}_INCLUDE_DIRS}"
@@ -208,7 +204,6 @@ else ()
CMAKE_ARGS
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DCMAKE_CXX_STANDARD=17
$<$<BOOL:${CMAKE_VERBOSE_MAKEFILE}>:-DCMAKE_VERBOSE_MAKEFILE=ON>
$<$<BOOL:${CMAKE_TOOLCHAIN_FILE}>:-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}>
$<$<BOOL:${VCPKG_TARGET_TRIPLET}>:-DVCPKG_TARGET_TRIPLET=${VCPKG_TARGET_TRIPLET}>

View File

@@ -2,16 +2,6 @@
# so as to remove type range check exceptions that cause
# us trouble when using boost::optional to select int values
# Soci's CMake setup leaves flags in place that will cause warnings to
# be treated as errors, but some compiler versions throw "new" warnings
# that then cause the build to fail. Simplify that until soci fixes
# those warnings.
if (RIPPLED_SOURCE)
execute_process( COMMAND ${CMAKE_COMMAND} -E copy_if_different
${RIPPLED_SOURCE}/Builds/CMake/SociConfig.cmake.patched
cmake/SociConfig.cmake )
endif ()
# Some versions of CMake erroneously patch external projects on every build.
# If the patch makes no changes, skip it. This workaround can be
# removed once we stop supporting vulnerable versions of CMake.

View File

@@ -1 +0,0 @@
[Please see the BUILD instructions here](../BUILD.md)

405
Builds/Test.py Executable file
View File

@@ -0,0 +1,405 @@
#!/usr/bin/env python
# This file is part of rippled: https://github.com/ripple/rippled
# Copyright (c) 2012 - 2017 Ripple Labs Inc.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Invocation:
./Builds/Test.py - builds and tests all configurations
The build must succeed without shell aliases for this to work.
To pass flags to cmake, put them at the very end of the command line, after
the -- flag - like this:
./Builds/Test.py -- -j4 # Pass -j4 to cmake --build
Common problems:
1) Boost not found. Solution: export BOOST_ROOT=[path to boost folder]
2) OpenSSL not found. Solution: export OPENSSL_ROOT=[path to OpenSSL folder]
3) cmake is not found. Solution: Be sure cmake directory is on your $PATH
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
import itertools
import os
import platform
import re
import shutil
import sys
import subprocess
def powerset(iterable):
"""powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"""
s = list(iterable)
return itertools.chain.from_iterable(itertools.combinations(s, r) for r in range(len(s) + 1))
IS_WINDOWS = platform.system().lower() == 'windows'
IS_OS_X = platform.system().lower() == 'darwin'
# CMake
if IS_WINDOWS:
CMAKE_UNITY_CONFIGS = ['Debug', 'Release']
CMAKE_NONUNITY_CONFIGS = ['Debug', 'Release']
else:
CMAKE_UNITY_CONFIGS = []
CMAKE_NONUNITY_CONFIGS = []
CMAKE_UNITY_COMBOS = { '' : [['rippled'], CMAKE_UNITY_CONFIGS],
'.nounity' : [['rippled'], CMAKE_NONUNITY_CONFIGS] }
if IS_WINDOWS:
CMAKE_DIR_TARGETS = { ('msvc' + unity,) : targets for unity, targets in
CMAKE_UNITY_COMBOS.items() }
elif IS_OS_X:
CMAKE_DIR_TARGETS = { (build + unity,) : targets
for build in ['debug', 'release']
for unity, targets in CMAKE_UNITY_COMBOS.items() }
else:
CMAKE_DIR_TARGETS = { (cc + "." + build + unity,) : targets
for cc in ['gcc', 'clang']
for build in ['debug', 'release', 'coverage', 'profile']
for unity, targets in CMAKE_UNITY_COMBOS.items() }
# list of tuples of all possible options
if IS_WINDOWS or IS_OS_X:
CMAKE_ALL_GENERATE_OPTIONS = [tuple(x) for x in powerset(['-GNinja', '-Dassert=true'])]
else:
CMAKE_ALL_GENERATE_OPTIONS = list(set(
[tuple(x) for x in powerset(['-GNinja', '-Dstatic=true', '-Dassert=true', '-Dsan=address'])] +
[tuple(x) for x in powerset(['-GNinja', '-Dstatic=true', '-Dassert=true', '-Dsan=thread'])]))
parser = argparse.ArgumentParser(
description='Test.py - run ripple tests'
)
parser.add_argument(
'--all', '-a',
action='store_true',
help='Build all configurations.',
)
parser.add_argument(
'--keep_going', '-k',
action='store_true',
help='Keep going after one configuration has failed.',
)
parser.add_argument(
'--silent', '-s',
action='store_true',
help='Silence all messages except errors',
)
parser.add_argument(
'--verbose', '-v',
action='store_true',
help=('Report more information about which commands are executed and the '
'results.'),
)
parser.add_argument(
'--test', '-t',
default='',
help='Add a prefix for unit tests',
)
parser.add_argument(
'--testjobs',
default='0',
type=int,
help='Run tests in parallel'
)
parser.add_argument(
'--ipv6',
action='store_true',
help='Use IPv6 localhost when running unit tests.',
)
parser.add_argument(
'--clean', '-c',
action='store_true',
help='delete all build artifacts after testing',
)
parser.add_argument(
'--quiet', '-q',
action='store_true',
help='Reduce output where possible (unit tests)',
)
parser.add_argument(
'--dir', '-d',
default=(),
nargs='*',
help='Specify one or more CMake dir names. '
'Will also be used as -Dtarget=<dir> running cmake.'
)
parser.add_argument(
'--target',
default=(),
nargs='*',
help='Specify one or more CMake build targets. '
'Will be used as --target <target> running cmake --build.'
)
parser.add_argument(
'--config',
default=(),
nargs='*',
help='Specify one or more CMake build configs. '
'Will be used as --config <config> running cmake --build.'
)
parser.add_argument(
'--generator_option',
action='append',
help='Specify a CMake generator option. Repeat for multiple options. '
'Will be passed to the cmake generator. '
'Due to limits of the argument parser, arguments starting with \'-\' '
'must be attached to this option. e.g. --generator_option=-GNinja.')
parser.add_argument(
'--build_option',
action='append',
help='Specify a build option. Repeat for multiple options. '
'Will be passed to the build tool via cmake --build. '
'Due to limits of the argument parser, arguments starting with \'-\' '
'must be attached to this option. e.g. --build_option=-j8.')
parser.add_argument(
'extra_args',
default=(),
nargs='*',
help='Extra arguments are passed through to the tools'
)
ARGS = parser.parse_args()
def decodeString(line):
# Python 2 vs. Python 3
if isinstance(line, str):
return line
else:
return line.decode()
def shell(cmd, args=(), silent=False, cust_env=None):
""""Execute a shell command and return the output."""
silent = ARGS.silent or silent
verbose = not silent and ARGS.verbose
if verbose:
print('$' + cmd, *args)
command = (cmd,) + args
# shell is needed in Windows to find executable in the path
process = subprocess.Popen(
command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
env=cust_env,
shell=IS_WINDOWS)
lines = []
count = 0
# readline returns '' at EOF
for line in iter(process.stdout.readline, ''):
if process.poll() is None:
decoded = decodeString(line)
lines.append(decoded)
if verbose:
print(decoded, end='')
elif not silent:
count += 1
if count >= 80:
print()
count = 0
else:
print('.', end='')
else:
break
if not verbose and count:
print()
process.wait()
return process.returncode, lines
def get_cmake_dir(cmake_dir):
return os.path.join('build' , 'cmake' , cmake_dir)
def run_cmake(directory, cmake_dir, args):
print('Generating build in', directory, 'with', *args or ('default options',))
old_dir = os.getcwd()
if not os.path.exists(directory):
os.makedirs(directory)
os.chdir(directory)
if IS_WINDOWS and not any(arg.startswith("-G") for arg in args) and not os.path.exists("CMakeCache.txt"):
if '--ninja' in args:
args += ( '-GNinja', )
else:
args += ( '-GVisual Studio 14 2015 Win64', )
# hack to extract cmake options/args from the legacy target format
if re.search('\.unity', cmake_dir):
args += ( '-Dunity=ON', )
if re.search('\.nounity', cmake_dir):
args += ( '-Dunity=OFF', )
if re.search('coverage', cmake_dir):
args += ( '-Dcoverage=ON', )
if re.search('profile', cmake_dir):
args += ( '-Dprofile=ON', )
if re.search('debug', cmake_dir):
args += ( '-DCMAKE_BUILD_TYPE=Debug', )
if re.search('release', cmake_dir):
args += ( '-DCMAKE_BUILD_TYPE=Release', )
m = re.search('gcc(-[^.]*)', cmake_dir)
if m:
args += ( '-DCMAKE_C_COMPILER=' + m.group(0),
'-DCMAKE_CXX_COMPILER=g++' + m.group(1), )
elif re.search('gcc', cmake_dir):
args += ( '-DCMAKE_C_COMPILER=gcc', '-DCMAKE_CXX_COMPILER=g++', )
m = re.search('clang(-[^.]*)', cmake_dir)
if m:
args += ( '-DCMAKE_C_COMPILER=' + m.group(0),
'-DCMAKE_CXX_COMPILER=clang++' + m.group(1), )
elif re.search('clang', cmake_dir):
args += ( '-DCMAKE_C_COMPILER=clang', '-DCMAKE_CXX_COMPILER=clang++', )
args += ( os.path.join('..', '..', '..'), )
resultcode, lines = shell('cmake', args)
if resultcode:
print('Generating FAILED:')
if not ARGS.verbose:
print(*lines, sep='')
sys.exit(1)
os.chdir(old_dir)
def run_cmake_build(directory, target, config, args):
print('Building', target, config, 'in', directory, 'with', *args or ('default options',))
build_args=('--build', directory)
if target:
build_args += ('--target', target)
if config:
build_args += ('--config', config)
if args:
build_args += ('--',)
build_args += tuple(args)
resultcode, lines = shell('cmake', build_args)
if resultcode:
print('Build FAILED:')
if not ARGS.verbose:
print(*lines, sep='')
sys.exit(1)
def run_cmake_tests(directory, target, config):
failed = []
if IS_WINDOWS:
target += '.exe'
executable = os.path.join(directory, config if config else 'Debug', target)
if(not os.path.exists(executable)):
executable = os.path.join(directory, target)
print('Unit tests for', executable)
testflag = '--unittest'
quiet = ''
testjobs = ''
ipv6 = ''
if ARGS.test:
testflag += ('=' + ARGS.test)
if ARGS.quiet:
quiet = '-q'
if ARGS.ipv6:
ipv6 = '--unittest-ipv6'
if ARGS.testjobs:
testjobs = ('--unittest-jobs=' + str(ARGS.testjobs))
resultcode, lines = shell(executable, (testflag, quiet, testjobs, ipv6))
if resultcode:
if not ARGS.verbose:
print('ERROR:', *lines, sep='')
failed.append([target, 'unittest'])
return failed
def main():
all_failed = []
if ARGS.all:
build_dir_targets = CMAKE_DIR_TARGETS
generator_options = CMAKE_ALL_GENERATE_OPTIONS
else:
build_dir_targets = { tuple(ARGS.dir) : [ARGS.target, ARGS.config] }
if ARGS.generator_option:
generator_options = [tuple(ARGS.generator_option)]
else:
generator_options = [tuple()]
if not build_dir_targets:
# Let CMake choose the build tool.
build_dir_targets = { () : [] }
if ARGS.build_option:
ARGS.build_option = ARGS.build_option + list(ARGS.extra_args)
else:
ARGS.build_option = list(ARGS.extra_args)
for args in generator_options:
for build_dirs, (build_targets, build_configs) in build_dir_targets.items():
if not build_dirs:
build_dirs = ('default',)
if not build_targets:
build_targets = ('rippled',)
if not build_configs:
build_configs = ('',)
for cmake_dir in build_dirs:
cmake_full_dir = get_cmake_dir(cmake_dir)
run_cmake(cmake_full_dir, cmake_dir, args)
for target in build_targets:
for config in build_configs:
run_cmake_build(cmake_full_dir, target, config, ARGS.build_option)
failed = run_cmake_tests(cmake_full_dir, target, config)
if failed:
print('FAILED:', *(':'.join(f) for f in failed))
if not ARGS.keep_going:
sys.exit(1)
else:
all_failed.extend([decodeString(cmake_dir +
"." + target + "." + config), ':'.join(f)]
for f in failed)
else:
print('Success')
if ARGS.clean:
shutil.rmtree(cmake_full_dir)
if all_failed:
if len(all_failed) > 1:
print()
print('FAILED:', *(':'.join(f) for f in all_failed))
sys.exit(1)
if __name__ == '__main__':
main()
sys.exit(0)

View File

@@ -0,0 +1,45 @@
{
// See https://go.microsoft.com//fwlink//?linkid=834763 for more information about this file.
"configurations": [
{
"name": "x64-Debug",
"generator": "Visual Studio 15 2017 Win64",
"configurationType": "Debug",
"inheritEnvironments": [ "msvc_x64_x64" ],
"buildRoot": "${thisFileDir}\\build\\${name}",
"cmakeCommandArgs": "",
"buildCommandArgs": "-v:minimal",
"ctestCommandArgs": "",
"variables": [
{
"name": "BOOST_ROOT",
"value": "C:\\lib\\boost"
},
{
"name": "OPENSSL_ROOT",
"value": "C:\\lib\\OpenSSL-Win64"
}
]
},
{
"name": "x64-Release",
"generator": "Visual Studio 15 2017 Win64",
"configurationType": "Release",
"inheritEnvironments": [ "msvc_x64_x64" ],
"buildRoot": "${thisFileDir}\\build\\${name}",
"cmakeCommandArgs": "",
"buildCommandArgs": "-v:minimal",
"ctestCommandArgs": "",
"variables": [
{
"name": "BOOST_ROOT",
"value": "C:\\lib\\boost"
},
{
"name": "OPENSSL_ROOT",
"value": "C:\\lib\\OpenSSL-Win64"
}
]
}
]
}

View File

@@ -0,0 +1,263 @@
# Visual Studio 2017 Build Instructions
## Important
We do not recommend Windows for rippled production use at this time. Currently,
the Ubuntu platform has received the highest level of quality assurance,
testing, and support. Additionally, 32-bit Windows versions are not supported.
## Prerequisites
To clone the source code repository, create branches for inspection or
modification, build rippled under Visual Studio, and run the unit tests, you will
need these software components:
| Component | Minimum Recommended Version |
|-----------|-----------------------|
| [Visual Studio 2017](README.md#install-visual-studio-2017)| 15.5.4 |
| [Git for Windows](README.md#install-git-for-windows)| 2.16.1 |
| [OpenSSL Library](README.md#install-openssl) | 1.1.1L |
| [Boost library](README.md#build-boost) | 1.70.0 |
| [CMake for Windows](README.md#optional-install-cmake-for-windows)* | 3.12 |
\* Only needed if you are not using the integrated CMake in VS 2017 and prefer to generate dedicated project/solution files.
## Install Software
### Install Visual Studio 2017
If not already installed on your system, download your choice of installer from
the [Visual Studio 2017
Download](https://www.visualstudio.com/downloads/download-visual-studio-vs)
page, run the installer, and follow the directions. **You may need to choose the
`Desktop development with C++` workload to install all necessary C++ features.**
Any version of Visual Studio 2017 may be used to build rippled. The **Visual
Studio 2017 Community** edition is available free of charge (see [the product
page](https://www.visualstudio.com/products/visual-studio-community-vs) for
licensing details), while paid editions may be used for an initial free-trial
period.
### Install Git for Windows
Git is a distributed revision control system. The Windows version also provides
the bash shell and many Windows versions of Unix commands. While there are other
varieties of Git (such as TortoiseGit, which has a native Windows interface and
integrates with the Explorer shell), we recommend installing [Git for
Windows](https://git-scm.com/) since it provides a Unix-like command line
environment useful for running shell scripts. Use of the bash shell under
Windows is mandatory for running the unit tests.
### Install OpenSSL
[Download the latest version of
OpenSSL.](http://slproweb.com/products/Win32OpenSSL.html) There will be
several `Win64` variants available; you want the non-light
`v1.1` line. As of this writing, you **should** select
* Win64 OpenSSL v1.1.1L
and should **not** select
* Anything with "Win32" in the name
* Anything with "light" in the name
* Anything with "EXPERIMENTAL" in the name
* Anything in the 3.0 line - rippled won't currently build with this version.
Run the installer, and choose an appropriate location for your OpenSSL
installation. In this guide we use `C:\lib\OpenSSL-Win64` as the destination
location.
You may be informed on running the installer that "Visual C++ 2008
Redistributables" must be installed first. If so, download it from the
[same page](http://slproweb.com/products/Win32OpenSSL.html), again making sure
to get the correct 32-/64-bit variant.
* NOTE: Since rippled links statically to OpenSSL, it does not matter where the
OpenSSL .DLL files are placed, or what version they are. rippled does not use
or require any external .DLL files to run other than the standard operating
system ones.
### Build Boost
Boost 1.70 or later is required.
[Download boost](http://www.boost.org/users/download/) and unpack it
to `c:\lib`. As of this writing, the most recent version of boost is 1.70.0,
which will unpack into a directory named `boost_1_70_0`. We recommend either
renaming this directory to `boost`, or creating a junction link `mklink /J boost
boost_1_70_0`, so that you can more easily switch between versions.
Next, open **Developer Command Prompt** and type the following commands
```powershell
cd C:\lib\boost
bootstrap
```
The rippled application is linked statically to the standard runtimes and
external dependencies on Windows, to ensure that the behavior of the executable
is not affected by changes in outside files. Therefore, it is necessary to build
the required boost static libraries using this command:
```powershell
bjam -j<Num Parallel> --toolset=msvc-14.1 address-model=64 architecture=x86 link=static threading=multi runtime-link=shared,static stage
```
where you should replace `<Num Parallel>` with the number of parallel
invocations to use for the build, e.g. `bjam -j4 ...` would use up to 4
concurrent build shell commands.
Building the boost libraries may take considerable time. When the build process
is completed, take note of both the reported compiler include paths and linker
library paths as they will be required later.
### (Optional) Install CMake for Windows
[CMake](http://cmake.org) is a cross platform build system generator. Visual
Studio 2017 includes an integrated version of CMake that avoids having to
manually run CMake, but it is undergoing continuous improvement. Users that
prefer to use standard Visual Studio project and solution files need to install
a dedicated version of CMake to generate them. The latest version can be found
at the [CMake download site](https://cmake.org/download/). It is recommended you
select the install option to add CMake to your path.
## Clone the rippled repository
If you are familiar with cloning github repositories, just follow your normal
process and clone `git@github.com:ripple/rippled.git`. Otherwise follow this
section for instructions.
1. If you don't have a github account, sign up for one at
[github.com](https://github.com/).
2. Make sure you have Github ssh keys. For help see
[generating-ssh-keys](https://help.github.com/articles/generating-ssh-keys).
Open the "Git Bash" shell that was installed with "Git for Windows" in the step
above. Navigate to the directory where you want to clone rippled (git bash uses
`/c` for Windows's `C:` and forward slashes where Windows uses backslashes, so
`C:\Users\joe\projs` would be `/c/Users/joe/projs` in git bash). Now clone the
repository and optionally switch to the *master* branch. Type the following at
the bash prompt:
```powershell
git clone git@github.com:ripple/rippled.git
cd rippled
```
If you receive an error about not having the "correct access rights" make sure
you have Github ssh keys, as described above.
For a stable release, choose the `master` branch or one of the tagged releases
listed on [rippled's GitHub page](https://github.com/ripple/rippled/releases).
```
git checkout master
```
To test the latest release candidate, choose the `release` branch.
```
git checkout release
```
If you are doing development work and want the latest set of untested features,
you can consider using the `develop` branch instead.
```
git checkout develop
```
# Build using Visual Studio integrated CMake
In Visual Studio 2017, Microsoft added [integrated IDE support for
cmake](https://blogs.msdn.microsoft.com/vcblog/2016/10/05/cmake-support-in-visual-studio/).
To begin, simply:
1. Launch Visual Studio and choose **File | Open | Folder**, navigating to the
cloned rippled folder.
2. Right-click on `CMakeLists.txt` in the **Solution Explorer - Folder View** to
generate a `CMakeSettings.json` file. A sample settings file is provided
[here](/Builds/VisualStudio2017/CMakeSettings-example.json). Customize the
settings for `BOOST_ROOT` and `OPENSSL_ROOT` to match the install paths if they
differ from those in the file.
3. Select either the `x64-Release` or `x64-Debug` configuration from the
**Project Settings** drop-down. This should invoke the built-in CMake project
generator. If not, you can right-click on the `CMakeLists.txt` file and
choose **Cache | Generate Cache**.
4. Select either the `rippled.exe` (unity) or `rippled_classic.exe` (non-unity)
option in the **Select Startup Item** drop-down. This will be the target
built when you press F7. Alternatively, you can choose a target to build from
the top-level **CMake | Build** menu. Note that at this time, there are other
targets listed that come from third party visual studio files embedded in the
rippled repo, e.g. `datagen.vcxproj`. Please ignore them.
For details on configuring debugging sessions or further customization of CMake,
please refer to the [CMake tools for VS
documentation](https://docs.microsoft.com/en-us/cpp/ide/cmake-tools-for-visual-cpp).
If using the provided `CMakeSettings.json` file, the executable will be in
```
.\build\x64-Release\Release\rippled.exe
```
or
```
.\build\x64-Debug\Debug\rippled.exe
```
These paths are relative to your cloned git repository.
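If the build succeeds, you can optionally sanity-check the resulting executable by asking it for its version (the same `--version` flag the packaging smoke tests in this changeset rely on); adjust the path to match the configuration you built:
```
.\build\x64-Release\Release\rippled.exe --version
```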
# Build using stand-alone CMake
This requires having installed [CMake for
Windows](README.md#optional-install-cmake-for-windows). We do not recommend
mixing this method with the integrated CMake method for the same repository
clone. Assuming you included the cmake executable folder in your path,
execute the following commands within your `rippled` cloned repository:
```
mkdir build\cmake
cd build\cmake
cmake ..\.. -G"Visual Studio 15 2017 Win64" -DBOOST_ROOT="C:\lib\boost_1_70_0" -DOPENSSL_ROOT="C:\lib\OpenSSL-Win64" -DCMAKE_GENERATOR_TOOLSET=host=x64
```
Now launch Visual Studio 2017 and select **File | Open | Project/Solution**.
Navigate to the `build\cmake` folder created above and select the `rippled.sln`
file. You can then choose whether to build the `Debug` or `Release` solution
configuration.
The executable will be in
```
.\build\cmake\Release\rippled.exe
```
or
```
.\build\cmake\Debug\rippled.exe
```
These paths are relative to your cloned git repository.
# Unity/No-Unity Builds
The rippled build system defaults to using
[unity source files](http://onqtam.com/programming/2018-07-07-unity-builds/)
to improve build times. In some cases it might be desirable to disable the
unity build and compile individual translation units. Here is how you can
switch to a "no-unity" build configuration:
## Visual Studio Integrated CMake
Edit your `CMakeSettings.json` (described above) by adding `-Dunity=OFF`
to the `cmakeCommandArgs` entry for each build configuration.
## Standalone CMake Builds
When running cmake to generate the Visual Studio project files, add
`-Dunity=OFF` to the command line options passed to cmake.
**Note:** you will need to re-run the cmake configuration step anytime you
want to switch between unity/no-unity builds.
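As a sketch, a no-unity generation step for the standalone CMake workflow could look like the following, reusing the install paths from earlier in this guide; the `build\cmake.nounity` directory name is only an example:
```
mkdir build\cmake.nounity
cd build\cmake.nounity
cmake ..\.. -G"Visual Studio 15 2017 Win64" -Dunity=OFF -DBOOST_ROOT="C:\lib\boost_1_70_0" -DOPENSSL_ROOT="C:\lib\OpenSSL-Win64" -DCMAKE_GENERATOR_TOOLSET=host=x64
```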
# Unit Test (Recommended)
`rippled` builds a set of unit tests into the server executable. To run these
unit tests after building, pass the `--unittest` option to the compiled
`rippled` executable. The executable will exit with summary info after running
the unit tests.
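For example, assuming the standalone CMake `Release` build described above, a test run might look like this (append `=<prefix>` to `--unittest` to limit the run to matching suites):
```
.\build\cmake\Release\rippled.exe --unittest
```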

7
Builds/build_all.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
num_procs=$(lscpu -p | grep -v '^#' | sort -u -t, -k 2,4 | wc -l) # number of physical cores
path=$(cd $(dirname $0) && pwd)
cd $(dirname $path)
${path}/Test.py -a -c --testjobs=${num_procs} -- -j${num_procs}

View File

@@ -0,0 +1,31 @@
# rippled Packaging and Containers
This folder contains docker container definitions and configuration
files to support building rpm and deb packages of rippled. The container
definitions include some additional software/packages that are used
for general build/test CI workflows of rippled but are not explicitly
needed for the package building workflow.
## CMake Targets
If you have docker installed on your local system, then the main
CMake file will enable several targets related to building packages:
`rpm_container`, `rpm`, `dpkg_container`, and `dpkg`. The package targets
depend on the container targets and will trigger a build of those first.
The container builds can take several dozen minutes to complete (depending
on hardware specs), so quick build cycles are not possible currently. As
such, these targets are often best suited to CI/automated build systems.
The package build can be invoked like any other cmake target from the
rippled root folder:
```
mkdir -p build/pkg && cd build/pkg
cmake -Dpackages_only=ON ../..
cmake --build . --target rpm
```
Upon successful completion, the generated package files will be in
the `build/pkg/packages` directory. For deb packages, simply replace
`rpm` with `dpkg` in the build command above.
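For example, the deb equivalent of the commands above simply swaps the target name:
```
mkdir -p build/pkg && cd build/pkg
cmake -Dpackages_only=ON ../..
cmake --build . --target dpkg
```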

View File

@@ -0,0 +1,43 @@
FROM centos:7
ARG GIT_COMMIT=unknown
ARG CI_USE=false
LABEL git-commit=$GIT_COMMIT
COPY centos-builder/centos_setup.sh /tmp/
COPY shared/build_deps.sh /tmp/
COPY shared/install_cmake.sh /tmp/
COPY centos-builder/extras.sh /tmp/
COPY shared/install_boost.sh /tmp/
RUN chmod +x /tmp/centos_setup.sh && \
chmod +x /tmp/build_deps.sh && \
chmod +x /tmp/install_boost.sh && \
chmod +x /tmp/install_cmake.sh && \
chmod +x /tmp/extras.sh
RUN /tmp/centos_setup.sh
RUN /tmp/install_cmake.sh 3.16.1 /opt/local/cmake-3.16
RUN ln -s /opt/local/cmake-3.16 /opt/local/cmake
ENV PATH="/opt/local/cmake/bin:$PATH"
# also install min supported cmake for testing
RUN if [ "${CI_USE}" = true ] ; then /tmp/install_cmake.sh 3.9.0 /opt/local/cmake-3.9; fi
RUN source scl_source enable devtoolset-7 python27 && \
/tmp/build_deps.sh
ENV BOOST_ROOT="/opt/local/boost/_INSTALLED_"
ENV PLANTUML_JAR="/opt/plantuml/plantuml.jar"
ENV OPENSSL_ROOT="/opt/local/openssl"
ENV GDB_ROOT="/opt/local/gdb"
RUN source scl_source enable devtoolset-7 python27 && \
/tmp/extras.sh
# prep files for package building
RUN mkdir -m 777 -p /opt/rippled_bld/pkg
WORKDIR /opt/rippled_bld/pkg
RUN mkdir -m 777 ./rpmbuild
RUN mkdir -m 777 ./rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
COPY packaging/rpm/build_rpm.sh ./
CMD ./build_rpm.sh

View File

@@ -0,0 +1,37 @@
#!/usr/bin/env bash
set -ex
source /etc/os-release
yum -y upgrade
yum -y update
yum -y install epel-release centos-release-scl
yum -y install \
wget curl time gcc-c++ time yum-utils autoconf automake pkgconfig libtool \
libstdc++-static rpm-build gnupg which make cmake \
devtoolset-7 devtoolset-7-gdb devtoolset-7-libasan-devel devtoolset-7-libtsan-devel devtoolset-7-libubsan-devel \
devtoolset-8 devtoolset-8-gdb devtoolset-8-binutils devtoolset-8-libstdc++-devel \
devtoolset-8-libasan-devel devtoolset-8-libtsan-devel devtoolset-8-libubsan-devel devtoolset-8-liblsan-devel \
flex flex-devel bison bison-devel parallel \
ncurses ncurses-devel ncurses-libs graphviz graphviz-devel \
lzip p7zip bzip2 bzip2-devel lzma-sdk lzma-sdk-devel xz-devel \
zlib zlib-devel zlib-static texinfo openssl openssl-static \
jemalloc jemalloc-devel \
libicu-devel htop \
python27-python rh-python35-python \
python-devel python27-python-devel rh-python35-python-devel \
python27 rh-python35 \
ninja-build git svn \
swig perl-Digest-MD5 python2-pip
if [ "${CI_USE}" = true ] ; then
# TODO need permanent link
yum -y install ftp://ftp.pbone.net/mirror/archive.fedoraproject.org/fedora-secondary/updates/26/i386/Packages/p/python2-six-1.10.0-9.fc26.noarch.rpm
yum -y install \
llvm-toolset-7 llvm-toolset-7-runtime llvm-toolset-7-build llvm-toolset-7-clang \
llvm-toolset-7-clang-analyzer llvm-toolset-7-clang-devel llvm-toolset-7-clang-libs \
llvm-toolset-7-clang-tools-extra llvm-toolset-7-compiler-rt llvm-toolset-7-lldb \
llvm-toolset-7-lldb-devel llvm-toolset-7-python-lldb
fi

View File

@@ -0,0 +1,33 @@
#!/usr/bin/env bash
set -ex
if [ "${CI_USE}" = true ] ; then
cd /tmp
wget https://ftp.gnu.org/gnu/gdb/gdb-8.3.1.tar.xz
tar xf gdb-8.3.1.tar.xz
cd gdb-8.3.1
./configure CFLAGS="-w -O2" CXXFLAGS="-std=gnu++11 -g -O2 -w" --prefix=/opt/local/gdb-8.3
make -j$(nproc)
make install
ln -s /opt/local/gdb-8.3 /opt/local/gdb
cd ..
rm -f gdb-8.3.1.tar.xz
rm -rf gdb-8.3.1
# clang from source
cd /tmp
git clone https://github.com/llvm/llvm-project.git
cd llvm-project
git checkout llvmorg-9.0.0
INSTALL=/opt/llvm-9/
mkdir mybuilddir && cd mybuilddir
# TODO figure out necessary options
cmake ../llvm -G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DLLVM_ENABLE_PROJECTS='clang;clang-tools-extra;libcxx;libcxxabi;lldb;compiler-rt;lld;polly' \
-DCMAKE_INSTALL_PREFIX=${INSTALL} \
-DLLVM_LIBDIR_SUFFIX=64
cmake --build . --parallel --target install
cd /tmp
rm -rf llvm-project
fi

View File

@@ -0,0 +1,28 @@
#!/usr/bin/env sh
set -ex
pkgtype=$1
if [ "${pkgtype}" = "rpm" ] ; then
container_name="${RPM_CONTAINER_NAME}"
elif [ "${pkgtype}" = "dpkg" ] ; then
container_name="${DPKG_CONTAINER_NAME}"
else
echo "invalid package type"
exit 1
fi
if docker pull "${ARTIFACTORY_HUB}/${container_name}:latest_${CI_COMMIT_REF_SLUG}"; then
echo "found container for latest - using as cache."
docker tag \
"${ARTIFACTORY_HUB}/${container_name}:latest_${CI_COMMIT_REF_SLUG}" \
"${container_name}:latest_${CI_COMMIT_REF_SLUG}"
CMAKE_EXTRA="-D${pkgtype}_cache_from=${container_name}:latest_${CI_COMMIT_REF_SLUG}"
fi
cmake --version
test -d build && rm -rf build
mkdir -p build/container && cd build/container
eval time \
cmake -Dpackages_only=ON -DCMAKE_VERBOSE_MAKEFILE=ON ${CMAKE_EXTRA} \
-G Ninja ../..
time cmake --build . --target "${pkgtype}_container" -- -v

View File

@@ -0,0 +1,28 @@
#!/usr/bin/env sh
set -ex
pkgtype=$1
if [ "${pkgtype}" = "rpm" ] ; then
container_name="${RPM_CONTAINER_FULLNAME}"
container_tag="${RPM_CONTAINER_TAG}"
elif [ "${pkgtype}" = "dpkg" ] ; then
container_name="${DPKG_CONTAINER_FULLNAME}"
container_tag="${DPKG_CONTAINER_TAG}"
else
echo "invalid package type"
exit 1
fi
time docker pull "${ARTIFACTORY_HUB}/${container_name}"
docker tag \
"${ARTIFACTORY_HUB}/${container_name}" \
"${container_name}"
docker images
test -d build && rm -rf build
mkdir -p build/${pkgtype} && cd build/${pkgtype}
time cmake \
-Dpackages_only=ON \
-Dcontainer_label="${container_tag}" \
-Dhave_package_container=ON \
-DCMAKE_VERBOSE_MAKEFILE=ON \
-Dunity=OFF \
-G Ninja ../..
time cmake --build . --target ${pkgtype} -- -v

View File

@@ -0,0 +1,15 @@
#!/usr/bin/env sh
set -e
# used as a before/setup script for docker steps in gitlab-ci
# expects to be run in standard alpine/dind image
echo $(nproc)
docker login -u rippled \
-p ${ARTIFACTORY_DEPLOY_KEY_RIPPLED} ${ARTIFACTORY_HUB}
apk add --update py-pip
apk add \
bash util-linux coreutils binutils grep \
make ninja cmake build-base gcc g++ abuild git \
python3 python3-dev
pip3 install awscli
# list curdir contents to build log:
ls -la

View File

@@ -0,0 +1,16 @@
#!/usr/bin/env sh
case ${CI_COMMIT_REF_NAME} in
develop)
export COMPONENT="nightly"
;;
release)
export COMPONENT="unstable"
;;
master)
export COMPONENT="stable"
;;
*)
export COMPONENT="_unknown_"
;;
esac

View File

@@ -0,0 +1,703 @@
#########################################################################
## ##
## gitlab CI definition for rippled build containers and distro ##
## packages (rpm and dpkg). ##
## ##
#########################################################################
# NOTE: these are sensible defaults for Ripple pipelines. These
# can be overridden by project or group variables as needed.
variables:
# these containers are built manually using the rippled
# cmake build (container targets) and tagged/pushed so they
# can be used here
RPM_CONTAINER_TAG: "2020-02-10"
RPM_CONTAINER_NAME: "rippled-rpm-builder"
RPM_CONTAINER_FULLNAME: "${RPM_CONTAINER_NAME}:${RPM_CONTAINER_TAG}"
DPKG_CONTAINER_TAG: "2020-02-10"
DPKG_CONTAINER_NAME: "rippled-dpkg-builder"
DPKG_CONTAINER_FULLNAME: "${DPKG_CONTAINER_NAME}:${DPKG_CONTAINER_TAG}"
ARTIFACTORY_HOST: "artifactory.ops.ripple.com"
ARTIFACTORY_HUB: "${ARTIFACTORY_HOST}:6555"
GIT_SIGN_PUBKEYS_URL: "https://gitlab.ops.ripple.com/xrpledger/rippled-packages/snippets/49/raw"
PUBLIC_REPO_ROOT: "https://repos.ripple.com/repos"
# also need to define this variable ONLY for the primary
# build/publish pipeline on the mainline repo:
# IS_PRIMARY_REPO = "true"
stages:
- build_packages
- sign_packages
- smoketest
- verify_sig
- tag_images
- push_to_test
- verify_from_test
- wait_approval_prod
- push_to_prod
- verify_from_prod
- get_final_hashes
- build_containers
.dind_template: &dind_param
before_script:
- . ./Builds/containers/gitlab-ci/docker_alpine_setup.sh
variables:
docker_driver: overlay2
DOCKER_TLS_CERTDIR: ""
image:
name: artifactory.ops.ripple.com/docker:latest
services:
# workaround for TLS issues - consider going back
# to unversioned `dind` when issues are resolved
- name: artifactory.ops.ripple.com/docker:stable-dind
alias: docker
tags:
- 4xlarge
.only_primary_template: &only_primary
only:
refs:
- /^(master|release|develop)$/
variables:
- $IS_PRIMARY_REPO == "true"
.smoketest_local_template: &run_local_smoketest
tags:
- xlarge
script:
- . ./Builds/containers/gitlab-ci/smoketest.sh local
.smoketest_repo_template: &run_repo_smoketest
tags:
- xlarge
script:
- . ./Builds/containers/gitlab-ci/smoketest.sh repo
#########################################################################
## ##
## stage: build_packages ##
## ##
## build packages using containers from previous stage. ##
## ##
#########################################################################
rpm_build:
stage: build_packages
<<: *dind_param
artifacts:
paths:
- build/rpm/packages/
script:
- . ./Builds/containers/gitlab-ci/build_package.sh rpm
dpkg_build:
stage: build_packages
<<: *dind_param
artifacts:
paths:
- build/dpkg/packages/
script:
- . ./Builds/containers/gitlab-ci/build_package.sh dpkg
#########################################################################
## ##
## stage: sign_packages ##
## ##
## sign packages built in the previous stage. ##
## ##
#########################################################################
rpm_sign:
stage: sign_packages
dependencies:
- rpm_build
image:
name: artifactory.ops.ripple.com/centos:7
<<: *only_primary
before_script:
- |
# Make sure GnuPG is installed
yum -y install gnupg rpm-sign
# checking GPG signing support
if [ -n "$GPG_KEY_B64" ]; then
echo "$GPG_KEY_B64"| base64 -d | gpg --batch --no-tty --allow-secret-key-import --import -
unset GPG_KEY_B64
export GPG_PASSPHRASE=$(echo $GPG_KEY_PASS_B64 | base64 -di)
unset GPG_KEY_PASS_B64
export GPG_KEYID=$(gpg --with-colon --list-secret-keys | head -n1 | cut -d : -f 5)
else
echo -e "\033[0;31m****** GPG signing disabled ******\033[0m"
exit 1
fi
artifacts:
paths:
- build/rpm/packages/
script:
- ls -alh build/rpm/packages
- . ./Builds/containers/gitlab-ci/sign_package.sh rpm
dpkg_sign:
stage: sign_packages
dependencies:
- dpkg_build
image:
name: artifactory.ops.ripple.com/ubuntu:18.04
<<: *only_primary
before_script:
- |
# make sure we have GnuPG
apt update
apt install -y gpg dpkg-sig
# checking GPG signing support
if [ -n "$GPG_KEY_B64" ]; then
echo "$GPG_KEY_B64"| base64 -d | gpg --batch --no-tty --allow-secret-key-import --import -
unset GPG_KEY_B64
export GPG_PASSPHRASE=$(echo $GPG_KEY_PASS_B64 | base64 -di)
unset GPG_KEY_PASS_B64
export GPG_KEYID=$(gpg --with-colon --list-secret-keys | head -n1 | cut -d : -f 5)
else
echo -e "\033[0;31m****** GPG signing disabled ******\033[0m"
exit 1
fi
artifacts:
paths:
- build/dpkg/packages/
script:
- ls -alh build/dpkg/packages
- . ./Builds/containers/gitlab-ci/sign_package.sh dpkg
#########################################################################
## ##
## stage: smoketest ##
## ##
## install unsigned packages from previous step and run unit tests. ##
## ##
#########################################################################
centos_7_smoketest:
stage: smoketest
dependencies:
- rpm_build
- rpm_sign
image:
name: artifactory.ops.ripple.com/centos:7
<<: *run_local_smoketest
# TODO: Remove "allow_failure" when tests fixed
rocky_8_smoketest:
stage: smoketest
dependencies:
- rpm_build
- rpm_sign
image:
name: rockylinux/rockylinux:8
<<: *run_local_smoketest
allow_failure: true
fedora_34_smoketest:
stage: smoketest
dependencies:
- rpm_build
- rpm_sign
image:
name: artifactory.ops.ripple.com/fedora:34
<<: *run_local_smoketest
allow_failure: true
fedora_35_smoketest:
stage: smoketest
dependencies:
- rpm_build
- rpm_sign
image:
name: artifactory.ops.ripple.com/fedora:35
<<: *run_local_smoketest
allow_failure: true
ubuntu_18_smoketest:
stage: smoketest
dependencies:
- dpkg_build
- dpkg_sign
image:
name: artifactory.ops.ripple.com/ubuntu:18.04
<<: *run_local_smoketest
ubuntu_20_smoketest:
stage: smoketest
dependencies:
- dpkg_build
- dpkg_sign
image:
name: artifactory.ops.ripple.com/ubuntu:20.04
<<: *run_local_smoketest
# TODO: remove "allow_failure" when 22.04 released in 4/2022...
ubuntu_22_smoketest:
stage: smoketest
dependencies:
- dpkg_build
- dpkg_sign
image:
name: artifactory.ops.ripple.com/ubuntu:22.04
<<: *run_local_smoketest
allow_failure: true
debian_9_smoketest:
stage: smoketest
dependencies:
- dpkg_build
- dpkg_sign
image:
name: artifactory.ops.ripple.com/debian:9
<<: *run_local_smoketest
debian_10_smoketest:
stage: smoketest
dependencies:
- dpkg_build
- dpkg_sign
image:
name: artifactory.ops.ripple.com/debian:10
<<: *run_local_smoketest
debian_11_smoketest:
stage: smoketest
dependencies:
- dpkg_build
- dpkg_sign
image:
name: artifactory.ops.ripple.com/debian:11
<<: *run_local_smoketest
#########################################################################
## ##
## stage: verify_sig ##
## ##
## use git/gpg to verify that HEAD is signed by an approved ##
## committer. The whitelist of pubkeys is manually maintained ##
## and fetched from GIT_SIGN_PUBKEYS_URL (currently a snippet ##
## link). ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
verify_head_signed:
stage: verify_sig
image:
name: artifactory.ops.ripple.com/ubuntu:latest
<<: *only_primary
script:
- . ./Builds/containers/gitlab-ci/verify_head_commit.sh
#########################################################################
## ##
## stage: tag_images ##
## ##
## apply rippled version tag to containers from previous stage. ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
tag_bld_images:
stage: tag_images
variables:
docker_driver: overlay2
DOCKER_TLS_CERTDIR: ""
image:
name: artifactory.ops.ripple.com/docker:latest
services:
# workaround for TLS issues - consider going back
# to unversioned `dind` when issues are resolved
- name: artifactory.ops.ripple.com/docker:stable-dind
alias: docker
tags:
- large
dependencies:
- rpm_sign
- dpkg_sign
<<: *only_primary
script:
- . ./Builds/containers/gitlab-ci/tag_docker_image.sh
#########################################################################
## ##
## stage: push_to_test ##
## ##
## push packages to artifactory repositories (test) ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
push_test:
stage: push_to_test
variables:
DEB_REPO: "rippled-deb-test-mirror"
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/alpine:latest
artifacts:
paths:
- files.info
dependencies:
- rpm_sign
- dpkg_sign
<<: *only_primary
script:
- . ./Builds/containers/gitlab-ci/push_to_artifactory.sh "PUT" "."
#########################################################################
## ##
## stage: verify_from_test ##
## ##
## install/test packages from test repos. ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
centos_7_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/centos:7
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
rocky_8_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: rockylinux/rockylinux:8
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
allow_failure: true
fedora_34_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/fedora:34
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
allow_failure: true
fedora_35_verify_repo_test:
stage: verify_from_test
variables:
RPM_REPO: "rippled-rpm-test-mirror"
image:
name: artifactory.ops.ripple.com/fedora:35
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
allow_failure: true
ubuntu_18_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "bionic"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/ubuntu:18.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
ubuntu_20_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "focal"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/ubuntu:20.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
# TODO: remove "allow_failure" when 22.04 released in 4/2022...
ubuntu_22_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "jammy"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/ubuntu:22.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
allow_failure: true
debian_9_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "stretch"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/debian:9
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
debian_10_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "buster"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/debian:10
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
debian_11_verify_repo_test:
stage: verify_from_test
variables:
DISTRO: "bullseye"
DEB_REPO: "rippled-deb-test-mirror"
image:
name: artifactory.ops.ripple.com/debian:11
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
#########################################################################
## ##
## stage: wait_approval_prod ##
## ##
## wait for manual approval before proceeding to next stage ##
## which pushes to prod repo. ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
wait_before_push_prod:
stage: wait_approval_prod
image:
name: artifactory.ops.ripple.com/alpine:latest
<<: *only_primary
script:
- echo "proceeding to next stage"
when: manual
allow_failure: false
#########################################################################
## ##
## stage: push_to_prod ##
## ##
## push packages to artifactory repositories (prod) ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
push_prod:
variables:
DEB_REPO: "rippled-deb"
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/alpine:latest
stage: push_to_prod
artifacts:
paths:
- files.info
dependencies:
- rpm_sign
- dpkg_sign
<<: *only_primary
script:
- . ./Builds/containers/gitlab-ci/push_to_artifactory.sh "PUT" "."
#########################################################################
## ##
## stage: verify_from_prod ##
## ##
## install/test packages from prod repos. ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
centos_7_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/centos:7
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
rocky_8_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: rockylinux/rockylinux:8
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
allow_failure: true
fedora_34_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/fedora:34
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
allow_failure: true
fedora_35_verify_repo_prod:
stage: verify_from_prod
variables:
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/fedora:35
dependencies:
- rpm_sign
<<: *only_primary
<<: *run_repo_smoketest
allow_failure: true
ubuntu_18_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "bionic"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/ubuntu:18.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
ubuntu_20_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "focal"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/ubuntu:20.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
# TODO: remove "allow_failure" when 22.04 released in 4/2022...
ubuntu_22_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "jammy"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/ubuntu:22.04
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
allow_failure: true
debian_9_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "stretch"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/debian:9
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
debian_10_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "buster"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/debian:10
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
debian_11_verify_repo_prod:
stage: verify_from_prod
variables:
DISTRO: "bullseye"
DEB_REPO: "rippled-deb"
image:
name: artifactory.ops.ripple.com/debian:11
dependencies:
- dpkg_sign
<<: *only_primary
<<: *run_repo_smoketest
#########################################################################
## ##
## stage: get_final_hashes ##
## ##
## fetch final hashes from artifactory. ##
## ONLY RUNS FOR PRIMARY BRANCHES/REPO ##
## ##
#########################################################################
get_prod_hashes:
variables:
DEB_REPO: "rippled-deb"
RPM_REPO: "rippled-rpm"
image:
name: artifactory.ops.ripple.com/alpine:latest
stage: get_final_hashes
artifacts:
paths:
- files.info
dependencies:
- rpm_sign
- dpkg_sign
<<: *only_primary
script:
- . ./Builds/containers/gitlab-ci/push_to_artifactory.sh "GET" ".checksums"
#########################################################################
## ##
## stage: build_containers ##
## ##
## build containers from docker definitions. These containers are NOT ##
## used for the package build. This step is only used to ensure that ##
## the package build targets and files are still working properly. ##
## ##
#########################################################################
build_centos_container:
stage: build_containers
<<: *dind_param
script:
- . ./Builds/containers/gitlab-ci/build_container.sh rpm
allow_failure: true
build_ubuntu_container:
stage: build_containers
<<: *dind_param
script:
- . ./Builds/containers/gitlab-ci/build_container.sh dpkg
allow_failure: true

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env sh
set -e
action=$1
filter=$2
. ./Builds/containers/gitlab-ci/get_component.sh
apk add curl jq coreutils util-linux
TOPDIR=$(pwd)
# DPKG
cd $TOPDIR
cd build/dpkg/packages
CURLARGS="-sk -X${action} -urippled:${ARTIFACTORY_DEPLOY_KEY_RIPPLED}"
RIPPLED_PKG=$(ls rippled_*.deb)
RIPPLED_DEV_PKG=$(ls rippled-dev_*.deb)
RIPPLED_REPORTING_PKG=$(ls rippled-reporting_*.deb)
RIPPLED_DBG_PKG=$(ls rippled-dbgsym_*.deb)
RIPPLED_REPORTING_DBG_PKG=$(ls rippled-reporting-dbgsym_*.deb)
# TODO - where to upload src tgz?
RIPPLED_SRC=$(ls rippled_*.orig.tar.gz)
DEB_MATRIX=";deb.component=${COMPONENT};deb.architecture=amd64"
for dist in stretch buster bullseye bionic focal jammy; do
DEB_MATRIX="${DEB_MATRIX};deb.distribution=${dist}"
done
echo "{ \"debs\": {" > "${TOPDIR}/files.info"
for deb in ${RIPPLED_PKG} ${RIPPLED_DEV_PKG} ${RIPPLED_DBG_PKG} ${RIPPLED_REPORTING_PKG} ${RIPPLED_REPORTING_DBG_PKG}; do
# first item doesn't get a comma separator
if [ $deb != $RIPPLED_PKG ] ; then
echo "," >> "${TOPDIR}/files.info"
fi
echo "\"${deb}\"": | tee -a "${TOPDIR}/files.info"
ca="${CURLARGS}"
if [ "${action}" = "PUT" ] ; then
url="https://${ARTIFACTORY_HOST}/artifactory/${DEB_REPO}/pool/${COMPONENT}/${deb}${DEB_MATRIX}"
ca="${ca} -T${deb}"
elif [ "${action}" = "GET" ] ; then
url="https://${ARTIFACTORY_HOST}/artifactory/api/storage/${DEB_REPO}/pool/${COMPONENT}/${deb}"
fi
echo "file info request url --> ${url}"
eval "curl ${ca} \"${url}\"" | jq -M "${filter}" | tee -a "${TOPDIR}/files.info"
done
echo "}," >> "${TOPDIR}/files.info"
# RPM
cd $TOPDIR
cd build/rpm/packages
RIPPLED_PKG=$(ls rippled-[0-9]*.x86_64.rpm)
RIPPLED_DEV_PKG=$(ls rippled-devel*.rpm)
RIPPLED_DBG_PKG=$(ls rippled-debuginfo*.rpm)
RIPPLED_REPORTING_PKG=$(ls rippled-reporting*.rpm)
# TODO - where to upload src rpm ?
RIPPLED_SRC=$(ls rippled-[0-9]*.src.rpm)
echo "\"rpms\": {" >> "${TOPDIR}/files.info"
for rpm in ${RIPPLED_PKG} ${RIPPLED_DEV_PKG} ${RIPPLED_DBG_PKG} ${RIPPLED_REPORTING_PKG}; do
# first item doesn't get a comma separator
if [ $rpm != $RIPPLED_PKG ] ; then
echo "," >> "${TOPDIR}/files.info"
fi
echo "\"${rpm}\"": | tee -a "${TOPDIR}/files.info"
ca="${CURLARGS}"
if [ "${action}" = "PUT" ] ; then
url="https://${ARTIFACTORY_HOST}/artifactory/${RPM_REPO}/${COMPONENT}/"
ca="${ca} -T${rpm}"
elif [ "${action}" = "GET" ] ; then
url="https://${ARTIFACTORY_HOST}/artifactory/api/storage/${RPM_REPO}/${COMPONENT}/${rpm}"
fi
echo "file info request url --> ${url}"
eval "curl ${ca} \"${url}\"" | jq -M "${filter}" | tee -a "${TOPDIR}/files.info"
done
echo "}}" >> "${TOPDIR}/files.info"
jq '.' "${TOPDIR}/files.info" > "${TOPDIR}/files.info.tmp"
mv "${TOPDIR}/files.info.tmp" "${TOPDIR}/files.info"
if [ ! -z "${SLACK_NOTIFY_URL}" ] && [ "${action}" = "GET" ] ; then
# extract files.info content to variable and sanitize so it can
# be interpolated into a slack text field below
finfo=$(cat ${TOPDIR}/files.info | sed -e ':a' -e 'N' -e '$!ba' -e 's/\n/\\n/g' | sed -E 's/"/\\"/g')
# try posting file info to slack.
# can add channel field to payload if the
# default channel is incorrect. Get rid of
# newlines in payload json since slack doesn't accept them
CONTENT=$(tr -d '[\n]' <<JSON
payload={
"username": "GitlabCI",
"text": "The package build for branch \`${CI_COMMIT_REF_NAME}\` is complete. File hashes are: \`\`\`${finfo}\`\`\`",
"icon_emoji": ":package:"}
JSON
)
curl ${SLACK_NOTIFY_URL} --data-urlencode "${CONTENT}"
fi

View File

@@ -0,0 +1,38 @@
#!/usr/bin/env bash
set -eo pipefail
sign_dpkg() {
if [ -n "${GPG_KEYID}" ]; then
dpkg-sig \
-g "--no-tty --digest-algo 'sha512' --passphrase '${GPG_PASSPHRASE}' --pinentry-mode=loopback" \
-k "${GPG_KEYID}" \
--sign builder \
"build/dpkg/packages/*.deb"
fi
}
sign_rpm() {
if [ -n "${GPG_KEYID}" ] ; then
find build/rpm/packages -name "*.rpm" -exec bash -c '
echo "yes" | setsid rpm \
--define "_gpg_name ${GPG_KEYID}" \
--define "_signature gpg" \
--define "__gpg_check_password_cmd /bin/true" \
--define "__gpg_sign_cmd %{__gpg} gpg --batch --no-armor --digest-algo 'sha512' --passphrase '${GPG_PASSPHRASE}' --no-secmem-warning -u '%{_gpg_name}' --sign --detach-sign --output %{__signature_filename} %{__plaintext_filename}" \
--addsign '{} \;
fi
}
case "${1}" in
dpkg)
sign_dpkg
;;
rpm)
sign_rpm
;;
*)
echo "Usage: ${0} (dpkg|rpm)"
;;
esac

View File

@@ -0,0 +1,106 @@
#!/usr/bin/env sh
set -e
install_from=$1
use_private=${2:-0} # this option not currently needed by any CI scripts,
# reserved for possible future use
if [ "$use_private" -gt 0 ] ; then
REPO_ROOT="https://rippled:${ARTIFACTORY_DEPLOY_KEY_RIPPLED}@${ARTIFACTORY_HOST}/artifactory"
else
REPO_ROOT="${PUBLIC_REPO_ROOT}"
fi
. ./Builds/containers/gitlab-ci/get_component.sh
. /etc/os-release
case ${ID} in
ubuntu|debian)
pkgtype="dpkg"
;;
fedora|centos|rhel|scientific|rocky)
pkgtype="rpm"
;;
*)
echo "unrecognized distro!"
exit 1
;;
esac
# this script provides info variables about pkg version
. build/${pkgtype}/packages/build_vars
if [ "${pkgtype}" = "dpkg" ] ; then
# sometimes update fails and requires a cleanup
updateWithRetry()
{
if ! apt-get -y update ; then
rm -rvf /var/lib/apt/lists/*
apt-get -y clean
apt-get -y update
fi
}
if [ "${install_from}" = "repo" ] ; then
apt-get -y upgrade
updateWithRetry
apt-get -y install apt apt-transport-https ca-certificates coreutils util-linux wget gnupg
wget -q -O - "${REPO_ROOT}/api/gpg/key/public" | apt-key add -
echo "deb ${REPO_ROOT}/${DEB_REPO} ${DISTRO} ${COMPONENT}" >> /etc/apt/sources.list
updateWithRetry
# uncomment this next line if you want to see the available package versions
# apt-cache policy rippled
apt-get -y install rippled=${dpkg_full_version}
elif [ "${install_from}" = "local" ] ; then
# cached pkg install
updateWithRetry
apt-get -y install libprotobuf-dev libprotoc-dev protobuf-compiler libssl-dev
rm -f build/dpkg/packages/rippled-dbgsym*.*
dpkg --no-debsig -i build/dpkg/packages/*.deb
else
echo "unrecognized pkg source!"
exit 1
fi
else
yum -y update
if [ "${install_from}" = "repo" ] ; then
pkgs=("yum-utils coreutils util-linux")
if [ "$ID" = "rocky" ]; then
pkgs="${pkgs[@]/coreutils}"
fi
yum install -y $pkgs
REPOFILE="/etc/yum.repos.d/artifactory.repo"
echo "[Artifactory]" > ${REPOFILE}
echo "name=Artifactory" >> ${REPOFILE}
echo "baseurl=${REPO_ROOT}/${RPM_REPO}/${COMPONENT}/" >> ${REPOFILE}
echo "enabled=1" >> ${REPOFILE}
echo "gpgcheck=0" >> ${REPOFILE}
echo "gpgkey=${REPO_ROOT}/${RPM_REPO}/${COMPONENT}/repodata/repomd.xml.key" >> ${REPOFILE}
echo "repo_gpgcheck=1" >> ${REPOFILE}
yum -y update
# uncomment this next line if you want to see the available package versions
# yum --showduplicates list rippled
yum -y install ${rpm_version_release}
elif [ "${install_from}" = "local" ] ; then
# cached pkg install
pkgs=("yum-utils openssl-static zlib-static")
if [ "$ID" = "rocky" ]; then
sed -i 's/enabled=0/enabled=1/g' /etc/yum.repos.d/Rocky-PowerTools.repo
pkgs="${pkgs[@]/openssl-static}"
fi
yum install -y $pkgs
rm -f build/rpm/packages/rippled-debug*.rpm
rm -f build/rpm/packages/*.src.rpm
rpm -i build/rpm/packages/*.rpm
else
echo "unrecognized pkg source!"
exit 1
fi
fi
# verify installed version
INSTALLED=$(/opt/ripple/bin/rippled --version | awk '{print $NF}')
if [ "${rippled_version}" != "${INSTALLED}" ] ; then
echo "INSTALLED version ${INSTALLED} does not match ${rippled_version}"
exit 1
fi
# run unit tests
/opt/ripple/bin/rippled --unittest --unittest-jobs $(nproc)
/opt/ripple/bin/validator-keys --unittest

View File

@@ -0,0 +1,21 @@
#!/usr/bin/env sh
set -e
docker login -u rippled \
-p ${ARTIFACTORY_DEPLOY_KEY_RIPPLED} "${ARTIFACTORY_HUB}"
# this gives us rippled_version :
source build/rpm/packages/build_vars
docker pull "${ARTIFACTORY_HUB}/${RPM_CONTAINER_FULLNAME}"
docker pull "${ARTIFACTORY_HUB}/${DPKG_CONTAINER_FULLNAME}"
# tag/push two labels...one using the current rippled version and one just using "latest"
for label in ${rippled_version} latest ; do
docker tag \
"${ARTIFACTORY_HUB}/${RPM_CONTAINER_FULLNAME}" \
"${ARTIFACTORY_HUB}/${RPM_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
docker push \
"${ARTIFACTORY_HUB}/${RPM_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
docker tag \
"${ARTIFACTORY_HUB}/${DPKG_CONTAINER_FULLNAME}" \
"${ARTIFACTORY_HUB}/${DPKG_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
docker push \
"${ARTIFACTORY_HUB}/${DPKG_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
done

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env sh
set -ex
apt -y update
DEBIAN_FRONTEND="noninteractive" apt-get -y install tzdata
apt -y install software-properties-common curl git gnupg
curl -sk -o rippled-pubkeys.txt "${GIT_SIGN_PUBKEYS_URL}"
gpg --import rippled-pubkeys.txt
if git verify-commit HEAD; then
echo "git commit signature check passed"
else
echo "git commit signature check failed"
git log -n 5 --color \
--pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an> [%G?]%Creset' \
--abbrev-commit
exit 1
fi

View File

@@ -0,0 +1,101 @@
#!/usr/bin/env bash
set -ex
# make sure pkg source files are up to date with repo
cd /opt/rippled_bld/pkg
cp -fpru rippled/Builds/containers/packaging/dpkg/debian/. debian/
cp -fpu rippled/Builds/containers/shared/rippled*.service debian/
cp -fpu rippled/Builds/containers/shared/update_sources.sh .
source update_sources.sh
# Build the dpkg
#dpkg uses - as separator, so we need to change our -bN versions to tilde
RIPPLED_DPKG_VERSION=$(echo "${RIPPLED_VERSION}" | sed 's!-!~!g')
# TODO - decide how to handle the trailing/release
# version here (hardcoded to 1). Does it ever need to change?
RIPPLED_DPKG_FULL_VERSION="${RIPPLED_DPKG_VERSION}-1"
cd /opt/rippled_bld/pkg/rippled
if [[ -n $(git status --porcelain) ]]; then
git status
error "Unstaged changes in this repo - please commit first"
fi
git archive --format tar.gz --prefix rippled-${RIPPLED_DPKG_VERSION}/ -o ../rippled-${RIPPLED_DPKG_VERSION}.tar.gz HEAD
cd ..
# dpkg debmake would normally create this link, but we do it manually
ln -s ./rippled-${RIPPLED_DPKG_VERSION}.tar.gz rippled_${RIPPLED_DPKG_VERSION}.orig.tar.gz
tar xvf rippled-${RIPPLED_DPKG_VERSION}.tar.gz
cd rippled-${RIPPLED_DPKG_VERSION}
cp -pr ../debian .
# dpkg requires a changelog. We don't currently maintain
# a usable one, so let's just fake it with our current version
# TODO : not sure if the "unstable" will need to change for
# release packages (?)
NOWSTR=$(TZ=UTC date -R)
cat << CHANGELOG > ./debian/changelog
rippled (${RIPPLED_DPKG_FULL_VERSION}) unstable; urgency=low
* see RELEASENOTES
-- Ripple Labs Inc. <support@ripple.com> ${NOWSTR}
CHANGELOG
# PATH must be preserved for our more modern cmake in /opt/local
# TODO : consider allowing lintian to run in future ?
export DH_BUILD_DDEBS=1
export CC=gcc-8
export CXX=g++-8
debuild --no-lintian --preserve-envvar PATH --preserve-env -us -uc
rc=$?; if [[ $rc != 0 ]]; then
error "error building dpkg"
fi
cd ..
# copy artifacts
cp rippled-dev_${RIPPLED_DPKG_FULL_VERSION}_amd64.deb ${PKG_OUTDIR}
cp rippled-reporting_${RIPPLED_DPKG_FULL_VERSION}_amd64.deb ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.deb ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_FULL_VERSION}.dsc ${PKG_OUTDIR}
# dbgsym suffix is ddeb under newer debuild, but just deb under earlier
cp rippled-dbgsym_${RIPPLED_DPKG_FULL_VERSION}_amd64.* ${PKG_OUTDIR}
cp rippled-reporting-dbgsym_${RIPPLED_DPKG_FULL_VERSION}_amd64.* ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.changes ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.build ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_VERSION}.orig.tar.gz ${PKG_OUTDIR}
cp rippled_${RIPPLED_DPKG_FULL_VERSION}.debian.tar.xz ${PKG_OUTDIR}
# buildinfo is only generated by later versions of debuild
if [ -e rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.buildinfo ] ; then
cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.buildinfo ${PKG_OUTDIR}
fi
cat rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.changes
# extract the text in the .changes file that appears between
# Checksums-Sha256: ...
# and
# Files: ...
awk '/Checksums-Sha256:/{hit=1;next}/Files:/{hit=0}hit' \
rippled_${RIPPLED_DPKG_VERSION}-1_amd64.changes | \
sed -E 's!^[[:space:]]+!!' > shasums
DEB_SHA256=$(cat shasums | \
grep "rippled_${RIPPLED_DPKG_VERSION}-1_amd64.deb" | cut -d " " -f 1)
DBG_SHA256=$(cat shasums | \
grep "rippled-dbgsym_${RIPPLED_DPKG_VERSION}-1_amd64.*" | cut -d " " -f 1)
REPORTING_DBG_SHA256=$(cat shasums | \
grep "rippled-reporting-dbgsym_${RIPPLED_DPKG_VERSION}-1_amd64.*" | cut -d " " -f 1)
DEV_SHA256=$(cat shasums | \
grep "rippled-dev_${RIPPLED_DPKG_VERSION}-1_amd64.deb" | cut -d " " -f 1)
REPORTING_SHA256=$(cat shasums | \
grep "rippled-reporting_${RIPPLED_DPKG_VERSION}-1_amd64.deb" | cut -d " " -f 1)
SRC_SHA256=$(cat shasums | \
grep "rippled_${RIPPLED_DPKG_VERSION}.orig.tar.gz" | cut -d " " -f 1)
echo "deb_sha256=${DEB_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "dbg_sha256=${DBG_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "dev_sha256=${DEV_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "reporting_sha256=${REPORTING_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "reporting_dbg_sha256=${REPORTING_DBG_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "src_sha256=${SRC_SHA256}" >> ${PKG_OUTDIR}/build_vars
echo "rippled_version=${RIPPLED_VERSION}" >> ${PKG_OUTDIR}/build_vars
echo "dpkg_version=${RIPPLED_DPKG_VERSION}" >> ${PKG_OUTDIR}/build_vars
echo "dpkg_full_version=${RIPPLED_DPKG_FULL_VERSION}" >> ${PKG_OUTDIR}/build_vars

View File

@@ -0,0 +1,3 @@
rippled daemon
-- Mike Ellery <mellery451@gmail.com> Tue, 04 Dec 2018 18:19:03 +0000

View File

@@ -0,0 +1 @@
10

View File

@@ -0,0 +1,27 @@
Source: rippled
Section: misc
Priority: extra
Maintainer: Ripple Labs Inc. <support@ripple.com>
Build-Depends: cmake, debhelper (>=9), zlib1g-dev, dh-systemd, ninja-build
Standards-Version: 3.9.7
Homepage: http://ripple.com/
Package: rippled
Architecture: any
Multi-Arch: foreign
Depends: ${misc:Depends}, ${shlibs:Depends}
Description: rippled daemon
Package: rippled-reporting
Architecture: any
Multi-Arch: foreign
Depends: ${misc:Depends}, ${shlibs:Depends}
Description: rippled reporting daemon
Package: rippled-dev
Section: devel
Recommends: rippled (= ${binary:Version})
Architecture: any
Multi-Arch: same
Depends: ${misc:Depends}, ${shlibs:Depends}, libprotobuf-dev, libprotoc-dev, protobuf-compiler
Description: development files for applications using xrpl core library (serialize + sign)

View File

@@ -0,0 +1,86 @@
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: rippled
Source: https://github.com/ripple/rippled
Files: *
Copyright: 2012-2019 Ripple Labs Inc.
License: __UNKNOWN__
The accompanying files under various copyrights.
Copyright (c) 2012, 2013, 2014 Ripple Labs Inc.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
The accompanying files incorporate work covered by the following copyright
and previous license notice:
Copyright (c) 2011 Arthur Britto, David Schwartz, Jed McCaleb,
Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant
Some code from Raw Material Software, Ltd., provided under the terms of the
ISC License. See the corresponding source files for more details.
Copyright (c) 2013 - Raw Material Software Ltd.
Please visit http://www.juce.com
Some code from ASIO examples:
// Copyright (c) 2003-2011 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
Some code from Bitcoin:
// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2011 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file license.txt or http://www.opensource.org/licenses/mit-license.php.
Some code from Tom Wu:
This software is covered under the following copyright:
/*
* Copyright (c) 2003-2005 Tom Wu
* All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
* EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
* WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
*
* IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
* INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
* RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
* THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
* OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* In addition, the following condition applies:
*
* All redistributions must retain an intact copy of this copyright notice
* and disclaimer.
*/
Address all questions regarding this license to:
Tom Wu
tjw@cs.Stanford.EDU

View File

@@ -0,0 +1,3 @@
/var/log/rippled/
/var/lib/rippled/
/etc/systemd/system/rippled.service.d/

View File

@@ -0,0 +1,3 @@
README.md
LICENSE.md
RELEASENOTES.md

View File

@@ -0,0 +1,3 @@
opt/ripple/include
opt/ripple/lib/*.a
opt/ripple/lib/cmake/ripple

View File

@@ -0,0 +1,3 @@
/var/log/rippled-reporting/
/var/lib/rippled-reporting/
/etc/systemd/system/rippled-reporting.service.d/

View File

@@ -0,0 +1,8 @@
bld/rippled-reporting/rippled-reporting opt/rippled-reporting/bin
cfg/rippled-reporting.cfg opt/rippled-reporting/etc
debian/tmp/opt/rippled-reporting/etc/validators.txt opt/rippled-reporting/etc
opt/rippled-reporting/bin/update-rippled-reporting.sh
opt/rippled-reporting/bin/getRippledReportingInfo
opt/rippled-reporting/etc/update-rippled-reporting-cron
etc/logrotate.d/rippled-reporting

View File

@@ -0,0 +1,3 @@
opt/rippled-reporting/etc/rippled-reporting.cfg etc/opt/rippled-reporting/rippled-reporting.cfg
opt/rippled-reporting/etc/validators.txt etc/opt/rippled-reporting/validators.txt
opt/rippled-reporting/bin/rippled-reporting usr/local/bin/rippled-reporting

View File

@@ -0,0 +1,33 @@
#!/bin/sh
set -e
USER_NAME=rippled-reporting
GROUP_NAME=rippled-reporting
case "$1" in
configure)
id -u $USER_NAME >/dev/null 2>&1 || \
adduser --system --quiet \
--home /nonexistent --no-create-home \
--disabled-password \
--group "$GROUP_NAME"
chown -R $USER_NAME:$GROUP_NAME /var/log/rippled-reporting/
chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled-reporting/
chmod 755 /var/log/rippled-reporting/
chmod 755 /var/lib/rippled-reporting/
chown -R $USER_NAME:$GROUP_NAME /opt/rippled-reporting
;;
abort-upgrade|abort-remove|abort-deconfigure)
;;
*)
echo "postinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@@ -0,0 +1,2 @@
/opt/ripple/etc/rippled.cfg
/opt/ripple/etc/validators.txt

View File

@@ -0,0 +1,8 @@
opt/ripple/bin/rippled
opt/ripple/bin/validator-keys
opt/ripple/bin/update-rippled.sh
opt/ripple/bin/getRippledInfo
opt/ripple/etc/rippled.cfg
opt/ripple/etc/validators.txt
opt/ripple/etc/update-rippled-cron
etc/logrotate.d/rippled

View File

@@ -0,0 +1,3 @@
opt/ripple/etc/rippled.cfg etc/opt/ripple/rippled.cfg
opt/ripple/etc/validators.txt etc/opt/ripple/validators.txt
opt/ripple/bin/rippled usr/local/bin/rippled

View File

@@ -0,0 +1,35 @@
#!/bin/sh
set -e
USER_NAME=rippled
GROUP_NAME=rippled
case "$1" in
configure)
id -u $USER_NAME >/dev/null 2>&1 || \
adduser --system --quiet \
--home /nonexistent --no-create-home \
--disabled-password \
--group "$GROUP_NAME"
chown -R $USER_NAME:$GROUP_NAME /var/log/rippled/
chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled/
chown -R $USER_NAME:$GROUP_NAME /opt/ripple
chmod 755 /var/log/rippled/
chmod 755 /var/lib/rippled/
chmod 644 /opt/ripple/etc/update-rippled-cron
chmod 644 /etc/logrotate.d/rippled
chown -R root:$GROUP_NAME /opt/ripple/etc/update-rippled-cron
;;
abort-upgrade|abort-remove|abort-deconfigure)
;;
*)
echo "postinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@@ -0,0 +1,17 @@
#!/bin/sh
set -e
case "$1" in
purge|remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
;;
*)
echo "postrm called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@@ -0,0 +1,20 @@
#!/bin/sh
set -e
case "$1" in
install|upgrade)
;;
abort-upgrade)
;;
*)
echo "preinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@@ -0,0 +1,20 @@
#!/bin/sh
set -e
case "$1" in
remove|upgrade|deconfigure)
;;
failed-upgrade)
;;
*)
echo "prerm called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@@ -0,0 +1,61 @@
#!/usr/bin/make -f
export DH_VERBOSE = 1
export DH_OPTIONS = -v
# debuild sets some warnings that don't work well
# for our current build, so try to remove those flags here:
export CFLAGS:=$(subst -Wformat,,$(CFLAGS))
export CFLAGS:=$(subst -Werror=format-security,,$(CFLAGS))
export CXXFLAGS:=$(subst -Wformat,,$(CXXFLAGS))
export CXXFLAGS:=$(subst -Werror=format-security,,$(CXXFLAGS))
%:
dh $@ --with systemd
override_dh_systemd_start:
dh_systemd_start --no-restart-on-upgrade
override_dh_auto_configure:
env
rm -rf bld && mkdir -p bld/rippled
cd bld/rippled && \
cmake ../.. -G Ninja \
-DCMAKE_INSTALL_PREFIX=/opt/ripple \
-DCMAKE_BUILD_TYPE=Release \
-Dstatic=ON \
-Dunity=OFF \
-Dvalidator_keys=ON \
-Dunity=OFF \
-DCMAKE_VERBOSE_MAKEFILE=OFF
cmake -S . \
-B bld/rippled-reporting \
-G Ninja \
-DCMAKE_INSTALL_PREFIX=/opt/rippled-reporting \
-DCMAKE_BUILD_TYPE=Release \
-Dstatic=ON \
-Dunity=OFF \
-DCMAKE_VERBOSE_MAKEFILE=OFF \
-Dreporting=ON
override_dh_auto_build:
cmake --build bld/rippled --target rippled --target validator-keys --parallel
cmake --build bld/rippled-reporting --target rippled --parallel
override_dh_auto_install:
cmake --install bld/rippled --prefix debian/tmp/opt/ripple
install -D bld/rippled/validator-keys/validator-keys debian/tmp/opt/ripple/bin/validator-keys
install -D Builds/containers/shared/update-rippled.sh debian/tmp/opt/ripple/bin/update-rippled.sh
install -D bin/getRippledInfo debian/tmp/opt/ripple/bin/getRippledInfo
install -D Builds/containers/shared/update-rippled-cron debian/tmp/opt/ripple/etc/update-rippled-cron
install -D Builds/containers/shared/rippled-logrotate debian/tmp/etc/logrotate.d/rippled
rm -rf debian/tmp/opt/ripple/lib64/cmake/date
mkdir -p debian/tmp/opt/rippled-reporting/etc
cp cfg/validators-example.txt debian/tmp/opt/rippled-reporting/etc/validators.txt
install -D bld/rippled/validator-keys/validator-keys debian/tmp/opt/rippled-reporting/bin/validator-keys
sed -E 's/rippled?/rippled-reporting/g' Builds/containers/shared/update-rippled.sh > debian/tmp/opt/rippled-reporting/bin/update-rippled-reporting.sh
sed -E 's/rippled?/rippled-reporting/g' bin/getRippledInfo > debian/tmp/opt/rippled-reporting/bin/getRippledReportingInfo
sed -E 's/rippled?/rippled-reporting/g' Builds/containers/shared/update-rippled-cron > debian/tmp/opt/rippled-reporting/etc/update-rippled-reporting-cron
sed -E 's/rippled?/rippled-reporting/g' Builds/containers/shared/rippled-logrotate > debian/tmp/etc/logrotate.d/rippled-reporting

View File

@@ -0,0 +1 @@
3.0 (quilt)

View File

@@ -0,0 +1,2 @@
#abort-on-upstream-changes
#unapply-patches

View File

@@ -0,0 +1 @@
enable rippled-reporting.service

View File

@@ -0,0 +1 @@
enable rippled.service

View File

@@ -0,0 +1,79 @@
#!/usr/bin/env bash
set -ex
cd /opt/rippled_bld/pkg
cp -fpu rippled/Builds/containers/packaging/rpm/rippled.spec .
cp -fpu rippled/Builds/containers/shared/update_sources.sh .
source update_sources.sh
# Build the rpm
IFS='-' read -r RIPPLED_RPM_VERSION RELEASE <<< "$RIPPLED_VERSION"
export RIPPLED_RPM_VERSION
RPM_RELEASE=${RPM_RELEASE-1}
# post-release version
if [ "hf" = "$(echo "$RELEASE" | cut -c -2)" ]; then
RPM_RELEASE="${RPM_RELEASE}.${RELEASE}"
# pre-release version (-b or -rc)
elif [[ $RELEASE ]]; then
RPM_RELEASE="0.${RPM_RELEASE}.${RELEASE}"
fi
export RPM_RELEASE
if [[ $RPM_PATCH ]]; then
RPM_PATCH=".${RPM_PATCH}"
export RPM_PATCH
fi
cd /opt/rippled_bld/pkg/rippled
if [[ -n $(git status --porcelain) ]]; then
git status
error "Unstaged changes in this repo - please commit first"
fi
git archive --format tar.gz --prefix rippled/ -o ../rpmbuild/SOURCES/rippled.tar.gz HEAD
# TODO include validator-keys sources
cd ..
source /opt/rh/devtoolset-8/enable
rpmbuild --define "_topdir ${PWD}/rpmbuild" -ba rippled.spec
rc=$?; if [[ $rc != 0 ]]; then
error "error building rpm"
fi
# Make a tar of the rpm and source rpm
RPM_VERSION_RELEASE=$(rpm -qp --qf='%{NAME}-%{VERSION}-%{RELEASE}' ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm)
tar_file=$RPM_VERSION_RELEASE.tar.gz
cp ./rpmbuild/RPMS/x86_64/* ${PKG_OUTDIR}
cp ./rpmbuild/SRPMS/* ${PKG_OUTDIR}
RPM_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm 2>/dev/null)
DBG_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-debuginfo*.rpm 2>/dev/null)
DEV_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-devel*.rpm 2>/dev/null)
REP_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-reporting*.rpm 2>/dev/null)
SRC_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/SRPMS/*.rpm 2>/dev/null)
RPM_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm | awk '{ print $1}')"
DBG_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-debuginfo*.rpm | awk '{ print $1}')"
REP_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-reporting*.rpm | awk '{ print $1}')"
DEV_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-devel*.rpm | awk '{ print $1}')"
SRC_SHA256="$(sha256sum ./rpmbuild/SRPMS/*.rpm | awk '{ print $1}')"
echo "rpm_md5sum=$RPM_MD5SUM" > ${PKG_OUTDIR}/build_vars
echo "rep_md5sum=$REP_MD5SUM" >> ${PKG_OUTDIR}/build_vars
echo "dbg_md5sum=$DBG_MD5SUM" >> ${PKG_OUTDIR}/build_vars
echo "dev_md5sum=$DEV_MD5SUM" >> ${PKG_OUTDIR}/build_vars
echo "src_md5sum=$SRC_MD5SUM" >> ${PKG_OUTDIR}/build_vars
echo "rpm_sha256=$RPM_SHA256" >> ${PKG_OUTDIR}/build_vars
echo "rep_sha256=$REP_SHA256" >> ${PKG_OUTDIR}/build_vars
echo "dbg_sha256=$DBG_SHA256" >> ${PKG_OUTDIR}/build_vars
echo "dev_sha256=$DEV_SHA256" >> ${PKG_OUTDIR}/build_vars
echo "src_sha256=$SRC_SHA256" >> ${PKG_OUTDIR}/build_vars
echo "rippled_version=$RIPPLED_VERSION" >> ${PKG_OUTDIR}/build_vars
echo "rpm_version=$RIPPLED_RPM_VERSION" >> ${PKG_OUTDIR}/build_vars
echo "rpm_file_name=$tar_file" >> ${PKG_OUTDIR}/build_vars
echo "rpm_version_release=$RPM_VERSION_RELEASE" >> ${PKG_OUTDIR}/build_vars

View File

@@ -0,0 +1,182 @@
%define rippled_version %(echo $RIPPLED_RPM_VERSION)
%define rpm_release %(echo $RPM_RELEASE)
%define rpm_patch %(echo $RPM_PATCH)
%define _prefix /opt/ripple
Name: rippled
# Dashes in Version extensions must be converted to underscores
Version: %{rippled_version}
Release: %{rpm_release}%{?dist}%{rpm_patch}
Summary: rippled daemon
License: MIT
URL: http://ripple.com/
Source0: rippled.tar.gz
BuildRequires: cmake zlib-static ninja-build
%description
rippled
%package devel
Summary: Files for development of applications using xrpl core library
Group: Development/Libraries
Requires: zlib-static
%description devel
core library for development of standalone applications that sign transactions.
%package reporting
Summary: Reporting Server for rippled
%description reporting
History server for XRP Ledger
%prep
%setup -c -n rippled
%build
cd rippled
mkdir -p bld.rippled
pushd bld.rippled
cmake .. -G Ninja -DCMAKE_INSTALL_PREFIX=%{_prefix} -DCMAKE_BUILD_TYPE=Release -Dunity=OFF -Dstatic=true -DCMAKE_VERBOSE_MAKEFILE=OFF -Dvalidator_keys=ON
cmake --build . --parallel $(nproc) --target rippled --target validator-keys
popd
mkdir -p bld.rippled-reporting
cd bld.rippled-reporting
cmake .. -G Ninja -DCMAKE_INSTALL_PREFIX=%{_prefix}-reporting -DCMAKE_BUILD_TYPE=Release -Dunity=OFF -Dstatic=true -DCMAKE_VERBOSE_MAKEFILE=OFF -Dreporting=ON
cmake --build . --parallel $(nproc) --target rippled
%pre
test -e /etc/pki/tls || { mkdir -p /etc/pki; ln -s /usr/lib/ssl /etc/pki/tls; }
%install
rm -rf $RPM_BUILD_ROOT
DESTDIR=$RPM_BUILD_ROOT cmake --build rippled/bld.rippled --target install -- -v
rm -rf ${RPM_BUILD_ROOT}/%{_prefix}/lib64/cmake/date
install -d ${RPM_BUILD_ROOT}/etc/opt/ripple
install -d ${RPM_BUILD_ROOT}/usr/local/bin
ln -s %{_prefix}/etc/rippled.cfg ${RPM_BUILD_ROOT}/etc/opt/ripple/rippled.cfg
ln -s %{_prefix}/etc/validators.txt ${RPM_BUILD_ROOT}/etc/opt/ripple/validators.txt
ln -s %{_prefix}/bin/rippled ${RPM_BUILD_ROOT}/usr/local/bin/rippled
install -D rippled/bld.rippled/validator-keys/validator-keys ${RPM_BUILD_ROOT}%{_bindir}/validator-keys
install -D ./rippled/Builds/containers/shared/rippled.service ${RPM_BUILD_ROOT}/usr/lib/systemd/system/rippled.service
install -D ./rippled/Builds/containers/packaging/rpm/50-rippled.preset ${RPM_BUILD_ROOT}/usr/lib/systemd/system-preset/50-rippled.preset
install -D ./rippled/Builds/containers/shared/update-rippled.sh ${RPM_BUILD_ROOT}%{_bindir}/update-rippled.sh
install -D ./rippled/bin/getRippledInfo ${RPM_BUILD_ROOT}%{_bindir}/getRippledInfo
install -D ./rippled/Builds/containers/shared/update-rippled-cron ${RPM_BUILD_ROOT}%{_prefix}/etc/update-rippled-cron
install -D ./rippled/Builds/containers/shared/rippled-logrotate ${RPM_BUILD_ROOT}/etc/logrotate.d/rippled
install -d $RPM_BUILD_ROOT/var/log/rippled
install -d $RPM_BUILD_ROOT/var/lib/rippled
# reporting mode
%define _prefix /opt/rippled-reporting
mkdir -p ${RPM_BUILD_ROOT}/etc/opt/rippled-reporting/
install -D rippled/bld.rippled-reporting/rippled-reporting ${RPM_BUILD_ROOT}%{_bindir}/rippled-reporting
install -D ./rippled/cfg/rippled-reporting.cfg ${RPM_BUILD_ROOT}%{_prefix}/etc/rippled-reporting.cfg
install -D ./rippled/cfg/validators-example.txt ${RPM_BUILD_ROOT}%{_prefix}/etc/validators.txt
install -D ./rippled/Builds/containers/packaging/rpm/50-rippled-reporting.preset ${RPM_BUILD_ROOT}/usr/lib/systemd/system-preset/50-rippled-reporting.preset
ln -s %{_prefix}/bin/rippled-reporting ${RPM_BUILD_ROOT}/usr/local/bin/rippled-reporting
ln -s %{_prefix}/etc/rippled-reporting.cfg ${RPM_BUILD_ROOT}/etc/opt/rippled-reporting/rippled-reporting.cfg
ln -s %{_prefix}/etc/validators.txt ${RPM_BUILD_ROOT}/etc/opt/rippled-reporting/validators.txt
install -d $RPM_BUILD_ROOT/var/log/rippled-reporting
install -d $RPM_BUILD_ROOT/var/lib/rippled-reporting
install -D ./rippled/Builds/containers/shared/rippled-reporting.service ${RPM_BUILD_ROOT}/usr/lib/systemd/system/rippled-reporting.service
sed -E 's/rippled?/rippled-reporting/g' ./rippled/Builds/containers/shared/update-rippled.sh > ${RPM_BUILD_ROOT}%{_bindir}/update-rippled-reporting.sh
sed -E 's/rippled?/rippled-reporting/g' ./rippled/bin/getRippledInfo > ${RPM_BUILD_ROOT}%{_bindir}/getRippledReportingInfo
sed -E 's/rippled?/rippled-reporting/g' ./rippled/Builds/containers/shared/update-rippled-cron > ${RPM_BUILD_ROOT}%{_prefix}/etc/update-rippled-reporting-cron
sed -E 's/rippled?/rippled-reporting/g' ./rippled/Builds/containers/shared/rippled-logrotate > ${RPM_BUILD_ROOT}/etc/logrotate.d/rippled-reporting
%post
%define _prefix /opt/ripple
USER_NAME=rippled
GROUP_NAME=rippled
getent passwd $USER_NAME &>/dev/null || useradd $USER_NAME
getent group $GROUP_NAME &>/dev/null || groupadd $GROUP_NAME
chown -R $USER_NAME:$GROUP_NAME /var/log/rippled/
chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled/
chown -R $USER_NAME:$GROUP_NAME %{_prefix}/
chmod 755 /var/log/rippled/
chmod 755 /var/lib/rippled/
chmod 644 %{_prefix}/etc/update-rippled-cron
chmod 644 /etc/logrotate.d/rippled
chown -R root:$GROUP_NAME %{_prefix}/etc/update-rippled-cron
%post reporting
%define _prefix /opt/rippled-reporting
USER_NAME=rippled-reporting
GROUP_NAME=rippled-reporting
getent passwd $USER_NAME &>/dev/null || useradd -r $USER_NAME
getent group $GROUP_NAME &>/dev/null || groupadd $GROUP_NAME
chown -R $USER_NAME:$GROUP_NAME /var/log/rippled-reporting/
chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled-reporting/
chown -R $USER_NAME:$GROUP_NAME %{_prefix}/
chmod 755 /var/log/rippled-reporting/
chmod 755 /var/lib/rippled-reporting/
chmod -x /usr/lib/systemd/system/rippled-reporting.service
%files
%define _prefix /opt/ripple
%doc rippled/README.md rippled/LICENSE.md
%{_bindir}/rippled
/usr/local/bin/rippled
%{_bindir}/update-rippled.sh
%{_bindir}/getRippledInfo
%{_prefix}/etc/update-rippled-cron
%{_bindir}/validator-keys
%config(noreplace) %{_prefix}/etc/rippled.cfg
%config(noreplace) /etc/opt/ripple/rippled.cfg
%config(noreplace) %{_prefix}/etc/validators.txt
%config(noreplace) /etc/opt/ripple/validators.txt
%config(noreplace) /etc/logrotate.d/rippled
%config(noreplace) /usr/lib/systemd/system/rippled.service
%config(noreplace) /usr/lib/systemd/system-preset/50-rippled.preset
%dir /var/log/rippled/
%dir /var/lib/rippled/
%files devel
%{_prefix}/include
%{_prefix}/lib/*.a
%{_prefix}/lib/cmake/ripple
%files reporting
%define _prefix /opt/rippled-reporting
%doc rippled/README.md rippled/LICENSE.md
%{_bindir}/rippled-reporting
/usr/local/bin/rippled-reporting
%config(noreplace) /etc/opt/rippled-reporting/rippled-reporting.cfg
%config(noreplace) %{_prefix}/etc/rippled-reporting.cfg
%config(noreplace) %{_prefix}/etc/validators.txt
%config(noreplace) /etc/opt/rippled-reporting/validators.txt
%config(noreplace) /usr/lib/systemd/system/rippled-reporting.service
%config(noreplace) /usr/lib/systemd/system-preset/50-rippled-reporting.preset
%dir /var/log/rippled-reporting/
%dir /var/lib/rippled-reporting/
%{_bindir}/update-rippled-reporting.sh
%{_bindir}/getRippledReportingInfo
%{_prefix}/etc/update-rippled-reporting-cron
%config(noreplace) /etc/logrotate.d/rippled-reporting
%changelog
* Wed Aug 28 2019 Mike Ellery <mellery451@gmail.com>
- Switch to subproject build for validator-keys
* Wed May 15 2019 Mike Ellery <mellery451@gmail.com>
- Make validator-keys use local rippled build for core lib
* Wed Aug 01 2018 Mike Ellery <mellery451@gmail.com>
- add devel package for signing library
* Thu Jun 02 2016 Brandon Wilson <bwilson@ripple.com>
- Install validators.txt

View File

@@ -0,0 +1,147 @@
#!/usr/bin/env bash
set -ex
function build_boost()
{
local boost_ver=$1
local do_link=$2
local boost_path=$(echo "${boost_ver}" | sed -e 's!\.!_!g')
mkdir -p /opt/local
cd /opt/local
BOOST_ROOT=/opt/local/boost_${boost_path}
BOOST_URL="https://boostorg.jfrog.io/artifactory/main/release/${boost_ver}/source/boost_${boost_path}.tar.gz"
BOOST_BUILD_ALL=true
. /tmp/install_boost.sh
if [ "$do_link" = true ] ; then
ln -s ./boost_${boost_path} boost
fi
}
build_boost "1.70.0" true
# installed in opt, so won't be used
# unless specified by OPENSSL_ROOT_DIR
cd /tmp
OPENSSL_VER=1.1.1d
wget https://www.openssl.org/source/openssl-${OPENSSL_VER}.tar.gz
tar xf openssl-${OPENSSL_VER}.tar.gz
cd openssl-${OPENSSL_VER}
# NOTE: add -g to the end of the following line if we want debug symbols for openssl
SSLDIR=$(openssl version -d | cut -d: -f2 | tr -d [:space:]\")
./config -fPIC --prefix=/opt/local/openssl --openssldir=${SSLDIR} zlib shared
make -j$(nproc) >> make_output.txt 2>&1
make install >> make_output.txt 2>&1
cd ..
rm -f openssl-${OPENSSL_VER}.tar.gz
rm -rf openssl-${OPENSSL_VER}
LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-}:/opt/local/openssl/lib /opt/local/openssl/bin/openssl version -a
cd /tmp
wget https://libarchive.org/downloads/libarchive-3.4.1.tar.gz
tar xzf libarchive-3.4.1.tar.gz
cd libarchive-3.4.1
mkdir _bld && cd _bld
cmake -DCMAKE_BUILD_TYPE=Release ..
make -j$(nproc) >> make_output.txt 2>&1
make install >> make_output.txt 2>&1
cd ../..
rm -f libarchive-3.4.1.tar.gz
rm -rf libarchive-3.4.1
cd /tmp
wget https://github.com/protocolbuffers/protobuf/releases/download/v3.10.1/protobuf-all-3.10.1.tar.gz
tar xf protobuf-all-3.10.1.tar.gz
cd protobuf-3.10.1
./autogen.sh
./configure
make -j$(nproc) >> make_output.txt 2>&1
make install >> make_output.txt 2>&1
ldconfig
cd ..
rm -f protobuf-all-3.10.1.tar.gz
rm -rf protobuf-3.10.1
cd /tmp
wget https://c-ares.haxx.se/download/c-ares-1.15.0.tar.gz
tar xf c-ares-1.15.0.tar.gz
cd c-ares-1.15.0
mkdir _bld && cd _bld
cmake \
-DHAVE_LIBNSL=OFF \
-DCMAKE_BUILD_TYPE=Release \
-DCARES_STATIC=ON \
-DCARES_SHARED=OFF \
-DCARES_INSTALL=ON \
-DCARES_STATIC_PIC=ON \
-DCARES_BUILD_TOOLS=OFF \
-DCARES_BUILD_TESTS=OFF \
-DCARES_BUILD_CONTAINER_TESTS=OFF \
..
make -j$(nproc) >> make_output.txt 2>&1
make install >> make_output.txt 2>&1
cd ../..
rm -f c-ares-1.15.0.tar.gz
rm -rf c-ares-1.15.0
cd /tmp
wget https://github.com/grpc/grpc/archive/v1.25.0.tar.gz
tar xf v1.25.0.tar.gz
cd grpc-1.25.0
mkdir _bld && cd _bld
cmake \
-DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=OFF \
-DgRPC_ZLIB_PROVIDER=package \
-DgRPC_CARES_PROVIDER=package \
-DgRPC_SSL_PROVIDER=package \
-DgRPC_PROTOBUF_PROVIDER=package \
-DProtobuf_USE_STATIC_LIBS=ON \
..
make -j$(nproc) >> make_output.txt 2>&1
make install >> make_output.txt 2>&1
cd ../..
rm -f v1.25.0.tar.gz
rm -rf grpc-1.25.0
if [ "${CI_USE}" = true ] ; then
build_boost "1.71.0" false
cd /tmp
wget https://github.com/doxygen/doxygen/archive/Release_1_8_16.tar.gz
tar xf Release_1_8_16.tar.gz
cd doxygen-Release_1_8_16
mkdir build
cd build
cmake -G "Unix Makefiles" ..
make -j$(nproc) >> make_output.txt 2>&1
make install >> make_output.txt 2>&1
cd ../..
rm -f Release_1_8_16.tar.gz
rm -rf doxygen-Release_1_8_16
mkdir -p /opt/plantuml
wget -O /opt/plantuml/plantuml.jar https://downloads.sourceforge.net/project/plantuml/plantuml.jar
cd /tmp
wget https://github.com/linux-test-project/lcov/releases/download/v1.14/lcov-1.14.tar.gz
tar xfz lcov-1.14.tar.gz
cd lcov-1.14
make install PREFIX=/usr/local
cd ..
rm -r lcov-1.14 lcov-1.14.tar.gz
cd /tmp
wget https://github.com/ccache/ccache/releases/download/v3.7.6/ccache-3.7.6.tar.gz
tar xf ccache-3.7.6.tar.gz
cd ccache-3.7.6
./configure --prefix=/usr/local
make >> make_output.txt 2>&1
make install >> make_output.txt 2>&1
cd ..
rm -f ccache-3.7.6.tar.gz
rm -rf ccache-3.7.6
pip install requests
pip install https://github.com/codecov/codecov-python/archive/master.zip
fi

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env bash
# Assumptions:
# 1) BOOST_ROOT and BOOST_URL are already defined,
# and contain valid values. BOOST_URL2 may be defined
# as a fallback. BOOST_WGET_OPTIONS may be defined with
# retry options if the download(s) fail on the first try.
# 2) The last name part of BOOST_ROOT matches the
# folder name internal to boost's .tar.gz
# When testing you can force a boost build by clearing travis caches:
# https://travis-ci.org/ripple/rippled/caches
set -exu
odir=$(pwd)
: ${BOOST_TOOLSET:=msvc-14.1}
if [[ -d "$BOOST_ROOT/lib" || -d "${BOOST_ROOT}/stage/lib" ]] ; then
echo "Using cached boost at $BOOST_ROOT"
exit
fi
#fetch/unpack:
fn=$(basename -- "$BOOST_URL")
ext="${fn##*.}"
wopt="--quiet"
wget ${wopt} $BOOST_URL -O /tmp/boost.tar.${ext} || \
( [ -n "${BOOST_URL2}" ] && \
wget ${wopt} $BOOST_URL2 -O /tmp/boost.tar.${ext} ) || \
( [ -n "${BOOST_WGET_OPTIONS}" ] &&
( wget ${wopt} ${BOOST_WGET_OPTIONS} $BOOST_URL -O /tmp/boost.tar.${ext} || \
( [ -n "${BOOST_URL2}" ] && \
wget ${wopt} ${BOOST_WGET_OPTIONS} $BOOST_URL2 -O /tmp/boost.tar.${ext} )
)
)
cd $(dirname $BOOST_ROOT)
rm -fr ${BOOST_ROOT}
mkdir ${BOOST_ROOT}
tar xf /tmp/boost.tar.${ext} -C ${BOOST_ROOT} --strip-components 1
cd $BOOST_ROOT
BLDARGS=()
if [[ ${BOOST_BUILD_ALL:-false} == "true" ]]; then
# we never need boost-python...so even for ALL
# option we can skip it
BLDARGS+=(--without-python)
else
BLDARGS+=(--with-chrono)
BLDARGS+=(--with-container)
BLDARGS+=(--with-context)
BLDARGS+=(--with-coroutine)
BLDARGS+=(--with-date_time)
BLDARGS+=(--with-filesystem)
BLDARGS+=(--with-program_options)
BLDARGS+=(--with-regex)
BLDARGS+=(--with-system)
BLDARGS+=(--with-atomic)
BLDARGS+=(--with-thread)
fi
BLDARGS+=(-j$((2*${NUM_PROCESSORS:-2})))
BLDARGS+=(--prefix=${BOOST_ROOT}/_INSTALLED_)
BLDARGS+=(-d0) # suppress messages/output
if [[ -z ${COMSPEC:-} ]]; then
if [[ "$(uname)" == "Darwin" ]] ; then
BLDARGS+=(cxxflags="-std=c++14 -fvisibility=default")
else
BLDARGS+=(cxxflags="-std=c++14")
BLDARGS+=(runtime-link="static,shared")
fi
BLDARGS+=(--layout=tagged)
./bootstrap.sh
./b2 "${BLDARGS[@]}" stage
./b2 "${BLDARGS[@]}" install
else
BLDARGS+=(runtime-link="static,shared")
BLDARGS+=(--layout=versioned)
BLDARGS+=(--toolset="${BOOST_TOOLSET}")
BLDARGS+=(address-model=64)
BLDARGS+=(architecture=x86)
BLDARGS+=(link=static)
BLDARGS+=(threading=multi)
cmd /E:ON /D /S /C"bootstrap.bat"
./b2.exe "${BLDARGS[@]}" stage
./b2.exe "${BLDARGS[@]}" install
fi
if [[ ${CI:-false} == "true" ]]; then
# save some disk space...these are mostly
# obj files and don't need to be kept in CI contexts
rm -rf bin.v2
fi
cd $odir

View File

@@ -0,0 +1,34 @@
#!/usr/bin/env bash
set -e
IFS=. read cm_maj cm_min cm_rel <<<"$1"
: ${cm_rel:=0}
CMAKE_ROOT=${2:-"${HOME}/cmake"}
function cmake_version ()
{
if [[ -d ${CMAKE_ROOT} ]] ; then
local perms=$(test $(uname) = "Linux" && echo "/111" || echo "+111")
local installed=$(find ${CMAKE_ROOT} -perm ${perms} -type f -name cmake)
if [[ "${installed}" != "" ]] ; then
echo "$(${installed} --version | head -1)"
fi
fi
}
installed=$(cmake_version)
if [[ "${installed}" != "" && ${installed} =~ ${cm_maj}.${cm_min}.${cm_rel} ]] ; then
echo "cmake already installed: ${installed}"
exit
fi
pkgname="cmake-${cm_maj}.${cm_min}.${cm_rel}-$(uname)-x86_64.tar.gz"
tmppkg="/tmp/cmake.tar.gz"
wget --quiet https://cmake.org/files/v${cm_maj}.${cm_min}/${pkgname} -O ${tmppkg}
mkdir -p ${CMAKE_ROOT}
cd ${CMAKE_ROOT}
tar --strip-components 1 -xf ${tmppkg}
rm -f ${tmppkg}
echo "installed: $(cmake_version)"

View File

@@ -0,0 +1,15 @@
/var/log/rippled/*.log {
daily
minsize 200M
rotate 7
nocreate
missingok
notifempty
compress
compresscmd /usr/bin/nice
compressoptions -n19 ionice -c3 gzip
compressext .gz
postrotate
/opt/ripple/bin/rippled --conf /opt/ripple/etc/rippled.cfg logrotate
endscript
}

View File

@@ -0,0 +1,15 @@
[Unit]
Description=Ripple Daemon
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
ExecStart=/opt/rippled-reporting/bin/rippled-reporting --silent --conf /etc/opt/rippled-reporting/rippled-reporting.cfg
Restart=on-failure
User=rippled-reporting
Group=rippled-reporting
LimitNOFILE=65536
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,15 @@
[Unit]
Description=Ripple Daemon
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
ExecStart=/opt/ripple/bin/rippled --net --silent --conf /etc/opt/ripple/rippled.cfg
Restart=on-failure
User=rippled
Group=rippled
LimitNOFILE=65536
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,10 @@
# For automatic updates, symlink this file to /etc/cron.d/
# Do not remove the newline at the end of this cron script
# bash required for use of RANDOM below.
SHELL=/bin/bash
PATH=/sbin:/bin:/usr/sbin:/usr/bin
# invoke check/update script with random delay up to 59 mins
0 * * * * root sleep $((RANDOM*3540/32768)) && /opt/ripple/bin/update-rippled.sh

View File

@@ -0,0 +1,65 @@
#!/usr/bin/env bash
# auto-update script for rippled daemon
# Check for sudo/root permissions
if [[ $(id -u) -ne 0 ]] ; then
echo "This update script must be run as root or sudo"
exit 1
fi
LOCKDIR=/tmp/rippleupdate.lock
UPDATELOG=/var/log/rippled/update.log
function cleanup {
# If this directory isn't removed, future updates will fail.
rmdir $LOCKDIR
}
# Use mkdir to check if the process is already running; mkdir is atomic, unlike file creation.
if ! mkdir $LOCKDIR 2>/dev/null; then
echo $(date -u) "lockdir exists - won't proceed." >> $UPDATELOG
exit 1
fi
trap cleanup EXIT
source /etc/os-release
can_update=false
if [[ "$ID" == "ubuntu" || "$ID" == "debian" ]] ; then
# Silent update
apt-get update -qq
# The next line is an "awk"ward way to check if the package needs to be updated.
RIPPLE=$(apt-get install -s --only-upgrade rippled | awk '/^Inst/ { print $2 }')
test "$RIPPLE" == "rippled" && can_update=true
function apply_update {
apt-get install rippled -qq
}
elif [[ "$ID" == "fedora" || "$ID" == "centos" || "$ID" == "rhel" || "$ID" == "scientific" ]] ; then
RIPPLE_REPO=${RIPPLE_REPO-stable}
yum --disablerepo=* --enablerepo=ripple-$RIPPLE_REPO clean expire-cache
yum check-update -q --enablerepo=ripple-$RIPPLE_REPO rippled || can_update=true
function apply_update {
yum update -y --enablerepo=ripple-$RIPPLE_REPO rippled
}
else
echo "unrecognized distro!"
exit 1
fi
# Do the actual update and restart the service after reloading systemctl daemon.
if [ "$can_update" = true ] ; then
exec 3>&1 1>>${UPDATELOG} 2>&1
set -e
apply_update
systemctl daemon-reload
systemctl restart rippled.service
echo $(date -u) "rippled daemon updated."
else
echo $(date -u) "no updates available" >> $UPDATELOG
fi

View File

@@ -0,0 +1,20 @@
#!/usr/bin/env bash
function error {
echo $1
exit 1
}
cd /opt/rippled_bld/pkg/rippled
export RIPPLED_VERSION=$(egrep -i -o "\b(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-[0-9a-z\-]+(\.[0-9a-z\-]+)*)?(\+[0-9a-z\-]+(\.[0-9a-z\-]+)*)?\b" src/ripple/protocol/impl/BuildInfo.cpp)
: ${PKG_OUTDIR:=/opt/rippled_bld/pkg/out}
export PKG_OUTDIR
if [ ! -d ${PKG_OUTDIR} ]; then
error "${PKG_OUTDIR} is not mounted"
fi
if [ -x ${OPENSSL_ROOT}/bin/openssl ]; then
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${OPENSSL_ROOT}/lib ${OPENSSL_ROOT}/bin/openssl version -a
fi

View File

@@ -0,0 +1,36 @@
ARG DIST_TAG=18.04
FROM ubuntu:$DIST_TAG
ARG GIT_COMMIT=unknown
ARG CI_USE=false
LABEL git-commit=$GIT_COMMIT
# install/setup prerequisites:
COPY ubuntu-builder/ubuntu_setup.sh /tmp/
COPY shared/build_deps.sh /tmp/
COPY shared/install_cmake.sh /tmp/
COPY shared/install_boost.sh /tmp/
RUN chmod +x /tmp/ubuntu_setup.sh && \
chmod +x /tmp/build_deps.sh && \
chmod +x /tmp/install_boost.sh && \
chmod +x /tmp/install_cmake.sh
RUN /tmp/ubuntu_setup.sh
RUN /tmp/install_cmake.sh 3.16.1 /opt/local/cmake-3.16
RUN ln -s /opt/local/cmake-3.16 /opt/local/cmake
ENV PATH="/opt/local/cmake/bin:$PATH"
# also install min supported cmake for testing
RUN if [ "${CI_USE}" = true ] ; then /tmp/install_cmake.sh 3.9.0 /opt/local/cmake-3.9; fi
RUN /tmp/build_deps.sh
ENV PLANTUML_JAR="/opt/plantuml/plantuml.jar"
ENV BOOST_ROOT="/opt/local/boost/_INSTALLED_"
ENV OPENSSL_ROOT="/opt/local/openssl"
# prep files for package building
RUN mkdir -m 777 -p /opt/rippled_bld/pkg/debian
RUN update-alternatives --set gcc /usr/bin/gcc-8
WORKDIR /opt/rippled_bld/pkg
COPY packaging/dpkg/build_dpkg.sh ./
CMD ./build_dpkg.sh

View File

@@ -0,0 +1,189 @@
#!/usr/bin/env bash
set -ex
source /etc/os-release
if [[ ${VERSION_ID} =~ ^18\. || ${VERSION_ID} =~ ^16\. ]] ; then
echo "setup for ${PRETTY_NAME}"
else
echo "${VERSION} not supported"
exit 1
fi
export DEBIAN_FRONTEND="noninteractive"
echo "Acquire::Retries 3;" > /etc/apt/apt.conf.d/80-retries
echo "Acquire::http::Pipeline-Depth 0;" >> /etc/apt/apt.conf.d/80-retries
echo "Acquire::http::No-Cache true;" >> /etc/apt/apt.conf.d/80-retries
echo "Acquire::BrokenProxy true;" >> /etc/apt/apt.conf.d/80-retries
apt-get update -o Acquire::CompressionTypes::Order::=gz
apt-get -y update
apt-get -y install apt-utils
apt-get -y install software-properties-common wget
apt-get -y upgrade
if [[ ${VERSION_ID} =~ ^18\. ]] ; then
apt-add-repository -y multiverse
apt-add-repository -y universe
fi
add-apt-repository -y ppa:ubuntu-toolchain-r/test
apt-get -y clean
apt-get -y update
apt-get -y --fix-missing install \
make cmake ninja-build autoconf automake libtool pkg-config libtool \
openssl libssl-dev \
liblzma-dev libbz2-dev zlib1g-dev \
libjemalloc-dev \
python-pip \
gdb gdbserver \
libstdc++6 \
flex bison parallel \
libicu-dev texinfo \
java-common javacc \
dpkg-dev debhelper devscripts fakeroot \
debmake git-buildpackage dh-make gitpkg debsums gnupg \
dh-buildinfo dh-make dh-systemd \
apt-transport-https
apt-get -y install gcc-7 g++-7
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-7 40 \
--slave /usr/bin/g++ g++ /usr/bin/g++-7 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-7 \
--slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-7 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-7 \
--slave /usr/bin/gcov gcov /usr/bin/gcov-7 \
--slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-7 \
--slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-7
apt-get -y install gcc-8 g++-8
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-8 20 \
--slave /usr/bin/g++ g++ /usr/bin/g++-8 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-8 \
--slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-8 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-8 \
--slave /usr/bin/gcov gcov /usr/bin/gcov-8 \
--slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-8 \
--slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-8
update-alternatives --auto gcc
update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-7 40
update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-8 20
update-alternatives --auto cpp
if [ "${CI_USE}" = true ] ; then
apt-get -y install gcc-6 g++-6
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-6 10 \
--slave /usr/bin/g++ g++ /usr/bin/g++-6 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-6 \
--slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-6 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-6 \
--slave /usr/bin/gcov gcov /usr/bin/gcov-6 \
--slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-6 \
--slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-6
apt-get -y install gcc-9 g++-9
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-9 15 \
--slave /usr/bin/g++ g++ /usr/bin/g++-9 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-9 \
--slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-9 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-9 \
--slave /usr/bin/gcov gcov /usr/bin/gcov-9 \
--slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-9 \
--slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-9
fi
if [[ ${VERSION_ID} =~ ^18\. ]] ; then
apt-get -y install binutils
elif [[ ${VERSION_ID} =~ ^16\. ]] ; then
apt-get -y install python-software-properties binutils-gold
fi
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
if [[ ${VERSION_ID} =~ ^18\. ]] ; then
cat << EOF > /etc/apt/sources.list.d/llvm.list
deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic main
deb-src http://apt.llvm.org/bionic/ llvm-toolchain-bionic main
deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-7 main
deb-src http://apt.llvm.org/bionic/ llvm-toolchain-bionic-7 main
deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main
deb-src http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main
deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9 main
deb-src http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9 main
EOF
elif [[ ${VERSION_ID} =~ ^16\. ]] ; then
cat << EOF > /etc/apt/sources.list.d/llvm.list
deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial main
deb-src http://apt.llvm.org/xenial/ llvm-toolchain-xenial main
deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-7 main
deb-src http://apt.llvm.org/xenial/ llvm-toolchain-xenial-7 main
deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-8 main
deb-src http://apt.llvm.org/xenial/ llvm-toolchain-xenial-8 main
deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-9 main
deb-src http://apt.llvm.org/xenial/ llvm-toolchain-xenial-9 main
EOF
fi
apt-get -y update
apt-get -y install \
clang-7 libclang-common-7-dev libclang-7-dev libllvm7 llvm-7 \
llvm-7-dev llvm-7-runtime clang-format-7 python-clang-7 \
lld-7 libfuzzer-7-dev libc++-7-dev
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-7 40 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-7 \
--slave /usr/bin/llvm-profdata llvm-profdata /usr/bin/llvm-profdata-7 \
--slave /usr/bin/asan-symbolize asan-symbolize /usr/bin/asan_symbolize-7 \
--slave /usr/bin/llvm-symbolizer llvm-symbolizer /usr/bin/llvm-symbolizer-7 \
--slave /usr/bin/clang-format clang-format /usr/bin/clang-format-7 \
--slave /usr/bin/llvm-ar llvm-ar /usr/bin/llvm-ar-7 \
--slave /usr/bin/llvm-cov llvm-cov /usr/bin/llvm-cov-7 \
--slave /usr/bin/llvm-nm llvm-nm /usr/bin/llvm-nm-7
apt-get -y install \
clang-8 libclang-common-8-dev libclang-8-dev libllvm8 llvm-8 \
llvm-8-dev llvm-8-runtime clang-format-8 python-clang-8 \
lld-8 libfuzzer-8-dev libc++-8-dev
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-8 20 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-8 \
--slave /usr/bin/llvm-profdata llvm-profdata /usr/bin/llvm-profdata-8 \
--slave /usr/bin/asan-symbolize asan-symbolize /usr/bin/asan_symbolize-8 \
--slave /usr/bin/llvm-symbolizer llvm-symbolizer /usr/bin/llvm-symbolizer-8 \
--slave /usr/bin/clang-format clang-format /usr/bin/clang-format-8 \
--slave /usr/bin/llvm-ar llvm-ar /usr/bin/llvm-ar-8 \
--slave /usr/bin/llvm-cov llvm-cov /usr/bin/llvm-cov-8 \
--slave /usr/bin/llvm-nm llvm-nm /usr/bin/llvm-nm-8
update-alternatives --auto clang
if [ "${CI_USE}" = true ] ; then
apt-get -y install \
clang-9 libclang-common-9-dev libclang-9-dev libllvm9 llvm-9 \
llvm-9-dev llvm-9-runtime clang-format-9 python-clang-9 \
lld-9 libfuzzer-9-dev libc++-9-dev
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-9 20 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-9 \
--slave /usr/bin/llvm-profdata llvm-profdata /usr/bin/llvm-profdata-9 \
--slave /usr/bin/asan-symbolize asan-symbolize /usr/bin/asan_symbolize-9 \
--slave /usr/bin/llvm-symbolizer llvm-symbolizer /usr/bin/llvm-symbolizer-9 \
--slave /usr/bin/clang-format clang-format /usr/bin/clang-format-9 \
--slave /usr/bin/llvm-ar llvm-ar /usr/bin/llvm-ar-9 \
--slave /usr/bin/llvm-cov llvm-cov /usr/bin/llvm-cov-9 \
--slave /usr/bin/llvm-nm llvm-nm /usr/bin/llvm-nm-9
# only install latest lldb
apt-get -y install lldb-9 python-lldb-9 liblldb-9-dev
update-alternatives --install \
/usr/bin/lldb lldb /usr/bin/lldb-9 50 \
--slave /usr/bin/lldb-server lldb-server /usr/bin/lldb-server-9 \
--slave /usr/bin/lldb-argdumper lldb-argdumper /usr/bin/lldb-argdumper-9 \
--slave /usr/bin/lldb-instr lldb-instr /usr/bin/lldb-instr-9 \
--slave /usr/bin/lldb-mi lldb-mi /usr/bin/lldb-mi-9
update-alternatives --auto clang
fi
apt-get -y autoremove

View File

@@ -16,9 +16,6 @@ Loop: ripple.app ripple.overlay
Loop: ripple.app ripple.peerfinder
ripple.app > ripple.peerfinder
Loop: ripple.app ripple.protocol
ripple.app > ripple.protocol
Loop: ripple.app ripple.rpc
ripple.rpc > ripple.app
@@ -49,12 +46,6 @@ Loop: ripple.nodestore ripple.overlay
Loop: ripple.overlay ripple.rpc
ripple.rpc ~= ripple.overlay
Loop: test.app test.jtx
test.app > test.jtx
Loop: test.app test.rpc
test.rpc ~= test.app
Loop: test.jtx test.toplevel
test.toplevel > test.jtx

View File

@@ -4,6 +4,7 @@ ripple.app > ripple.conditions
ripple.app > ripple.consensus
ripple.app > ripple.crypto
ripple.app > ripple.json
ripple.app > ripple.protocol
ripple.app > ripple.resource
ripple.app > test.unit_test
ripple.basics > ripple.beast
@@ -90,6 +91,8 @@ test.app > ripple.overlay
test.app > ripple.protocol
test.app > ripple.resource
test.app > ripple.rpc
test.app > test.jtx
test.app > test.rpc
test.app > test.toplevel
test.app > test.unit_test
test.basics > ripple.basics
@@ -108,9 +111,8 @@ test.consensus > ripple.app
test.consensus > ripple.basics
test.consensus > ripple.beast
test.consensus > ripple.consensus
test.consensus > ripple.core
test.consensus > ripple.ledger
test.consensus > ripple.protocol
test.consensus > ripple.rpc
test.consensus > test.csf
test.consensus > test.toplevel
test.consensus > test.unit_test
@@ -184,6 +186,7 @@ test.protocol > ripple.basics
test.protocol > ripple.beast
test.protocol > ripple.crypto
test.protocol > ripple.json
test.protocol > ripple.ledger
test.protocol > ripple.protocol
test.protocol > test.toplevel
test.resource > ripple.basics

270
Builds/linux/README.md Normal file
View File

@@ -0,0 +1,270 @@
# Linux Build Instructions
This document focuses on building rippled for development purposes under recent
Ubuntu Linux distributions. To build rippled for Red Hat, Fedora, or CentOS,
including Docker-based builds for those distributions, please consult
the [rippled-package-builder](https://github.com/ripple/rippled-package-builder)
repository.
Note: Ubuntu 16.04 users may need to update their compiler (see the dependencies
section). For non-Ubuntu distributions, the steps below should work by
installing the appropriate dependencies using that distribution's package
management tools.
## Dependencies
gcc-8 or later is required.
Use `apt-get` to install the dependencies provided by the distribution
```
$ apt-get update
$ apt-get install -y gcc g++ wget git cmake pkg-config libprotoc-dev protobuf-compiler libprotobuf-dev libssl-dev
```
To build the software in reporting mode, install these additional dependencies:
```
$ apt-get install -y autoconf flex bison
```
Advanced users can choose to install newer versions of gcc, or the clang compiler.
At this time, rippled only supports protobuf version 2. Using version 3 of
protobuf will give errors.
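If you are unsure which protobuf release your distribution provides, a quick check (purely illustrative, not part of the build itself) is:
```
$ protoc --version
```
If this reports a 3.x release, the build will give errors as described above.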
### Build Boost
Boost 1.70 or later is required. We recommend downloading and compiling boost
with the following process: After changing to the directory where
you wish to download and compile boost, run
```
$ wget https://boostorg.jfrog.io/artifactory/main/release/1.70.0/source/boost_1_70_0.tar.gz
$ tar -xzf boost_1_70_0.tar.gz
$ cd boost_1_70_0
$ ./bootstrap.sh
$ ./b2 headers
$ ./b2 -j<Num Parallel>
```
### (Optional) Dependencies for Building Source Documentation
Source code documentation is not required for running/debugging rippled. That
said, the documentation contains some helpful information about specific
components of the application. For more information on how to install and run
the necessary components, see [this document](../../docs/README.md)
## Build
### Clone the rippled repository
From a shell:
```
git clone git@github.com:ripple/rippled.git
cd rippled
```
For a stable release, choose the `master` branch or one of the tagged releases
listed on [GitHub](https://github.com/ripple/rippled/releases).
```
git checkout master
```
or to test the latest release candidate, choose the `release` branch.
```
git checkout release
```
If you are doing development work and want the latest set of untested
features, you can consider using the `develop` branch instead.
```
git checkout develop
```
### Configure Library Paths
If you didn't persistently set the `BOOST_ROOT` environment variable to the
directory in which you compiled boost, then you should set it temporarily.
For example, if you built Boost in your home directory at `~/boost_1_70_0`, you
would run the following in any shell in which you want to build:
```
export BOOST_ROOT=~/boost_1_70_0
```
Alternatively, you can add `-DBOOST_ROOT=~/boost_1_70_0` to the command line when
invoking `cmake`.
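For example, a single configure invocation that points at the Boost built above might look like this (the path is illustrative and should match wherever you actually built Boost):
```
cmake -DCMAKE_BUILD_TYPE=Debug -DBOOST_ROOT=~/boost_1_70_0 ..
```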
### Generate Configuration
All builds should be done in a separate directory from the source tree root
(a subdirectory is fine). For example, from the root of the ripple source tree:
```
mkdir my_build
cd my_build
```
followed by:
```
cmake -DCMAKE_BUILD_TYPE=Debug ..
```
If your operating system does not provide static libraries (Arch Linux and
Manjaro Linux, for example), you must configure a non-static build by adding
`-Dstatic=OFF` to the above cmake line.
`CMAKE_BUILD_TYPE` can be changed as desired for `Debug` vs.
`Release` builds (all four standard cmake build types are supported).
To select a different compiler (most likely gcc will be found by default), pass
`-DCMAKE_C_COMPILER=<path/to/c-compiler>` and
`-DCMAKE_CXX_COMPILER=</path/to/cxx-compiler>` when configuring. If you prefer,
you can instead set `CC` and `CXX` environment variables which cmake will honor.
#### Options During Configuration:
The CMake file defines a number of configure-time options which can be
examined by running `cmake-gui` or `ccmake` to generate the build. In
particular, the `unity` option allows you to select between the unity and
non-unity builds. `unity` builds are faster to compile since they combine
multiple sources into a single compilation unit - this is the default if you
don't specify otherwise. `nounity` builds can be helpful for detecting include omissions
or for finding other build-related issues, but aren't generally needed for
testing and running.
* `-Dunity=ON` to enable/disable unity builds (defaults to ON)
* `-Dassert=ON` to enable asserts
* `-Djemalloc=ON` to enable jemalloc support for heap checking
* `-Dsan=thread` to enable the thread sanitizer with clang
* `-Dsan=address` to enable the address sanitizer with clang
* `-Dstatic=ON` to enable static linking library dependencies
* `-Dreporting=ON` to build code necessary for reporting mode (defaults to OFF)
Several other infrequently used options are available - run `ccmake` or
`cmake-gui` for a list of all options.
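As an illustration only, a non-unity debug configuration with assertions enabled could be generated like this (the flag selection here is an example, not a recommendation):
```
cmake -DCMAKE_BUILD_TYPE=Debug -Dunity=OFF -Dassert=ON ..
```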
### Build
Once you have generated the build system, you can run the build via cmake:
```
cmake --build . -- -j <parallel jobs>
```
the `-j` parameter in this example tells the build tool to compile several
files in parallel. This value should be chosen roughly based on the number of
cores you have available and/or want to use for building.
When the build completes successfully, you will have a `rippled` executable in
the current directory, which can be used to connect to the network (when
properly configured) or to run unit tests.
#### Optional Installation
The rippled cmake build supports an installation target that will install
rippled as well as a support library that can be used to sign transactions. In
order to build and install the files, specify the `install` target when
building, e.g.:
```
cmake -DCMAKE_BUILD_TYPE=Debug -DCMAKE_INSTALL_PREFIX=/opt/local ..
cmake --build . --target install -- -j <parallel jobs>
```
We recommend specifying `CMAKE_INSTALL_PREFIX` when configuring in order to
explicitly control the install location for your files. Without this setting,
cmake will typically install in `/usr/local`. It is also possible to "rehome"
the installation by specifying the `DESTDIR` env variable during the install phase,
e.g.:
```
DESTDIR=~/mylibs cmake --build . --target install -- -j <parallel jobs>
```
in which case, the files would be installed in the `CMAKE_INSTALL_PREFIX` within
the specified `DESTDIR` path.
#### Signing Library
If you want to use the signing support library to create an application, there
are two simple mechanisms with cmake + git that facilitate this.
With either option below, you will have access to a library from the
rippled project that you can link to in your own project's CMakeLists.txt, e.g.:
```
target_link_libraries (my-signing-app Ripple::xrpl_core)
```
##### Option 1: git submodules + add_subdirectory
First, add the rippled repo as a submodule to your project repo:
```
git submodule add -b master https://github.com/ripple/rippled.git vendor/rippled
```
change the `vendor/rippled` path as desired for your repo layout. Furthermore,
change the branch name if you want to track a different rippled branch, such
as `develop`.
Second, to bring this submodule into your project, just add the rippled subdirectory:
```
add_subdirectory (vendor/rippled)
```
##### Option 2: installed rippled + find_package
First, follow the "Optional Installation" instructions above to
build and install the desired version of rippled.
To make use of the installed files, add the following to your CMakeLists.txt file:
```
set (CMAKE_MODULE_PATH /opt/local/lib/cmake/ripple ${CMAKE_MODULE_PATH})
find_package(Ripple REQUIRED)
```
change the `/opt/local` module path above to match your chosen installation prefix.
## Unit Tests (Recommended)
`rippled` builds a set of unit tests into the server executable. To run these unit
tests after building, pass the `--unittest` option to the compiled `rippled`
executable. The executable will exit with summary info after running the unit tests.
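For example, from the build directory (the job count is illustrative; pick one appropriate for your machine):
```
./rippled --unittest --unittest-jobs 4
```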
## Workaround for a compile error in soci
Compilation errors have been observed with Apple Clang 13.1.6+ and soci v4.x. soci compiles with the `-Werror` flag which causes warnings to be treated as errors. These warnings pertain to style (not correctness). However, they cause the cmake process to fail.
Here's an example of how this looks:
```
.../rippled/.nih_c/unix_makefiles/AppleClang_13.1.6.13160021/Debug/src/soci/src/core/session.cpp:450:66: note: in instantiation of function template specialization 'soci::use<std::string>' requested here
return prepare << backEnd_->get_column_descriptions_query(), use(table_name, "t");
^
1 error generated.
```
Please apply the patch below (courtesy of Scott Determan) to remove these errors. The `.nih_c/unix_makefiles/AppleClang_13.1.6.13160021/Debug/src/soci/cmake/SociConfig.cmake` file needs to be edited. This path is an example for macOS and may differ slightly for other OSes/architectures.
```
diff --git a/cmake/SociConfig.cmake b/cmake/SociConfig.cmake
index 97d907e4..11bcd1f3 100644
--- a/cmake/SociConfig.cmake
+++ b/cmake/SociConfig.cmake
@@ -58,8 +58,8 @@ if (MSVC)
else()
- set(SOCI_GCC_CLANG_COMMON_FLAGS
- "-pedantic -Werror -Wno-error=parentheses -Wall -Wextra -Wpointer-arith -Wcast-align -Wcast-qual -Wfloat-equal -Woverloaded-virtual -Wredundant-decls -Wno-long-long")
+ set(SOCI_GCC_CLANG_COMMON_FLAGS "")
+ #     "-pedantic -Werror -Wno-error=parentheses -Wall -Wextra -Wpointer-arith -Wcast-align -Wcast-qual -Wfloat-equal -Woverloaded-virtual -Wredundant-decls -Wno-long-long")
```

3
Builds/macos/README.md Normal file
@@ -0,0 +1,3 @@
# macOS Build Instructions
[Build and Run rippled on macOS](https://xrpl.org/build-run-rippled-macos.html)

@@ -1,18 +1,14 @@
cmake_minimum_required (VERSION 3.16)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
if (POLICY CMP0074)
cmake_policy(SET CMP0074 NEW)
endif ()
if(POLICY CMP0144)
cmake_policy(SET CMP0144 NEW)
endif()
project (rippled)
set(CMAKE_CXX_EXTENSIONS OFF)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(Boost_NO_BOOST_CMAKE ON)
# make GIT_COMMIT_HASH define available to all sources
@@ -49,6 +45,7 @@ include(RippledSanity)
include(RippledVersion)
include(RippledSettings)
include(RippledNIH)
include(RippledRelease)
# this check has to remain in the top-level cmake
# because of the early return statement
if (packages_only)
@@ -86,5 +83,4 @@ include(RippledCore)
include(RippledInstall)
include(RippledCov)
include(RippledMultiConfig)
include(RippledDocs)
include(RippledValidatorKeys)

@@ -1,185 +1,67 @@
Xahau has many and diverse stakeholders, and everyone deserves
a chance to contribute meaningful changes to the code that runs Xahau.
# Contributing
The XRP Ledger has many and diverse stakeholders, and everyone deserves a chance to contribute meaningful changes to the code that runs the XRPL.
To contribute, please:
1. Fork the repository under your own user.
2. Create a new branch on which to write your changes. Please note that changes which alter transaction processing must be guarded by an [Amendment](https://xrpl.org/amendments.html). Changes which are _read-only_ (e.g. RPC), or which are only refactors that maintain the existing behaviour, do not need to be made through an Amendment.
3. Write and test your code.
4. Ensure that your code compiles with the provided build engine, and update the build engine as part of your PR where needed.
5. Write test cases for your code and include them in `src/test` such that they are runnable from the command line using `./rippled -u`. (Some changes cannot be tested this way.)
6. Ensure your code passes automated checks (e.g. clang-format and levelization.)
7. Squash your commits (i.e. rebase) into as few commits as is reasonable to describe your changes at a high level (typically a single commit for a small change.)
8. Open a PR to the main repository onto the _develop_ branch, and follow the provided template.
We assume you are familiar with the general practice of [making
contributions on GitHub][1]. This file includes only special
instructions specific to this project.
## Before you start
In general, contributions should be developed in your personal
[fork](https://github.com/xahau/xahaud/fork).
The following branches exist in the main project repository:
- `dev`: The latest set of unreleased features, and the most common
starting point for contributions.
- `candidate`: The latest beta release or release candidate.
- `release`: The latest stable release.
The tip of each branch must be signed. In order for GitHub to sign a
squashed commit that it builds from your pull request, GitHub must know
your verifying key. Please set up [signature verification][signing].
[rippled]: https://github.com/xahau/xahaud
[signing]:
https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification
## Major contributions
If your contribution is a major feature or breaking change, then you
must first write a Xahau Standard (XLS) describing it. Go to
[Standards](https://github.com/XRPLF/XRPL-Standards/discussions),
choose the next available standard number, and open a discussion with an
appropriate title to propose your draft standard.
When you submit a pull request, please link the corresponding XLS in the
description. An XLS still in draft status is considered a
work-in-progress and open for discussion. Please allow time for
questions, suggestions, and changes to the XLS draft. It is the
responsibility of the XLS author to update the draft to match the final
implementation when its corresponding pull request is merged, unless the
author delegates that responsibility to others.
## Before making a pull request
Changes that alter transaction processing must be guarded by an
[Amendment](https://docs.xahau.network/features/amendments).
All other changes that maintain the existing behavior do not need an
Amendment.
Ensure that your code compiles according to the build instructions in the
[`documentation`](https://docs.xahau.network/infrastructure/building-xahau).
If you create new source files, they must go under `src/ripple`.
You will need to add them to one of the
[source lists](./Builds/CMake/RippledCore.cmake) in CMake.
Please write tests for your code.
If you create new test source files, they must go under `src/test`.
You will need to add them to one of the
[source lists](./Builds/CMake/RippledCore.cmake) in CMake.
If your test can be run offline, in under 60 seconds, then it can be an
automatic test run by `rippled --unittest`.
Otherwise, it must be a manual test.
The source must be formatted according to the style guide below.
Header includes must be [levelized](./Builds/levelization).
## Pull requests
In general, pull requests use `develop` as the base branch.
(Hotfixes are an exception.)
Changes to pull requests must be added as new commits.
Once code reviewers have started looking at your code, please avoid
force-pushing a branch in a pull request.
This preserves the ability for reviewers to filter changes since their last
review.
A pull request must obtain **approvals from at least two reviewers** before it
can be considered for merge by a Maintainer.
Maintainers retain discretion to require more approvals if they feel the
credibility of the existing approvals is insufficient.
Pull requests must be merged by [squash-and-merge][2]
to preserve a linear history for the `develop` branch.
# Major Changes
If your code change is a major feature, a breaking change or in some other way makes a significant alteration to the way the XRPL will operate, then you must first write an XLS document (XRP Ledger Standard) describing your change.
To do this:
1. Go to [XLS Standards](https://github.com/XRPLF/XRPL-Standards/discussions).
2. Choose the next available standard number.
3. Open a discussion with the appropriate title to propose your draft standard.
4. Link your XLS in your PR.
# Style guide
This is a non-exhaustive list of recommended style guidelines. These are
not always strictly enforced and serve as a way to keep the codebase
coherent rather than a set of _thou shalt not_ commandments.
This is a non-exhaustive list of recommended style guidelines. These are not always strictly enforced and serve as a way to keep the codebase coherent rather than a set of _thou shalt not_ commandments.
## Formatting
All code must conform to `clang-format` version 10,
according to the settings in [`.clang-format`](./.clang-format),
unless the result would be unreasonably difficult to read or maintain.
To demarcate lines that should be left as-is, surround them with comments like
this:
```
// clang-format off
...
// clang-format on
```
You can format individual files in place by running `clang-format -i <file>...`
from any directory within this project.
You can install a pre-commit hook to automatically run `clang-format` before every commit:
```
pip3 install pre-commit
pre-commit install
```
All code must conform to `clang-format` version 10, unless the result would be unreasonably difficult to read or maintain.
To change your code to conform use `clang-format -i <your changed files>`.
## Avoid
1. Proliferation of nearly identical code.
2. Proliferation of new files and classes.
3. Complex inheritance and complex OOP patterns.
4. Unmanaged memory allocation and raw pointers.
5. Macros and non-trivial templates (unless they add significant value).
6. Lambda patterns (unless these add significant value).
7. CPU or architecture-specific code unless there is a good reason to
include it, and where it is used, guard it with macros and provide
explanatory comments.
5. Macros and non-trivial templates (unless they add significant value.)
6. Lambda patterns (unless these add significant value.)
7. CPU or architecture-specific code unless there is a good reason to include it, and where it is used guard it with macros and provide explanatory comments.
8. Importing new libraries unless there is a very good reason to do so.
## Seek to
9. Extend functionality of existing code rather than creating new code.
10. Prefer readability over terseness where important logic is
concerned.
11. Inline functions that are not used or are not likely to be used
elsewhere in the codebase.
12. Use clear and self-explanatory names for functions, variables,
structs and classes.
13. Use TitleCase for classes, structs and filenames, camelCase for
function and variable names, lower case for namespaces and folders.
14. Provide as many comments as you feel that a competent programmer
would need to understand what your code does.
10. Prefer readability over terseness where important logic is concerned.
11. Inline functions that are not used or are not likely to be used elsewhere in the codebase.
12. Use clear and self-explanatory names for functions, variables, structs and classes.
13. Use TitleCase for classes, structs and filenames, camelCase for function and variable names, lower case for namespaces and folders.
14. Provide as many comments as you feel that a competent programmer would need to understand what your code does.
# Maintainers
Maintainers are ecosystem participants with elevated access to the repository. They are able to push new code, make decisions on when a release should be made, etc.
Maintainers are ecosystem participants with elevated access to the repository.
They are able to push new code, make decisions on when a release should be
made, etc.
## Code Review
New contributors' PRs must be reviewed by at least two of the maintainers. PRs from well-established prior contributors can be reviewed by a single maintainer.
## Adding and Removing
New maintainers can be proposed by two existing maintainers, subject to a vote by a quorum of the existing maintainers. A minimum of 50% support and a 50% participation is required. In the event of a tie vote, the addition of the new maintainer will be rejected.
## Adding and removing
Existing maintainers can resign, or be subject to a vote for removal at the behest of two existing maintainers. A minimum of 60% agreement and 50% participation are required. The XRP Ledger Foundation will have the ability, for cause, to remove an existing maintainer without a vote.
New maintainers can be proposed by two existing maintainers, subject to a vote
by a quorum of the existing maintainers.
A minimum of 50% support and a 50% participation is required.
In the event of a tie vote, the addition of the new maintainer will be
rejected.
Existing maintainers can resign, or be subject to a vote for removal at the
behest of two existing maintainers.
A minimum of 60% agreement and 50% participation are required.
The XRP Ledger Foundation will have the ability, for cause, to remove an
existing maintainer without a vote.
## Current Maintainers
* [Richard Holland](https://github.com/RichardAH) (XRPL Labs + XRP Ledger Foundation)
* [Denis Angell](https://github.com/dangell7) (XRPL Labs + XRP Ledger Foundation)
* [Wietse Wind](https://github.com/WietseWind) (XRPL Labs + XRP Ledger Foundation)
[1]: https://docs.github.com/en/get-started/quickstart/contributing-to-projects
[2]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/about-pull-request-merges#squash-and-merge-your-commits
## Existing Maintainers
* [JoelKatz](https://github.com/JoelKatz) (Ripple)
* [Manojsdoshi](https://github.com/manojsdoshi) (Ripple)
* [N3tc4t](https://github.com/n3tc4t) (XRPL Labs)
* [Nikolaos D Bougalis](https://github.com/nbougalis) (Ripple)
* [Nixer89](https://github.com/nixer89) (XRP Ledger Foundation)
* [RichardAH](https://github.com/RichardAH) (XRPL Labs + XRP Ledger Foundation)
* [Seelabs](https://github.com/seelabs) (Ripple)
* [Silkjaer](https://github.com/Silkjaer) (XRP Ledger Foundation)
* [WietseWind](https://github.com/WietseWind) (XRPL Labs + XRP Ledger Foundation)
* [Ximinez](https://github.com/ximinez) (Ripple)

@@ -2,7 +2,6 @@ ISC License
Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
Copyright (c) 2012-2020, the XRP Ledger developers.
Copyright (c) 2020-2024, XRPL Labs.
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above

@@ -1,71 +1,63 @@
# Xahau
# Hooks Public Testnet + Documentation
**Note:** Throughout this README, references to "we" or "our" pertain to the community and contributors involved in the Xahau network. It does not imply a legal entity or a specific collection of individuals.
Please see [Hooks Testnet V2](https://hooks-testnet-v2.xrpl-labs.com/) for faucet + documentation + explorer + builder.
[Xahau](https://xahau.network/) is a decentralized cryptographic ledger that builds upon the robust foundation of the XRP Ledger. It inherits the XRP Ledger's Byzantine Fault Tolerant consensus algorithm and enhances it with additional features and functionalities. Developers and users familiar with the XRP Ledger will find that most documentation and tutorials available on [xrpl.org](https://xrpl.org) are relevant and applicable to Xahau, including those related to running validators and managing validator keys. For Xahau-specific documentation, you can visit our [documentation](https://docs.xahau.network/).
# The XRP Ledger
## XAH
XAH is the public, counterparty-free asset native to Xahau and functions primarily as network gas. Transactions submitted to the Xahau network must supply an appropriate amount of XAH, to be burnt by the network as a fee, in order to be successfully included in a validated ledger. In addition, XAH also acts as a bridge currency within the Xahau DEX. XAH is traded on the open-market and is available for anyone to access. Xahau was created in 2023 with a supply of 600 million units of XAH.
The [XRP Ledger](https://xrpl.org/) is a decentralized cryptographic ledger powered by a network of peer-to-peer nodes. The XRP Ledger uses a novel Byzantine Fault Tolerant consensus algorithm to settle and record transactions in a secure distributed database without a central operator.
## xahaud
The server software that powers Xahau is called `xahaud` and is available in this repository under the permissive [ISC open-source license](LICENSE.md). The `xahaud` server software is written primarily in C++ and runs on a variety of platforms. The `xahaud` server software can run in several modes depending on its configuration.
## XRP
[XRP](https://xrpl.org/xrp.html) is a public, counterparty-free asset native to the XRP Ledger, and is designed to bridge the many different currencies in use worldwide. XRP is traded on the open-market and is available for anyone to access. The XRP Ledger was created in 2012 with a finite supply of 100 billion units of XRP. Its creators gifted 80 billion XRP to a company, now called [Ripple](https://ripple.com/), to develop the XRP Ledger and its ecosystem. Ripple uses XRP to help build the Internet of Value, ushering in a world in which money moves as fast and efficiently as information does today.
## rippled
The server software that powers the XRP Ledger is called `rippled` and is available in this repository under the permissive [ISC open-source license](LICENSE.md). The `rippled` server software is written primarily in C++ and runs on a variety of platforms. The `rippled` server software can run in several modes depending on its [configuration](https://xrpl.org/rippled-server-modes.html).
### Build from Source
* [Read the build instructions in our documentation](https://docs.xahau.network/infrastructure/building-xahau)
* If you encounter any issues, please [open an issue](https://github.com/xahau/xahaud/issues)
* [Linux](Builds/linux/README.md)
* [Mac](Builds/macos/README.md) (Not recommended for production)
* [Windows](Builds/VisualStudio2017/README.md) (Not recommended for production)
## Highlights of Xahau
## Key Features of the XRP Ledger
1. **Hooks**: Hooks are small, efficient WebAssembly modules designed specifically for Xahau. They add robust smart contract functionality to Xahau, allowing you to construct and deploy applications with bespoke functionalities. Hooks can block or allow transactions to and from the account, change and keep track of the hook's internal state and logic, and autonomously initiate new transactions on the account's behalf. They can be written in any language that can be compiled into WebAssembly.
- **[Censorship-Resistant Transaction Processing][]:** No single party decides which transactions succeed or fail, and no one can "roll back" a transaction after it completes. As long as those who choose to participate in the network keep it healthy, they can settle transactions in seconds.
- **[Fast, Efficient Consensus Algorithm][]:** The XRP Ledger's consensus algorithm settles transactions in 4 to 5 seconds, processing at a throughput of up to 1500 transactions per second. These properties put XRP at least an order of magnitude ahead of other top digital assets.
- **[Finite XRP Supply][]:** When the XRP Ledger began, 100 billion XRP were created, and no more XRP will ever be created. The available supply of XRP decreases slowly over time as small amounts are destroyed to pay transaction costs.
- **[Responsible Software Governance][]:** A team of full-time, world-class developers at Ripple maintain and continually improve the XRP Ledger's underlying software with contributions from the open-source community. Ripple acts as a steward for the technology and an advocate for its interests, and builds constructive relationships with governments and financial institutions worldwide.
- **[Secure, Adaptable Cryptography][]:** The XRP Ledger relies on industry standard digital signature systems like ECDSA (the same scheme used by Bitcoin) but also supports modern, efficient algorithms like Ed25519. The extensible nature of the XRP Ledger's software makes it possible to add and disable algorithms as the state of the art in cryptography advances.
- **[Modern Features for Smart Contracts][]:** Features like Escrow, Checks, and Payment Channels support cutting-edge financial applications including the [Interledger Protocol](https://interledger.org/). This toolbox of advanced features comes with safety features like a process for amending the network and separate checks against invariant constraints.
- **[On-Ledger Decentralized Exchange][]:** In addition to all the features that make XRP useful on its own, the XRP Ledger also has a fully-functional accounting system for tracking and trading obligations denominated in any way users want, and an exchange built into the protocol. The XRP Ledger can settle long, cross-currency payment paths and exchanges of multiple currencies in atomic transactions, bridging gaps of trust with XRP.
2. **Balance Rewards**: Xahau offers a Balance Rewards feature that provides a 4% per annum reward. This feature encourages users to maintain a balance in their accounts and rewards them for doing so.
[Censorship-Resistant Transaction Processing]: https://xrpl.org/xrp-ledger-overview.html#censorship-resistant-transaction-processing
[Fast, Efficient Consensus Algorithm]: https://xrpl.org/xrp-ledger-overview.html#fast-efficient-consensus-algorithm
[Finite XRP Supply]: https://xrpl.org/xrp-ledger-overview.html#finite-xrp-supply
[Responsible Software Governance]: https://xrpl.org/xrp-ledger-overview.html#responsible-software-governance
[Secure, Adaptable Cryptography]: https://xrpl.org/xrp-ledger-overview.html#secure-adaptable-cryptography
[Modern Features for Smart Contracts]: https://xrpl.org/xrp-ledger-overview.html#modern-features-for-smart-contracts
[On-Ledger Decentralized Exchange]: https://xrpl.org/xrp-ledger-overview.html#on-ledger-decentralized-exchange
3. **URIToken**: The URIToken is a feature in Xahau that allows for the creation and management of non-fungible tokens within the network. This feature can be used for a variety of purposes and specific use cases.
4. **Import/B2M**: The Import/B2M feature in Xahau allows for the importation of assets into the network. This feature can be used to bring external assets into the Xahau network, expanding the range of assets that can be managed and traded within the network.
5. **Governance Game**: The Governance Game is a feature in Xahau that allows for the decentralized governance of the network. This feature allows users to participate in the decision-making process of the network, ensuring that the network remains democratic and responsive to the needs of its users.
## Binary Releases and Versioning System
Xahau provides pre-compiled binary releases of its software, which are ready-to-run versions that users can download and execute without compiling the source code themselves. These binaries are built automatically using GitHub Actions whenever a new commit is pushed or a pull request is merged.
The versioning system for Xahau binaries is based on the date of the build, the branch name, and a build number, following the format `YYYY.MM.DD-branch+buildnumber`. For example, `2023.10.30-release+443` indicates a binary built on October 30, 2023, from the `release` branch, and it is the 443rd build from that branch.
Users can access these binaries on the [Xahau Build Server](https://build.xahau.tech/), which provides an organized list of releases along with release notes for each version. This system simplifies the deployment process for users and ensures they can easily identify and download the appropriate version for their needs.
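For example, a specific build could be fetched directly from the build server with a command along these lines (illustrative only; check the build server listing for the exact file name of the release you want):
```
curl -LO "https://build.xahau.tech/2023.10.30-release%2B443"
```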
## Source Code
Here are some good places to start learning the source code:
- Read the markdown files in the source tree: `src/ripple/**/*.md`.
- Read [the levelization document](./Builds/levelization) to get an idea of the internal dependency graph.
- In the big picture, the `main` function constructs an `ApplicationImp` object, which implements the `Application` virtual interface. Almost every component in the application takes an `Application&` parameter in its constructor, typically named `app` and stored as a member variable `app_`. This allows most components to depend on any other component.
[![travis-ci.com: Build Status](https://travis-ci.com/ripple/rippled.svg?branch=develop)](https://travis-ci.com/ripple/rippled)
[![codecov.io: Code Coverage](https://codecov.io/gh/ripple/rippled/branch/develop/graph/badge.svg)](https://codecov.io/gh/ripple/rippled)
### Repository Contents
| Folder | Contents |
|:-----------|:-------------------------------------------------|
| `./Builds` | Platform-specific guides for building `xahaud`. |
| `./bin` | Scripts and data files for Ripple integrators. |
| `./Builds` | Platform-specific guides for building `rippled`. |
| `./docs` | Source documentation files and doxygen config. |
| `./cfg` | Example configuration files. |
| `./src` | Source code. |
Some of the directories under `src` are external repositories included using
git-subtree. See those directories' README files for more details.
## Resources
- **Documentation**: Documentation for XRPL, Xahau and Hooks.
- [Xrpl Documentation](https://xrpl.org)
- [Xahau Documentation](https://docs.xahau.network/)
- [Hooks Technical Documentation](https://xrpl-hooks.readme.io/)
- **Explorers**: Explore the Xahau ledger using various explorers:
- [xahauexplorer.com](https://xahauexplorer.com)
- [xahscan.com](https://xahscan.com)
- [xahau.xrpl.org](https://xahau.xrpl.org)
- [explorer.xahau.network](https://explorer.xahau.network)
- **Testnet & Faucet**: Test applications and obtain test XAH at [xahau-test.net](https://xahau-test.net) and use the testnet explorer at [explorer.xahau.network](https://explorer.xahau.network).
- **Supporting Wallets**: A list of wallets that support XAH and Xahau-based assets.
- [Xaman](https://xaman.app)
- [Crossmark](https://crossmark.io)
## See Also
* [XRP Ledger Dev Portal](https://xrpl.org/)
* [Setup and Installation](https://xrpl.org/install-rippled.html)
* [Source Documentation (Doxygen)](https://xrplf.github.io/rippled/)
* [Learn more about the XRP Ledger (YouTube)](https://www.youtube.com/playlist?list=PLJQ55Tj1hIVZtJ_JdTvSum2qMTsedWkNi)

@@ -1,74 +0,0 @@
# Release Notes
This document contains the release notes for `xahaud`, the reference server implementation of the Xahau protocol. To learn more about how to build, run or update a `xahaud` server, visit https://docs.xahau.network/infrastructure/peering/connect-to-xahau-mainnet
Have new ideas? Need help with setting up your node? [Please open an issue here](https://github.com/xahau/xahaud/issues/new/choose).
# Introducing Xahau version 2023.10.30-release+443
Version 2023.10.30-release+443 of `xahaud`, the reference server implementation of the Xahau protocol, is now available at [Build Server](https://build.xahau.tech/).
[Download Release Binary](https://build.xahau.tech/2023.10.30-release%2B443)
[Sign Up for Future Release Announcements](https://groups.google.com/g/xahau-server)
<!-- BREAK -->
## Action Required
New amendments are now open for voting according to Xahau's [amendment process](https://docs.xahau.network/features/amendments), which enables protocol changes following five days of >80% support from trusted validators.
If you operate a Xahau server, upgrade to version 2023.10.30-release+443 by October 31 to ensure service continuity. The exact time that protocol changes take effect depends on the voting decisions of the decentralized network.
## Install / Upgrade
On supported platforms, see the [instructions on installing or updating `xahaud`](https://docs.xahau.network/infrastructure/peering/connect-to-xahau-mainnet).
## New Amendments
- **`Hooks`**: This amendment activates hooks and the hook API in the Xahau network, allowing custom logic to be executed on the ledger in response to transactions.
- **`BalanceRewards`**: This amendment enables `ClaimReward` and `GenesisMint` transactions, facilitating balance rewards to be paid in XAH, the Xahau network's native currency.
- **`PaychanAndEscrowForTokens`**: This amendment allows the use of IOU tokens for PaymentChannels and Escrow transactions, enhancing flexibility and functionality.
- **`URIToken`**: This amendment activates URITokens, which are non-fungible, hook-friendly tokens in the Xahau network.
- **`Import`**: This amendment enables the Import transaction for B2M xpop processing, allowing transactions to be imported from another network or system.
- **`XahauGenesis`**: This amendment activates the genesis amendment for the initial distribution of XAH and the establishment of the governance game.
- **`HooksUpdate1`**: This amendment extends the hooks API to include Xpop functionality, enabling more complex transactions involving Xpop.
## Changelog
### New Features and Improvements
- **Server Definitions**: This release introduces a `server_definitions` endpoint, which returns a JSON object containing the definitions of the various types, fields, and transaction results used in the Xahau protocol.
This endpoint gives clients an efficient way to fetch and verify the current definitions used by the network.
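As a purely illustrative sketch (assuming a locally running server with its JSON-RPC admin port at `127.0.0.1:5005`, as in the example configuration), the endpoint can be queried like any other RPC method:
```
curl -s -X POST http://127.0.0.1:5005/ \
     -H 'Content-Type: application/json' \
     -d '{"method": "server_definitions", "params": [{}]}'
```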
### GitHub
The public source code repository for `xahaud` is hosted on GitHub at <https://github.com/xahau/xahaud>.
We welcome all contributions and invite everyone to join the community of Xahau developers to help build the Internet of Value.
### Credits
The following people contributed directly to this release:
- Nikolaos D. Bougalis <nikb@bougalis.net>
- Wietse Wind <wietse@xrpl-labs.com>
- Richard Holland <richard@xrpl-labs.com>
- Denis Angell <denis@xrpl-labs.com>
Bug Bounties and Responsible Disclosures:
We welcome reviews of the xahaud code and urge researchers to
responsibly disclose any issues they may find.
To report a bug, please send a detailed report to:
bugs@xahau.network

@@ -5,255 +5,7 @@
This document contains the release notes for `rippled`, the reference server implementation of the XRP Ledger protocol. To learn more about how to build, run or update a `rippled` server, visit https://xrpl.org/install-rippled.html
Have new ideas? Need help with setting up your node? [Please open an issue here](https://github.com/xrplf/rippled/issues/new/choose).
# Introducing XRP Ledger version 1.10.1
Version 1.10.1 of `rippled`, the reference server implementation of the XRP Ledger protocol, is now available. This release restores packages for Ubuntu 18.04.
Compared to version 1.10.0, the only C++ code change fixes an edge case in Reporting Mode.
If you are already running version 1.10.0, then upgrading to version 1.10.1 is generally not required.
[Sign Up for Future Release Announcements](https://groups.google.com/g/ripple-server)
<!-- BREAK -->
## Install / Upgrade
On supported platforms, see the [instructions on installing or updating `rippled`](https://xrpl.org/install-rippled.html).
## Changelog
- [`da18c86cbf`](https://github.com/ripple/rippled/commit/da18c86cbfea1d8fe6940035f9103e15890d47ce) Build packages with Ubuntu 18.04
- [`f7b3ddd87b`](https://github.com/ripple/rippled/commit/f7b3ddd87b8ef093a06ab1420bea57ed1e77643a) Reporting Mode: Do not attempt to acquire missing data from peer network (#4458)
### GitHub
The public source code repository for `rippled` is hosted on GitHub at <https://github.com/XRPLF/rippled>.
We welcome all contributions and invite everyone to join the community of XRP Ledger developers to help build the Internet of Value.
### Credits
The following people contributed directly to this release:
- John Freeman <jfreeman08@gmail.com>
- Mark Travis <mtrippled@users.noreply.github.com>
- Michael Legleux <mlegleux@ripple.com>
Bug Bounties and Responsible Disclosures:
We welcome reviews of the rippled code and urge researchers to
responsibly disclose any issues they may find.
To report a bug, please send a detailed report to:
bugs@xrpl.org
# Introducing XRP Ledger version 1.10.0
Version 1.10.0 of `rippled`, the reference server implementation of the XRP Ledger protocol, is now available. This release introduces six new amendments, detailed below, and cleans up code to improve performance.
[Sign Up for Future Release Announcements](https://groups.google.com/g/ripple-server)
<!-- BREAK -->
## Action Required
Six new amendments are now open for voting according to the XRP Ledger's [amendment process](https://xrpl.org/amendments.html), which enables protocol changes following two weeks of >80% support from trusted validators.
If you operate an XRP Ledger server, upgrade to version 1.10.0 by March 21 to ensure service continuity. The exact time that protocol changes take effect depends on the voting decisions of the decentralized network.
## Install / Upgrade
On supported platforms, see the [instructions on installing or updating `rippled`](https://xrpl.org/install-rippled.html).
## New Amendments
- **`featureImmediateOfferKilled`**: Changes the response code of an `OfferCreate` transaction with the `tfImmediateOrCancel` flag to return `tecKILLED` when no funds are moved. The previous return code of `tecSUCCESS` was unintuitive. [#4157](https://github.com/XRPLF/rippled/pull/4157)
- **`featureDisallowIncoming`**: Enables an account to block incoming checks, payment channels, NFToken offers, and trust lines. [#4336](https://github.com/XRPLF/rippled/pull/4336)
- **`featureXRPFees`**: Simplifies transaction cost calculations to use XRP directly, rather than calculating indirectly in "fee units" and translating the results to XRP. Updates all instances of "fee units" in the protocol and ledger data to be drops of XRP instead. [#4247](https://github.com/XRPLF/rippled/pull/4247)
- **`fixUniversalNumber`**: Simplifies and unifies the code for decimal floating point math. In some cases, this provides slightly better accuracy than the previous code, resulting in calculations whose least significant digits are different than when calculated with the previous code. The different results may cause other edge case differences where precise calculations are used, such as ranking of offers or processing of payments that use several different paths. [#4192](https://github.com/XRPLF/rippled/pull/4192)
- **`fixNonFungibleTokensV1_2`**: This amendment is a combination of NFToken fixes. [#4417](https://github.com/XRPLF/rippled/pull/4417)
- Fixes unburnable NFTokens when they have over 500 offers. [#4346](https://github.com/XRPLF/rippled/pull/4346)
- Fixes 3 NFToken offer acceptance issues. [#4380](https://github.com/XRPLF/rippled/pull/4380)
- Prevents brokered sales of NFTokens to owners. [#4403](https://github.com/XRPLF/rippled/pull/4403)
- Only allows the destination to settle NFToken offers through brokerage. [#4399](https://github.com/XRPLF/rippled/pull/4399)
- **`fixTrustLinesToSelf`**: Trust lines must be between two different accounts, but two exceptions exist because of a bug that briefly existed. This amendment removes those trust lines. [69bb2be](https://github.com/XRPLF/rippled/pull/4270/commits/69bb2be446e3cc24c694c0835b48bd2ecd3d119e)
## Changelog
### New Features and Improvements
- **Improve Handshake in the peer protocol**: Switched to using a cryptographically secure PRNG for the Instance Cookie. `rippled` now uses hex encoding for the `Closed-Ledger` and `Previous-Ledger` fields in the Handshake. Also added `--newnodeid` and `--nodeid` command line options. [5a15229](https://github.com/XRPLF/rippled/pull/4270/commits/5a15229eeb13b69c8adf1f653b88a8f8b9480546)
- **RPC tooBusy response now has 503 HTTP status code**: Added ripplerpc 3.0, enabling RPC tooBusy responses to return relevant HTTP status codes. This is a non-breaking change that only applies to JSON-RPC when you include `"ripplerpc": "3.0"` in the request. [#4143](https://github.com/XRPLF/rippled/pull/4143)
- **Use the Conan package manager**: Added a `conanfile.py` and Conan recipe for Snappy. Removed the RocksDB recipe from the repo; you can now get it from Conan Center. [#4367](https://github.com/XRPLF/rippled/pull/4367), [c2b03fe](https://github.com/XRPLF/rippled/commit/c2b03fecca19a304b37467b01fa78593d3dce3fb)
- **Update Build Instructions**: Updated the build instructions to build with the Conan package manager and restructured info for easier comprehension. [#4376](https://github.com/XRPLF/rippled/pull/4376), [#4383](https://github.com/XRPLF/rippled/pull/4383)
- **Revise CONTRIBUTING**: Updated code contribution guidelines. `rippled` is an open source project and contributions are very welcome. [#4382](https://github.com/XRPLF/rippled/pull/4382)
- **Update documented pathfinding configuration defaults**: `417cfc2` changed the default Path Finding configuration values, but missed updating the values documented in rippled-example.cfg. Updated those defaults and added recommended values for nodes that want to support advanced pathfinding. [#4409](https://github.com/XRPLF/rippled/pull/4409)
- **Remove gRPC code previously used for the Xpring SDK**: Removed gRPC code used for the Xpring SDK. The gRPC API is also enabled locally by default in `rippled-example.cfg`. This API is used for [Reporting Mode](https://xrpl.org/build-run-rippled-in-reporting-mode.html) and [Clio](https://github.com/XRPLF/clio). [28f4cc7](https://github.com/XRPLF/rippled/pull/4321/commits/28f4cc7817c2e477f0d7e9ade8f07a45ff2b81f1)
- **Switch from C++17 to C++20**: Updated `rippled` to use C++20. [92d35e5](https://github.com/XRPLF/rippled/pull/4270/commits/92d35e54c7de6bbe44ff6c7c52cc0765b3f78258)
- **Support for Boost 1.80.0:**: [04ef885](https://github.com/XRPLF/rippled/pull/4321/commits/04ef8851081f6ee9176783ad3725960b8a931ebb)
- **Reduce default reserves to 10/2**: Updated the hard-coded default reserves to match the current settings on Mainnet. [#4329](https://github.com/XRPLF/rippled/pull/4329)
- **Improve self-signed certificate generation**: Improved speed and security of TLS certificate generation on fresh startup. [0ecfc7c](https://github.com/XRPLF/rippled/pull/4270/commits/0ecfc7cb1a958b731e5f184876ea89ae2d4214ee)
### Bug Fixes
- **Update command-line usage help message**: Added `manifest` and `validator_info` to the `rippled` CLI usage statement. [b88ed5a](https://github.com/XRPLF/rippled/pull/4270/commits/b88ed5a8ec2a0735031ca23dc6569d54787dc2f2)
- **Work around gdb bug by changing a template parameter**: Added a workaround for a bug in gdb, where unsigned template parameters caused issues with RTTI. [#4332](https://github.com/XRPLF/rippled/pull/4332)
- **Fix clang 15 warnings**: [#4325](https://github.com/XRPLF/rippled/pull/4325)
- **Catch transaction deserialization error in doLedgerGrpc**: Fixed an issue in the gRPC API, so `Clio` can extract ledger headers and state objects from specific transactions that can't be deserialized by `rippled` code. [#4323](https://github.com/XRPLF/rippled/pull/4323)
- **Update dependency: gRPC**: New Conan recipes broke the old version of gRPC, so the dependency was updated. [#4407](https://github.com/XRPLF/rippled/pull/4407)
- **Fix Doxygen workflow**: Added options to build documentation that don't depend on the library dependencies of `rippled`. [#4372](https://github.com/XRPLF/rippled/pull/4372)
- **Don't try to read SLE with key 0 from the ledger**: Fixed the `preclaim` function to check for 0 in `NFTokenSellOffer` and `NFTokenBuyOffer` before calling `Ledger::read`. This issue only affected debug builds. [#4351](https://github.com/XRPLF/rippled/pull/4351)
- **Update broken link to hosted Doxygen content**: [5e1cb09](https://github.com/XRPLF/rippled/pull/4270/commits/5e1cb09b8892e650f6c34a66521b6b1673bd6b65)
### Code Cleanup
- **Prevent unnecessary `shared_ptr` copies by accepting a value in `SHAMapInnerNode::setChild`**: [#4266](https://github.com/XRPLF/rippled/pull/4266)
- **Release TaggedCache object memory outside the lock**: [3726f8b](https://github.com/XRPLF/rippled/pull/4321/commits/3726f8bf31b3eab8bab39dce139656fd705ae9a0)
- **Rename SHAMapStoreImp::stopping() to healthWait()**: [7e9e910](https://github.com/XRPLF/rippled/pull/4321/commits/7e9e9104eabbf0391a0837de5630af17a788e233)
- **Improve wrapper around OpenSSL RAND**: [7b3507b](https://github.com/XRPLF/rippled/pull/4270/commits/7b3507bb873495a974db33c57a888221ddabcacc)
- **Improve AccountID string conversion caching**: Improved memory cache usage. [e2eed96](https://github.com/XRPLF/rippled/pull/4270/commits/e2eed966b0ecb6445027e6a023b48d702c5f4832)
- **Build the command map at compile time**: [9aaa0df](https://github.com/XRPLF/rippled/pull/4270/commits/9aaa0dff5fd422e5f6880df8e20a1fd5ad3b4424)
- **Avoid unnecessary copying and dynamic memory allocations**: [d318ab6](https://github.com/XRPLF/rippled/pull/4270/commits/d318ab612adc86f1fd8527a50af232f377ca89ef)
- **Use constexpr to check memo validity**: [e67f905](https://github.com/XRPLF/rippled/pull/4270/commits/e67f90588a9050162881389d7e7d1d0fb31066b0)
- **Remove charUnHex**: [83ac141](https://github.com/XRPLF/rippled/pull/4270/commits/83ac141f656b1a95b5661853951ebd95b3ffba99)
- **Remove deprecated AccountTxOld.cpp**: [ce64f7a](https://github.com/XRPLF/rippled/pull/4270/commits/ce64f7a90f99c6b5e68d3c3d913443023de061a6)
- **Remove const_cast usage**: [23ce431](https://github.com/XRPLF/rippled/pull/4321/commits/23ce4318768b718c82e01004d23f1abc9a9549ff)
- **Remove inaccessible code paths and outdated data format wchar_t**: [95fabd5](https://github.com/XRPLF/rippled/pull/4321/commits/95fabd5762a4917753c06268192e4d4e4baef8e4)
- **Improve move semantics in Expected**: [#4326](https://github.com/XRPLF/rippled/pull/4326)
### GitHub
The public source code repository for `rippled` is hosted on GitHub at <https://github.com/XRPLF/rippled>.
We welcome all contributions and invite everyone to join the community of XRP Ledger developers to help build the Internet of Value.
### Credits
The following people contributed directly to this release:
- Alexander Kremer <akremer@ripple.com>
- Alloy Networks <45832257+alloynetworks@users.noreply.github.com>
- CJ Cobb <46455409+cjcobb23@users.noreply.github.com>
- Chenna Keshava B S <ckbs.keshava56@gmail.com>
- Crypto Brad Garlinghouse <cryptobradgarlinghouse@protonmail.com>
- Denis Angell <dangell@transia.co>
- Ed Hennis <ed@ripple.com>
- Elliot Lee <github.public@intelliot.com>
- Gregory Popovitch <greg7mdp@gmail.com>
- Howard Hinnant <howard.hinnant@gmail.com>
- J. Scott Branson <18340247+crypticrabbit@users.noreply.github.com>
- John Freeman <jfreeman08@gmail.com>
- ledhed2222 <ledhed2222@users.noreply.github.com>
- Levin Winter <33220502+levinwinter@users.noreply.github.com>
- manojsdoshi <mdoshi@ripple.com>
- Nik Bougalis <nikb@bougalis.net>
- RichardAH <richard.holland@starstone.co.nz>
- Scott Determan <scott.determan@yahoo.com>
- Scott Schurr <scott@ripple.com>
- Shawn Xie <35279399+shawnxie999@users.noreply.github.com>
Security Bug Bounty Acknowledgements:
- Aaron Hook
- Levin Winter
Bug Bounties and Responsible Disclosures:
We welcome reviews of the rippled code and urge researchers to
responsibly disclose any issues they may find.
To report a bug, please send a detailed report to:
bugs@xrpl.org
# Introducing XRP Ledger version 1.9.4
Version 1.9.4 of `rippled`, the reference implementation of the XRP Ledger protocol, is now available. This release introduces an amendment that removes the ability for an NFT issuer to indicate that trust lines should be automatically created for royalty payments from secondary sales of NFTs, in response to a bug report that showed how this functionality could be abused to mount a denial-of-service attack against the issuer.
## Action Required
This release introduces a new amendment to the XRP Ledger protocol, **`fixRemoveNFTokenAutoTrustLine`** to mitigate a potential denial-of-service attack against NFT issuers that minted NFTs and allowed secondary trading of those NFTs to create trust lines for any asset.
This amendment is open for voting according to the XRP Ledger's [amendment process](https://xrpl.org/amendments.html), which enables protocol changes following two weeks of >80% support from trusted validators.
If you operate an XRP Ledger server, then you should upgrade to version 1.9.4 within two weeks, to ensure service continuity. The exact time that protocol changes take effect depends on the voting decisions of the decentralized network.
For more information about NFTs on the XRP Ledger, see [NFT Conceptual Overview](https://xrpl.org/nft-conceptual-overview.html).
## Install / Upgrade
On supported platforms, see the [instructions on installing or updating `rippled`](https://xrpl.org/install-rippled.html).
## Changelog
## Contributions
The primary change in this release is the following bug fix:
- **Introduce fixRemoveNFTokenAutoTrustLine amendment**: Introduces the `fixRemoveNFTokenAutoTrustLine` amendment, which disables the `tfTrustLine` flag that a malicious attacker could exploit to mount denial-of-service attacks against NFT issuers that specified the flag on their NFTs. ([#4301](https://github.com/XRPLF/rippled/pull/4301))
### GitHub
The public source code repository for `rippled` is hosted on GitHub at <https://github.com/XRPLF/rippled>.
We welcome all contributions and invite everyone to join the community of XRP Ledger developers and help us build the Internet of Value.
### Credits
The following people contributed directly to this release:
- Scott Schurr <scott@ripple.com>
- Howard Hinnant <howard@ripple.com>
- Scott Determan <scott.determan@yahoo.com>
- Ikko Ashimine <eltociear@gmail.com>
Have new ideas? Need help with setting up your node? Come visit us [here](https://github.com/xrplf/rippled/issues/new/choose)
# Introducing XRP Ledger version 1.9.3
@@ -267,7 +19,7 @@ On supported platforms, see the [instructions on installing or updating `rippled
## Contributions
This release contains the following bug fixes:
This releases contains the following bug fixes:
- **Change by-value to by-reference to persist vote**: A minor technical flaw, caused by use of a copy instead of a reference, resulted in operator-configured "yes" votes to not be properly loaded after a restart. ([#4256](https://github.com/XRPLF/rippled/pull/4256))
- **Properly handle self-assignment of PublicKey**: The `PublicKey` copy assignment operator mishandled the case where a `PublicKey` would be assigned to itself, and could result in undefined behavior.

@@ -1,74 +1,149 @@
### Operating the Xahau server securely
### Operating an XRP Ledger server securely
For more details on operating the Xahau server securely, please visit https://docs.xahau.network/infrastructure/building-xahau.
For more details on operating an XRP Ledger server securely, please visit https://xrpl.org/manage-the-rippled-server.html.
# Security Policy
## Supported Versions
Software constantly evolves. In order to focus resources, we generally only accept vulnerability reports that affect recent and current versions of the software. We always accept reports for issues present in the **release**, **candidate** or **dev** branches, and in proposed, [open pull requests](https://github.com/xahau/xahaud/pulls).
Software constantly evolves. In order to focus resources, we generally only accept vulnerability reports that affect recent and current versions of the software. We always accept reports for issues present in the **master**, **release** or **develop** branches, and in proposed, [open pull requests](https://github.com/ripple/rippled/pulls).
# Responsible Disclosure
## Identifying and Reporting Vulnerabilities
## Responsible disclosure policy
We take security seriously and we do our best to ensure that all our releases are bug free. But we aren't perfect and sometimes things will slip through.
At [Xahau](https://xahau.network) we believe that the security of our systems is extremely important.
### Responsible Investigation
Despite our concern for the security of our systems during product development and maintenance, there's always the possibility of someone finding something we need to improve / update / change / fix / ...
We urge you to examine our code carefully and responsibly, and to disclose any issues that you identify in a responsible fashion.
We appreciate you notifying us as soon as possible if you find a weak point in one of our systems, so that we can take immediate measures to protect our customers and their data.
Responsible investigation includes, but isn't limited to, the following:
## How to report
- Not performing tests on the main network. If testing is necessary, use the [Testnet or Devnet](https://xrpl.org/xrp-testnet-faucet.html).
- Not targeting physical security measures, or attempting to use social engineering, spam, distributed denial of service (DDOS) attacks, etc.
- Investigating bugs in a way that makes a reasonable, good faith effort not to be disruptive or harmful to the XRP Ledger and the broader ecosystem.
If you believe you found a security issue in one of our systems, please notify us as soon as possible by [sending an email to bugs@xahau.network](mailto:bugs@xahau.network).
### Responsible Disclosure
## Rules
If you discover a vulnerability or potential threat, or if you _think_
you have, please reach out by dropping an email using the contact
information below.
This responsible disclosure policy is not an open invitation to actively scan our network and applications for vulnerabilities. Our continuous monitoring will likely detect your scan and these will be investigated.
Your report should include the following:
### We ask you to:
- Your contact information (typically, an email address);
- The description of the vulnerability;
- The attack scenario (if any);
- The steps to reproduce the vulnerability;
- Any other relevant details or artifacts, including code, scripts or patches.
- Not share information about the security issue with others until the problem is resolved and to immediately delete any confidential data acquired
- Not further abuse the problem, for example, by downloading more data than is necessary in order to demonstrate the leak or to view, delete or amend the data of third parties
- Provide detailed information in order for us to reproduce, validate and resolve the problem as quickly as possible. Include your test data, timestamps and URL(s) of the system(s) involved
- Leave your contact details (e-mail address and/or phone number) so that we may contact you about the progress of the solution. We do accept anonymous reports
- Do not use attacks on physical security, social engineering, distributed denial of service, spam or applications of third parties
In your mail, please describe the issue or the potential threat; if possible, please include a "repro" (code that can reproduce the issue) or describe the best way to reproduce the issue. Please make your report as extensive as possible.
## Responsible Disclosure procedure(s)
For more information on responsible disclosure, please read this [Wikipedia article](https://en.wikipedia.org/wiki/Responsible_disclosure).
### When you report a security issue, we will act according to the following:
## Report Handling Process
- You will receive a confirmation of receipt from us within 4 working days after the report was made
- You will receive a response with the assessment of the security issue and an expected date of resolution within 4 working days after the confirmation of receipt was sent
- We will take no legal steps against you in relation to the report if you have kept to the conditions as set out above
- We will handle your report confidentially and we will not share your details with third parties without your permission, unless that is necessary in order to fulfil a legal obligation
Please report the bug directly to us and limit further disclosure. If you want to prove that you knew the bug as of a given time, consider using a cryptographic precommitment: hash the content of your report and publish the hash on a medium of your choice (e.g. on Twitter or as a memo in a transaction) as "proof" that you had written the text at a given point in time.
### This responsible disclosure scheme is not intended for:
Once we receive a report, we:
- Complaints
- Website unavailable reports
- Phishing reports
- Fraud reports
1. Assign two people to independently evaluate the report;
2. Consider their recommendations;
3. If action is necessary, formulate a plan to address the issue;
4. Communicate privately with the reporter to explain our plan.
5. Prepare, test and release a version which fixes the issue; and
6. Announce the vulnerability publicly.
For these complaints or reports, please [contact our support team](mailto:bugs@xahau.network).
We will triage and respond to your disclosure within 24 hours. Beyond that, we will work to analyze the issue in more detail, formulate, develop and test a fix.
## Bug bounty program
While we commit to responding within 24 hours of your initial report with our triage assessment, we cannot guarantee a response time for the remaining steps. We will communicate with you throughout this process, letting you know where we are and keeping you updated on the timeframe.
[Xahau](https://xahau.network) encourages the reporting of security issues or vulnerabilities. We may offer an appropriate reward for the confidential disclosure of any design or implementation issue, not yet known to us, that could be used to compromise the confidentiality or integrity of our users' data. We decide whether a report is eligible and the amount of the reward.
## Bug Bounty Program
## Exclusions
[Ripple](https://ripple.com) is generously sponsoring a bug bounty program for vulnerabilities in [`rippled`](https://github.com/ripple/rippled) (and other related projects, like [`ripple-lib`](https://github.com/ripple/ripple-lib)).
### The following type of security problems are excluded
This program allows us to recognize and reward individuals or groups that identify and report bugs. In summary, in order to qualify for a bounty, the bug must be:
- (D)DOS attacks
- Error messages or error pages without sensitive data
- Tests & sample data as publicly available in our repositories at Github
- Common issues like browser header warnings or DNS configuration, identified by vulnerability scans
- Vulnerability scan reports for software we publicly use
- Security issues related to outdated OS's, browsers or plugins
- Reports for security problems that we have been notified of before
1. **In scope**. Only bugs in software under the scope of the program qualify. Currently, that means `rippled` and `ripple-lib`.
2. **Relevant**. A security issue, posing a danger to user funds, privacy or the operation of the XRP Ledger.
3. **Original and previously unknown**. Bugs that are already known and discussed in public do not qualify. Previously reported bugs, even if publicly unknown, are not eligible.
4. **Specific**. We welcome general security advice or recommendations, but we cannot pay bounties for that.
5. **Fixable**. There has to be something we can do to permanently fix the problem. Note that bugs in other people's software may still qualify in some cases. For example, if you find a bug in a library that we use which can compromise the security of software that is in scope, and we can get it fixed, you may qualify for a bounty.
6. **Unused**. If you use the exploit to attack the XRP Ledger, you do not qualify for a bounty. If you report a vulnerability used in an ongoing or past attack and there is specific, concrete evidence that suggests you are the attacker, we reserve the right not to pay a bounty.
Please note: Reports that are lacking any proof (such as screenshots or other data), detailed information or details on how to reproduce any unexpected result will be investigated but will not be eligible for any reward.
The amount paid varies dramatically. Vulnerabilities that are harmless on their own, but could form part of a critical exploit, will usually receive a bounty. Full-blown exploits can receive much higher bounties. Please don't hold back partial vulnerabilities while trying to construct a full-blown exploit. We will pay a bounty to anyone who reports a complete chain of vulnerabilities even if they have reported each component of the exploit separately and those vulnerabilities have been fixed in the meantime. However, to qualify for the full bounty, you must have been the first to report each of the partial exploits.
This policy is based on the National Cyber Security Centre's Responsible Disclosure Guidelines and an [example by Floor Terra](https://responsibledisclosure.nl).
### Contacting Us
To report a qualifying bug, please send a detailed report to:
|Email Address|bugs@ripple.com |
|:-----------:|:----------------------------------------------------|
|Short Key ID | `0xC57929BE` |
|Long Key ID | `0xCD49A0AFC57929BE` |
|Fingerprint | `24E6 3B02 37E0 FA9C 5E96 8974 CD49 A0AF C579 29BE` |
The full PGP key for this address, which is also available on several key servers (e.g. on [keys.gnupg.net](https://keys.gnupg.net)), is:
```
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBFUwGHYBEAC0wpGpBPkd8W1UdQjg9+cEFzeIEJRaoZoeuJD8mofwI5Ejnjdt
kCpUYEDal0ygkKobu8SzOoATcDl18iCrScX39VpTm96vISFZMhmOryYCIp4QLJNN
4HKc2ZdBj6W4igNi6vj5Qo6JMyGpLY2mz4CZskbt0TNuUxWrGood+UrCzpY8x7/N
a93fcvNw+prgCr0rCH3hAPmAFfsOBbtGzNnmq7xf3jg5r4Z4sDiNIF1X1y53DAfV
rWDx49IKsuCEJfPMp1MnBSvDvLaQ2hKXs+cOpx1BCZgHn3skouEUxxgqbtTzBLt1
xXpmuijsaltWngPnGO7mOAzbpZSdBm82/Emrk9bPMuD0QaLQjWr7HkTSUs6ZsKt4
7CLPdWqxyY/QVw9UaxeHEtWGQGMIQGgVJGh1fjtUr5O1sC9z9jXcQ0HuIHnRCTls
GP7hklJmfH5V4SyAJQ06/hLuEhUJ7dn+BlqCsT0tLmYTgZYNzNcLHcqBFMEZHvHw
9GENMx/tDXgajKql4bJnzuTK0iGU/YepanANLd1JHECJ4jzTtmKOus9SOGlB2/l1
0t0ADDYAS3eqOdOcUvo9ElSLCI5vSVHhShSte/n2FMWU+kMUboTUisEG8CgQnrng
g2CvvQvqDkeOtZeqMcC7HdiZS0q3LJUWtwA/ViwxrVlBDCxiTUXCotyBWwARAQAB
tDBSaXBwbGUgTGFicyBCdWcgQm91bnR5IFByb2dyYW0gPGJ1Z3NAcmlwcGxlLmNv
bT6JAjcEEwEKACEFAlUwGHYCGwMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQ
zUmgr8V5Kb6R0g//SwY/mVJY59k87iL26/KayauSoOcz7xjcST26l4ZHVVX85gOY
HYZl8k0+m8X3zxeYm9a3QAoAml8sfoaFRFQP8ynnefRrLUPaZ2MjbJ0SACMwZNef
T6o7Mi8LBAaiNZdYVyIfX1oM6YXtqYkuJdav6ZCyvVYqc9OvMJPY2ZzJYuI/ZtvQ
/lTndxCeg9ALNX/iezOLGdfMpf4HuIFVwcPPlwGi+HDlB9/bggDEHC8z434SXVFc
aQatXAPcDkjMUweU7y0CZtYEj00HITd4pSX6MqGiHrxlDZTqinCOPs1Ieqp7qufs
MzlM6irLGucxj1+wa16ieyYvEtGaPIsksUKkywx0O7cf8N2qKg+eIkUk6O0Uc6eO
CszizmiXIXy4O6OiLlVHGKkXHMSW9Nwe9GE95O8G9WR8OZCEuDv+mHPAutO+IjdP
PDAAUvy+3XnkceO+HGWRpVvJZfFP2YH4A33InFL5yqlJmSoR/yVingGLxk55bZDM
+HYGR3VeMb8Xj1rf/02qERsZyccMCFdAvKDbTwmvglyHdVLu5sPmktxbBYiemfyJ
qxMxmYXCc9S0hWrWZW7edktBa9NpE58z1mx+hRIrDNbS2sDHrib9PULYCySyVYcF
P+PWEe1CAS5jqkR2ker5td2/pHNnJIycynBEs7l6zbc9fu+nktFJz0q2B+GJAhwE
EAEKAAYFAlUwGaQACgkQ+tiY1qQ2QkjMFw//f2hNY3BPNe+1qbhzumMDCnbTnGif
kLuAGl9OKt81VHG1f6RnaGiLpR696+6Ja45KzH15cQ5JJl5Bgs1YkR/noTGX8IAD
c70eNwiFu8JXTaaeeJrsmFkF9Tueufb364risYkvPP8tNUD3InBFEZT3WN7JKwix
coD4/BwekUwOZVDd/uCFEyhlhZsROxdKNisNo3VtAq2s+3tIBAmTrriFUl0K+ZC5
zgavcpnPN57zMtW9aK+VO3wXqAKYLYmtgxkVzSLUZt2M7JuwOaAdyuYWAneKZPCu
1AXkmyo+d84sd5mZaKOr5xArAFiNMWPUcZL4rkS1Fq4dKtGAqzzR7a7hWtA5o27T
6vynuxZ1n0PPh0er2O/zF4znIjm5RhTlfjp/VmhZdQfpulFEQ/dMxxGkQ9z5IYbX
mTlSDbCSb+FMsanRBJ7Drp5EmBIudVGY6SHI5Re1RQiEh7GoDfUMUwZO+TVDII5R
Ra7WyuimYleJgDo/+7HyfuIyGDaUCVj6pwVtYtYIdOI3tTw1R1Mr0V8yaNVnJghL
CHcEJQL+YHSmiMM3ySil3O6tm1By6lFz8bVe/rgG/5uklQrnjMR37jYboi1orCC4
yeIoQeV0ItlxeTyBwYIV/o1DBNxDevTZvJabC93WiGLw2XFjpZ0q/9+zI2rJUZJh
qxmKP+D4e27lCI65Ag0EVTAYdgEQAMvttYNqeRNBRpSX8fk45WVIV8Fb21fWdwk6
2SkZnJURbiC0LxQnOi7wrtii7DeFZtwM2kFHihS1VHekBnIKKZQSgGoKuFAQMGyu
a426H4ZsSmA9Ufd7kRbvdtEcp7/RTAanhrSL4lkBhaKJrXlxBJ27o3nd7/rh7r3a
OszbPY6DJ5bWClX3KooPTDl/RF2lHn+fweFk58UvuunHIyo4BWJUdilSXIjLun+P
Qaik4ZAsZVwNhdNz05d+vtai4AwbYoO7adboMLRkYaXSQwGytkm+fM6r7OpXHYuS
cR4zB/OK5hxCVEpWfiwN71N2NMvnEMaWd/9uhqxJzyvYgkVUXV9274TUe16pzXnW
ZLfmitjwc91e7mJBBfKNenDdhaLEIlDRwKTLj7k58f9srpMnyZFacntu5pUMNblB
cjXwWxz5ZaQikLnKYhIvrIEwtWPyjqOzNXNvYfZamve/LJ8HmWGCKao3QHoAIDvB
9XBxrDyTJDpxbog6Qu4SY8AdgVlan6c/PsLDc7EUegeYiNTzsOK+eq3G5/E92eIu
TsUXlciypFcRm1q8vLRr+HYYe2mJDo4GetB1zLkAFBcYJm/x9iJQbu0hn5NxJvZO
R0Y5nOJQdyi+muJzKYwhkuzaOlswzqVXkq/7+QCjg7QsycdcwDjiQh3OrsgXHrwl
M7gyafL9ABEBAAGJAh8EGAEKAAkFAlUwGHYCGwwACgkQzUmgr8V5Kb50BxAAhj9T
TwmNrgRldTHszj+Qc+v8RWqV6j+R+zc0cn5XlUa6XFaXI1OFFg71H4dhCPEiYeN0
IrnocyMNvCol+eKIlPKbPTmoixjQ4udPTR1DC1Bx1MyW5FqOrsgBl5t0e1VwEViM
NspSStxu5Hsr6oWz2GD48lXZWJOgoL1RLs+uxjcyjySD/em2fOKASwchYmI+ezRv
plfhAFIMKTSCN2pgVTEOaaz13M0U+MoprThqF1LWzkGkkC7n/1V1f5tn83BWiagG
2N2Q4tHLfyouzMUKnX28kQ9sXfxwmYb2sA9FNIgxy+TdKU2ofLxivoWT8zS189z/
Yj9fErmiMjns2FzEDX+bipAw55X4D/RsaFgC+2x2PDbxeQh6JalRA2Wjq32Ouubx
u+I4QhEDJIcVwt9x6LPDuos1F+M5QW0AiUhKrZJ17UrxOtaquh/nPUL9T3l2qPUn
1ChrZEEEhHO6vA8+jn0+cV9n5xEz30Str9iHnDQ5QyR5LyV4UBPgTdWyQzNVKA69
KsSr9lbHEtQFRzGuBKwt6UlSFv9vPWWJkJit5XDKAlcKuGXj0J8OlltToocGElkF
+gEBZfoOWi/IBjRLrFW2cT3p36DTR5O1Ud/1DLnWRqgWNBLrbs2/KMKE6EnHttyD
7Tz8SQkuxltX/yBXMV3Ddy0t6nWV2SZEfuxJAQI=
=spg4
-----END PGP PUBLIC KEY BLOCK-----
```
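To actually use this key, one possible workflow is sketched below with GnuPG; the key ID and fingerprint are the ones published in the table above, and `report.txt` is a hypothetical filename:
```
# Import the key from a key server, or paste the block above into `gpg --import`.
gpg --recv-keys 0xCD49A0AFC57929BE

# Check that the fingerprint matches the one published in the table above.
gpg --fingerprint 0xCD49A0AFC57929BE

# Encrypt the report for bugs@ripple.com and send the resulting report.txt.asc.
gpg --armor --encrypt --recipient 0xCD49A0AFC57929BE report.txt
```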

470
bin/browser.js Executable file
View File

@@ -0,0 +1,470 @@
#!/usr/bin/node
//
// ledger?l=L
// transaction?h=H
// ledger_entry?l=L&h=H
// account?l=L&a=A
// directory?l=L&dir_root=H&i=I
// directory?l=L&o=A&i=I // owner directory
// offer?l=L&offer=H
// offer?l=L&account=A&i=I
// ripple_state?l=L&a=A&b=A&c=C
// account_lines?l=L&a=A
//
// A=address
// C=currency 3 letter code
// H=hash
// I=index
// L=current | closed | validated | index | hash
//
var async = require("async");
var extend = require("extend");
var http = require("http");
var url = require("url");
var Remote = require("ripple-lib").Remote;
var program = process.argv[1];
var httpd_response = function (res, opts) {
var self=this;
res.statusCode = opts.statusCode;
res.end(
"<HTML>"
+ "<HEAD><TITLE>Title</TITLE></HEAD>"
+ "<BODY BACKGROUND=\"#FFFFFF\">"
+ "State:" + self.state
+ "<UL>"
+ "<LI><A HREF=\"/\">home</A>"
+ "<LI>" + html_link('r4EM4gBQfr1QgQLXSPF4r7h84qE9mb6iCC')
// + "<LI><A HREF=\""+test+"\">rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh</A>"
+ "<LI><A HREF=\"/ledger\">ledger</A>"
+ "</UL>"
+ (opts.body || '')
+ '<HR><PRE>'
+ (opts.url || '')
+ '</PRE>'
+ "</BODY>"
+ "</HTML>"
);
};
var html_link = function (generic) {
return '<A HREF="' + build_uri({ type: 'account', account: generic}) + '">' + generic + '</A>';
};
// Build a link to a type.
var build_uri = function (params, opts) {
var c;
if (params.type === 'account') {
c = {
pathname: 'account',
query: {
l: params.ledger,
a: params.account,
},
};
} else if (params.type === 'ledger') {
c = {
pathname: 'ledger',
query: {
l: params.ledger,
},
};
} else if (params.type === 'transaction') {
c = {
pathname: 'transaction',
query: {
h: params.hash,
},
};
} else {
c = {};
}
opts = opts || {};
c.protocol = "http";
c.hostname = opts.hostname || self.base.hostname;
c.port = opts.port || self.base.port;
return url.format(c);
};
var build_link = function (item, link) {
console.log(link);
return "<A HREF=" + link + ">" + item + "</A>";
};
var rewrite_field = function (type, obj, field, opts) {
if (field in obj) {
obj[field] = rewrite_type(type, obj[field], opts);
}
};
var rewrite_type = function (type, obj, opts) {
if ('amount' === type) {
if ('string' === typeof obj) {
// XRP.
return '<B>' + obj + '</B>';
} else {
rewrite_field('address', obj, 'issuer', opts);
return obj;
}
}
if ('address' === type) {
return build_link(
obj,
build_uri({
type: 'account',
account: obj
}, opts)
);
}
else if ('ledger' === type) {
return build_link(
obj,
build_uri({
type: 'ledger',
ledger: obj,
}, opts)
);
}
else if ('node' === type) {
// A node
if ('PreviousTxnID' in obj)
obj.PreviousTxnID = rewrite_type('transaction', obj.PreviousTxnID, opts);
if ('Offer' === obj.LedgerEntryType) {
if ('NewFields' in obj) {
if ('TakerGets' in obj.NewFields)
obj.NewFields.TakerGets = rewrite_type('amount', obj.NewFields.TakerGets, opts);
if ('TakerPays' in obj.NewFields)
obj.NewFields.TakerPays = rewrite_type('amount', obj.NewFields.TakerPays, opts);
}
}
obj.LedgerEntryType = '<B>' + obj.LedgerEntryType + '</B>';
return obj;
}
else if ('transaction' === type) {
// Reference to a transaction.
return build_link(
obj,
build_uri({
type: 'transaction',
hash: obj
}, opts)
);
}
return 'ERROR: ' + type;
};
var rewrite_object = function (obj, opts) {
var out = extend({}, obj);
rewrite_field('address', out, 'Account', opts);
rewrite_field('ledger', out, 'parent_hash', opts);
rewrite_field('ledger', out, 'ledger_index', opts);
rewrite_field('ledger', out, 'ledger_current_index', opts);
rewrite_field('ledger', out, 'ledger_hash', opts);
if ('ledger' in obj) {
// It's a ledger header.
out.ledger = rewrite_object(out.ledger, opts);
if ('ledger_hash' in out.ledger)
out.ledger.ledger_hash = '<B>' + out.ledger.ledger_hash + '</B>';
delete out.ledger.hash;
delete out.ledger.totalCoins;
}
if ('TransactionType' in obj) {
// It's a transaction.
out.TransactionType = '<B>' + obj.TransactionType + '</B>';
rewrite_field('amount', out, 'TakerGets', opts);
rewrite_field('amount', out, 'TakerPays', opts);
rewrite_field('ledger', out, 'inLedger', opts);
out.meta.AffectedNodes = out.meta.AffectedNodes.map(function (node) {
var kind = 'CreatedNode' in node
? 'CreatedNode'
: 'ModifiedNode' in node
? 'ModifiedNode'
: 'DeletedNode' in node
? 'DeletedNode'
: undefined;
if (kind) {
node[kind] = rewrite_type('node', node[kind], opts);
}
return node;
});
}
else if ('node' in obj && 'LedgerEntryType' in obj.node) {
// It's a ledger entry.
if (obj.node.LedgerEntryType === 'AccountRoot') {
rewrite_field('address', out.node, 'Account', opts);
rewrite_field('transaction', out.node, 'PreviousTxnID', opts);
rewrite_field('ledger', out.node, 'PreviousTxnLgrSeq', opts);
}
out.node.LedgerEntryType = '<B>' + out.node.LedgerEntryType + '</B>';
}
return out;
};
var augment_object = function (obj, opts, done) {
if (obj.node.LedgerEntryType == 'AccountRoot') {
var tx_hash = obj.node.PreviousTxnID;
var tx_ledger = obj.node.PreviousTxnLgrSeq;
obj.history = [];
async.whilst(
function () { return tx_hash; },
function (callback) {
// console.log("augment_object: request: %s %s", tx_hash, tx_ledger);
opts.remote.request_tx(tx_hash)
.on('success', function (m) {
tx_hash = undefined;
tx_ledger = undefined;
//console.log("augment_object: ", JSON.stringify(m));
m.meta.AffectedNodes.filter(function(n) {
// console.log("augment_object: ", JSON.stringify(n));
// if (n.ModifiedNode)
// console.log("augment_object: %s %s %s %s %s %s/%s", 'ModifiedNode' in n, n.ModifiedNode && (n.ModifiedNode.LedgerEntryType === 'AccountRoot'), n.ModifiedNode && n.ModifiedNode.FinalFields && (n.ModifiedNode.FinalFields.Account === obj.node.Account), Object.keys(n)[0], n.ModifiedNode && (n.ModifiedNode.LedgerEntryType), obj.node.Account, n.ModifiedNode && n.ModifiedNode.FinalFields && n.ModifiedNode.FinalFields.Account);
// if ('ModifiedNode' in n && n.ModifiedNode.LedgerEntryType === 'AccountRoot')
// {
// console.log("***: ", JSON.stringify(m));
// console.log("***: ", JSON.stringify(n));
// }
return 'ModifiedNode' in n
&& n.ModifiedNode.LedgerEntryType === 'AccountRoot'
&& n.ModifiedNode.FinalFields
&& n.ModifiedNode.FinalFields.Account === obj.node.Account;
})
.forEach(function (n) {
tx_hash = n.ModifiedNode.PreviousTxnID;
tx_ledger = n.ModifiedNode.PreviousTxnLgrSeq;
obj.history.push({
tx_hash: tx_hash,
tx_ledger: tx_ledger
});
console.log("augment_object: next: %s %s", tx_hash, tx_ledger);
});
callback();
})
.on('error', function (m) {
callback(m);
})
.request();
},
function (err) {
if (err) {
done();
}
else {
async.forEach(obj.history, function (o, callback) {
opts.remote.request_account_info(obj.node.Account)
.ledger_index(o.tx_ledger)
.on('success', function (m) {
//console.log("augment_object: ", JSON.stringify(m));
o.Balance = m.account_data.Balance;
// o.account_data = m.account_data;
callback();
})
.on('error', function (m) {
o.error = m;
callback();
})
.request();
},
function (err) {
done(err);
});
}
});
}
else {
done();
}
};
if (process.argv.length < 4 || process.argv.length > 7) {
console.log("Usage: %s ws_ip ws_port [<ip> [<port> [<start>]]]", program);
}
else {
var ws_ip = process.argv[2];
var ws_port = process.argv[3];
var ip = process.argv.length > 4 ? process.argv[4] : "127.0.0.1";
var port = process.argv.length > 5 ? process.argv[5] : "8080";
// console.log("START");
var self = this;
var remote = (new Remote({
websocket_ip: ws_ip,
websocket_port: ws_port,
trace: false
}))
.on('state', function (m) {
console.log("STATE: %s", m);
self.state = m;
})
// .once('ledger_closed', callback)
.connect()
;
self.base = {
hostname: ip,
port: port,
remote: remote,
};
// console.log("SERVE");
var server = http.createServer(function (req, res) {
var input = "";
req.setEncoding();
req.on('data', function (buffer) {
// console.log("DATA: %s", buffer);
input = input + buffer;
});
req.on('end', function () {
// console.log("URL: %s", req.url);
// console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));
var _parsed = url.parse(req.url, true);
var _url = JSON.stringify(_parsed, undefined, 2);
// console.log("HEADERS: %s", JSON.stringify(_parsed, undefined, 2));
if (_parsed.pathname === "/account") {
var request = remote
.request_ledger_entry('account_root')
.ledger_index(-1)
.account_root(_parsed.query.a)
.on('success', function (m) {
// console.log("account_root: %s", JSON.stringify(m, undefined, 2));
augment_object(m, self.base, function() {
httpd_response(res,
{
statusCode: 200,
url: _url,
body: "<PRE>"
+ JSON.stringify(rewrite_object(m, self.base), undefined, 2)
+ "</PRE>"
});
});
})
.request();
} else if (_parsed.pathname === "/ledger") {
var request = remote
.request_ledger(undefined, { expand: true, transactions: true })
.on('success', function (m) {
// console.log("Ledger: %s", JSON.stringify(m, undefined, 2));
httpd_response(res,
{
statusCode: 200,
url: _url,
body: "<PRE>"
+ JSON.stringify(rewrite_object(m, self.base), undefined, 2)
+"</PRE>"
});
})
if (_parsed.query.l && _parsed.query.l.length === 64) {
request.ledger_hash(_parsed.query.l);
}
else if (_parsed.query.l) {
request.ledger_index(Number(_parsed.query.l));
}
else {
request.ledger_index(-1);
}
request.request();
} else if (_parsed.pathname === "/transaction") {
var request = remote
.request_tx(_parsed.query.h)
// .request_transaction_entry(_parsed.query.h)
// .ledger_select(_parsed.query.l)
.on('success', function (m) {
// console.log("transaction: %s", JSON.stringify(m, undefined, 2));
httpd_response(res,
{
statusCode: 200,
url: _url,
body: "<PRE>"
+ JSON.stringify(rewrite_object(m, self.base), undefined, 2)
+"</PRE>"
});
})
.on('error', function (m) {
httpd_response(res,
{
statusCode: 200,
url: _url,
body: "<PRE>"
+ 'ERROR: ' + JSON.stringify(m, undefined, 2)
+"</PRE>"
});
})
.request();
} else {
var test = build_uri({
type: 'account',
ledger: 'closed',
account: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
}, self.base);
httpd_response(res,
{
statusCode: req.url === "/" ? 200 : 404,
url: _url,
});
}
});
});
server.listen(port, ip, undefined,
function () {
console.log("Listening at: http://%s:%s", ip, port);
});
}
// vim:sw=2:sts=2:ts=8:et
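A hedged sketch of how this explorer is meant to be run; the rippled WebSocket port (6006) is an assumption, and the `async`, `extend`, and `ripple-lib` node modules must be installed:
```
# Point the explorer at a local rippled WebSocket endpoint and serve
# HTML on the script's defaults, 127.0.0.1:8080.
node bin/browser.js 127.0.0.1 6006

# Then query one of the handled paths (/account, /ledger, /transaction).
curl "http://127.0.0.1:8080/ledger"
curl "http://127.0.0.1:8080/account?a=rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh"
```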

24
bin/ci/README.md Normal file
View File

@@ -0,0 +1,24 @@
In this directory are two scripts, `build.sh` and `test.sh` used for building
and testing rippled.
(For now, they assume Bash and Linux. Once I get Windows containers for
testing, I'll try them there, but if Bash is not available, then they will
soon be joined by PowerShell scripts `build.ps` and `test.ps`.)
We don't want these scripts to require arcane invocations that can only be
pieced together from within a CI configuration. We want something that humans
can easily invoke, read, and understand, for when we eventually have to test
and debug them interactively. That means:
(1) They should work with no arguments.
(2) They should document their arguments.
(3) They should expand short arguments into long arguments.
While we want to provide options for common use cases, we don't need to offer
the kitchen sink. We can rightfully expect users with esoteric, complicated
needs to write their own scripts.
To make argument-handling easy for us, the implementers, we can just take all
arguments from environment variables. They have the nice advantage that every
command-line uses named arguments. For the benefit of us and our users, we
document those variables at the top of each script.
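As a sketch of what that looks like in practice (the values are illustrative; the variable names are the ones documented at the top of `build.sh` and `test.sh`):
```
# Every argument is a named environment variable, so an interactive
# invocation reads the same way the CI configuration does.
GENERATOR=Ninja COMPILER=clang BUILD_TYPE=Release bin/ci/build.sh
CONCURRENT_TESTS=4 RIPPLED=build/rippled bin/ci/test.sh
```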

31
bin/ci/build.sh Executable file
View File

@@ -0,0 +1,31 @@
#!/usr/bin/env bash
set -o xtrace
set -o errexit
# The build system. Either 'Unix Makefiles' or 'Ninja'.
GENERATOR=${GENERATOR:-Unix Makefiles}
# The compiler. Either 'gcc' or 'clang'.
COMPILER=${COMPILER:-gcc}
# The build type. Either 'Debug' or 'Release'.
BUILD_TYPE=${BUILD_TYPE:-Debug}
# Additional arguments to CMake.
# We use the `-` substitution here instead of `:-` so that callers can erase
# the default by setting `$CMAKE_ARGS` to the empty string.
CMAKE_ARGS=${CMAKE_ARGS-'-Dwerr=ON'}
# https://gitlab.kitware.com/cmake/cmake/issues/18865
CMAKE_ARGS="-DBoost_NO_BOOST_CMAKE=ON ${CMAKE_ARGS}"
if [[ ${COMPILER} == 'gcc' ]]; then
export CC='gcc'
export CXX='g++'
elif [[ ${COMPILER} == 'clang' ]]; then
export CC='clang'
export CXX='clang++'
fi
mkdir build
cd build
cmake -G "${GENERATOR}" -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_ARGS} ..
cmake --build . -- -j $(nproc)
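The `-` (rather than `:-`) expansion for `CMAKE_ARGS` is the one subtle piece; a short illustration, assuming the script is run from a clean repository root:
```
# CMAKE_ARGS unset: the default -Dwerr=ON is applied.
bin/ci/build.sh

# CMAKE_ARGS set but empty: the default is erased and no extra flag is passed.
CMAKE_ARGS= bin/ci/build.sh

# CMAKE_ARGS set: the caller's flags replace the default entirely.
CMAKE_ARGS='-Dwerr=OFF' bin/ci/build.sh
```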

41
bin/ci/test.sh Executable file
View File

@@ -0,0 +1,41 @@
#!/usr/bin/env bash
set -o xtrace
set -o errexit
# Set to 'true' to run the known "manual" tests in rippled.
MANUAL_TESTS=${MANUAL_TESTS:-false}
# The maximum number of concurrent tests.
CONCURRENT_TESTS=${CONCURRENT_TESTS:-$(nproc)}
# The path to rippled.
RIPPLED=${RIPPLED:-build/rippled}
# Additional arguments to rippled.
RIPPLED_ARGS=${RIPPLED_ARGS:-}
function join_by { local IFS="$1"; shift; echo "$*"; }
declare -a manual_tests=(
'beast.chrono.abstract_clock'
'beast.unit_test.print'
'ripple.NodeStore.Timing'
'ripple.app.Flow_manual'
'ripple.app.NoRippleCheckLimits'
'ripple.app.PayStrandAllPairs'
'ripple.consensus.ByzantineFailureSim'
'ripple.consensus.DistributedValidators'
'ripple.consensus.ScaleFreeSim'
'ripple.tx.CrossingLimits'
'ripple.tx.FindOversizeCross'
'ripple.tx.Offer_manual'
'ripple.tx.OversizeMeta'
'ripple.tx.PlumpBook'
)
if [[ ${MANUAL_TESTS} == 'true' ]]; then
RIPPLED_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
else
RIPPLED_ARGS+=" --unittest --quiet --unittest-log"
fi
RIPPLED_ARGS+=" --unittest-jobs ${CONCURRENT_TESTS}"
${RIPPLED} ${RIPPLED_ARGS}
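Two hedged invocation sketches; both assume `build.sh` has already produced `build/rippled`:
```
# Default: run the regular unit-test suite, one job per core.
bin/ci/test.sh

# Run only the listed manual tests, two at a time, against a custom binary.
MANUAL_TESTS=true CONCURRENT_TESTS=2 RIPPLED=custom-build/rippled bin/ci/test.sh
```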

274
bin/ci/ubuntu/build-and-test.sh Executable file
View File

@@ -0,0 +1,274 @@
#!/usr/bin/env bash
set -ex
function version_ge() { test "$(echo "$@" | tr " " "\n" | sort -rV | head -n 1)" == "$1"; }
__dirname=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
echo "using CC: ${CC}"
"${CC}" --version
export CC
COMPNAME=$(basename $CC)
echo "using CXX: ${CXX:-notset}"
if [[ $CXX ]]; then
"${CXX}" --version
export CXX
fi
: ${BUILD_TYPE:=Debug}
echo "BUILD TYPE: ${BUILD_TYPE}"
: ${TARGET:=install}
echo "BUILD TARGET: ${TARGET}"
JOBS=${NUM_PROCESSORS:-2}
if [[ ${TRAVIS:-false} != "true" ]]; then
JOBS=$((JOBS+1))
fi
if [[ ! -z "${CMAKE_EXE:-}" ]] ; then
export PATH="$(dirname ${CMAKE_EXE}):$PATH"
fi
if [ -x /usr/bin/time ] ; then
: ${TIME:="Duration: %E"}
export TIME
time=/usr/bin/time
else
time=
fi
echo "Building rippled"
: ${CMAKE_EXTRA_ARGS:=""}
if [[ ${NINJA_BUILD:-} == true ]]; then
CMAKE_EXTRA_ARGS+=" -G Ninja"
fi
coverage=false
if [[ "${TARGET}" == "coverage_report" ]] ; then
echo "coverage option detected."
coverage=true
fi
cmake --version
CMAKE_VER=$(cmake --version | cut -d " " -f 3 | head -1)
#
# allow explicit setting of the name of the build
# dir, otherwise default to the compiler.build_type
#
: "${BUILD_DIR:=${COMPNAME}.${BUILD_TYPE}}"
BUILDARGS="--target ${TARGET}"
BUILDTOOLARGS=""
if version_ge $CMAKE_VER "3.12.0" ; then
BUILDARGS+=" --parallel"
fi
if [[ ${NINJA_BUILD:-} == false ]]; then
if version_ge $CMAKE_VER "3.12.0" ; then
BUILDARGS+=" ${JOBS}"
else
BUILDTOOLARGS+=" -j ${JOBS}"
fi
fi
if [[ ${VERBOSE_BUILD:-} == true ]]; then
CMAKE_EXTRA_ARGS+=" -DCMAKE_VERBOSE_MAKEFILE=ON"
if version_ge $CMAKE_VER "3.14.0" ; then
BUILDARGS+=" --verbose"
else
if [[ ${NINJA_BUILD:-} == false ]]; then
BUILDTOOLARGS+=" verbose=1"
else
BUILDTOOLARGS+=" -v"
fi
fi
fi
if [[ ${USE_CCACHE:-} == true ]]; then
echo "using ccache with basedir [${CCACHE_BASEDIR:-}]"
CMAKE_EXTRA_ARGS+=" -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
fi
if [ -d "build/${BUILD_DIR}" ]; then
rm -rf "build/${BUILD_DIR}"
fi
mkdir -p "build/${BUILD_DIR}"
pushd "build/${BUILD_DIR}"
# cleanup possible artifacts
rm -fv CMakeFiles/CMakeOutput.log CMakeFiles/CMakeError.log
# Clean up NIH directories which should be git repos, but aren't
for nih_path in ${NIH_CACHE_ROOT}/*/*/*/src ${NIH_CACHE_ROOT}/*/*/src
do
for dir in lz4 snappy rocksdb
do
if [ -e ${nih_path}/${dir} -a \! -e ${nih_path}/${dir}/.git ]
then
ls -la ${nih_path}/${dir}*
rm -rfv ${nih_path}/${dir}*
fi
done
done
# generate
${time} cmake ../.. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_EXTRA_ARGS}
# Display the cmake output, to help with debugging if something fails
for file in CMakeOutput.log CMakeError.log
do
if [ -f CMakeFiles/${file} ]
then
ls -l CMakeFiles/${file}
cat CMakeFiles/${file}
fi
done
# build
export DESTDIR=$(pwd)/_INSTALLED_
${time} eval cmake --build . ${BUILDARGS} -- ${BUILDTOOLARGS}
if [[ ${TARGET} == "docs" ]]; then
## mimic the standard test output for docs build
## to make controlling processes like jenkins happy
if [ -f docs/html/index.html ]; then
echo "1 case, 1 test total, 0 failures"
else
echo "1 case, 1 test total, 1 failures"
fi
exit
fi
popd
if [[ "${TARGET}" == "validator-keys" ]] ; then
export APP_PATH="$PWD/build/${BUILD_DIR}/validator-keys/validator-keys"
else
export APP_PATH="$PWD/build/${BUILD_DIR}/rippled"
fi
echo "using APP_PATH: ${APP_PATH}"
# See what we've actually built
ldd ${APP_PATH}
: ${APP_ARGS:=}
if [[ "${TARGET}" == "validator-keys" ]] ; then
APP_ARGS="--unittest"
else
function join_by { local IFS="$1"; shift; echo "$*"; }
# This is a list of manual tests
# in rippled that we want to run
# ORDER matters here...sorted in approximately
# descending execution time (longest running tests at top)
declare -a manual_tests=(
'ripple.ripple_data.reduce_relay_simulate'
'ripple.tx.Offer_manual'
'ripple.tx.CrossingLimits'
'ripple.tx.PlumpBook'
'ripple.app.Flow_manual'
'ripple.tx.OversizeMeta'
'ripple.consensus.DistributedValidators'
'ripple.app.NoRippleCheckLimits'
'ripple.ripple_data.compression'
'ripple.NodeStore.Timing'
'ripple.consensus.ByzantineFailureSim'
'beast.chrono.abstract_clock'
'beast.unit_test.print'
)
if [[ ${TRAVIS:-false} != "true" ]]; then
# these two tests cause travis CI to run out of memory.
# TODO: investigate possible workarounds.
manual_tests=(
'ripple.consensus.ScaleFreeSim'
'ripple.tx.FindOversizeCross'
"${manual_tests[@]}"
)
fi
if [[ ${MANUAL_TESTS:-} == true ]]; then
APP_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
else
APP_ARGS+=" --unittest --quiet --unittest-log"
fi
if [[ ${coverage} == false && ${PARALLEL_TESTS:-} == true ]]; then
APP_ARGS+=" --unittest-jobs ${JOBS}"
fi
if [[ ${IPV6_TESTS:-} == true ]]; then
APP_ARGS+=" --unittest-ipv6"
fi
fi
if [[ ${coverage} == true && $CC =~ ^gcc ]]; then
# Push the results (lcov.info) to codecov
codecov -X gcov # don't even try and look for .gcov files ;)
find . -name "*.gcda" | xargs rm -f
fi
if [[ ${SKIP_TESTS:-} == true ]]; then
echo "skipping tests."
exit
fi
ulimit -a
corepat=$(cat /proc/sys/kernel/core_pattern)
if [[ ${corepat} =~ ^[[:space:]]*\| ]] ; then
echo "WARNING: core pattern is piping - can't search for core files"
look_core=false
else
look_core=true
coredir=$(dirname ${corepat})
fi
if [[ ${look_core} == true ]]; then
before=$(ls -A1 ${coredir})
fi
set +e
echo "Running tests for ${APP_PATH}"
if [[ ${MANUAL_TESTS:-} == true && ${PARALLEL_TESTS:-} != true ]]; then
for t in "${manual_tests[@]}" ; do
${APP_PATH} --unittest=${t}
TEST_STAT=$?
if [[ $TEST_STAT -ne 0 ]] ; then
break
fi
done
else
${APP_PATH} ${APP_ARGS}
TEST_STAT=$?
fi
set -e
if [[ ${look_core} == true ]]; then
after=$(ls -A1 ${coredir})
oIFS="${IFS}"
IFS=$'\n\r'
found_core=false
for l in $(diff -w --suppress-common-lines <(echo "$before") <(echo "$after")) ; do
if [[ "$l" =~ ^[[:space:]]*\>[[:space:]]*(.+)$ ]] ; then
corefile="${BASH_REMATCH[1]}"
echo "FOUND core dump file at '${coredir}/${corefile}'"
gdb_output=$(/bin/mktemp /tmp/gdb_output_XXXXXXXXXX.txt)
found_core=true
gdb \
-ex "set height 0" \
-ex "set logging file ${gdb_output}" \
-ex "set logging on" \
-ex "print 'ripple::BuildInfo::versionString'" \
-ex "thread apply all backtrace full" \
-ex "info inferiors" \
-ex quit \
"$APP_PATH" \
"${coredir}/${corefile}" &> /dev/null
echo -e "CORE INFO: \n\n $(cat ${gdb_output}) \n\n)"
fi
done
IFS="${oIFS}"
fi
if [[ ${found_core} == true ]]; then
exit -1
else
exit $TEST_STAT
fi
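Like the smaller scripts, this one is driven entirely by environment variables; a minimal local sketch (the compiler choice and build type shown are illustrative):
```
# Debug build plus the full test run, roughly what CI does with gcc.
CC=gcc CXX=g++ BUILD_TYPE=Debug TARGET=install bin/ci/ubuntu/build-and-test.sh

# Release build with clang, skipping the test phase entirely.
CC=clang CXX=clang++ BUILD_TYPE=Release SKIP_TESTS=true \
  bin/ci/ubuntu/build-and-test.sh
```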

View File

@@ -0,0 +1,36 @@
#!/usr/bin/env bash
# run our build script in a docker container
# using travis-ci hosts
set -eux
function join_by { local IFS="$1"; shift; echo "$*"; }
set +x
echo "VERBOSE_BUILD=true" > /tmp/co.env
matchers=(
'TRAVIS.*' 'CI' 'CC' 'CXX'
'BUILD_TYPE' 'TARGET' 'MAX_TIME'
'CODECOV.+' 'CMAKE.*' '.+_TESTS'
'.+_OPTIONS' 'NINJA.*' 'NUM_.+'
'NIH_.+' 'BOOST.*' '.*CCACHE.*')
matchstring=$(join_by '|' "${matchers[@]}")
echo "MATCHSTRING IS:: $matchstring"
env | grep -E "^(${matchstring})=" >> /tmp/co.env
set -x
# need to eliminate TRAVIS_CMD...don't want to pass it to the container
cat /tmp/co.env | grep -v TRAVIS_CMD > /tmp/co.env.2
mv /tmp/co.env.2 /tmp/co.env
cat /tmp/co.env
mkdir -p -m 0777 ${TRAVIS_BUILD_DIR}/cores
echo "${TRAVIS_BUILD_DIR}/cores/%e.%p" | sudo tee /proc/sys/kernel/core_pattern
docker run \
-t --env-file /tmp/co.env \
-v ${TRAVIS_HOME}:${TRAVIS_HOME} \
-w ${TRAVIS_BUILD_DIR} \
--cap-add SYS_PTRACE \
--ulimit "core=-1" \
$DOCKER_IMAGE \
/bin/bash -c 'if [[ $CC =~ ([[:alpha:]]+)-([[:digit:].]+) ]] ; then sudo update-alternatives --set ${BASH_REMATCH[1]} /usr/bin/$CC; fi; bin/ci/ubuntu/build-and-test.sh'
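The `matchers`/`join_by` pair simply builds an alternation regex used to whitelist which host environment variables reach the container; roughly:
```
# join_by '|' over the matchers array produces a single extended regex, e.g.
#   TRAVIS.*|CI|CC|CXX|BUILD_TYPE|TARGET|...
# and `env | grep -E "^(${matchstring})="` copies only matching variables
# into /tmp/co.env for `docker run --env-file`.
function join_by { local IFS="$1"; shift; echo "$*"; }
join_by '|' 'TRAVIS.*' 'CI' 'CC' 'CXX'
```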

View File

@@ -0,0 +1,44 @@
#!/usr/bin/env bash
# some cached files create churn, so save them here for
# later restoration before packing the cache
set -eux
clean_cache="travis_clean_cache"
if [[ ! ( "${TRAVIS_JOB_NAME}" =~ "windows" || \
"${TRAVIS_JOB_NAME}" =~ "prereq-keep" ) ]] && \
( [[ "${TRAVIS_COMMIT_MESSAGE}" =~ "${clean_cache}" ]] || \
( [[ -v TRAVIS_PULL_REQUEST_SHA && \
"${TRAVIS_PULL_REQUEST_SHA}" != "" ]] && \
git log -1 "${TRAVIS_PULL_REQUEST_SHA}" | grep -cq "${clean_cache}" -
)
)
then
find ${TRAVIS_HOME}/_cache -maxdepth 2 -type d
rm -rf ${TRAVIS_HOME}/_cache
mkdir -p ${TRAVIS_HOME}/_cache
fi
pushd ${TRAVIS_HOME}
if [ -f cache_ignore.tar ] ; then
rm -f cache_ignore.tar
fi
if [ -d _cache/nih_c ] ; then
find _cache/nih_c -name "build.ninja" | tar rf cache_ignore.tar --files-from -
find _cache/nih_c -name ".ninja_deps" | tar rf cache_ignore.tar --files-from -
find _cache/nih_c -name ".ninja_log" | tar rf cache_ignore.tar --files-from -
find _cache/nih_c -name "*.log" | tar rf cache_ignore.tar --files-from -
find _cache/nih_c -name "*.tlog" | tar rf cache_ignore.tar --files-from -
# show .a files in the cache, for sanity checking
find _cache/nih_c -name "*.a" -ls
fi
if [ -d _cache/ccache ] ; then
find _cache/ccache -name "stats" | tar rf cache_ignore.tar --files-from -
fi
if [ -f cache_ignore.tar ] ; then
tar -tf cache_ignore.tar
fi
popd
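The only interactive knob here is the `travis_clean_cache` marker checked against the commit message; a hedged example of how a developer would force a cache wipe:
```
# An empty commit whose message contains the marker makes the next CI run
# (for non-windows, non prereq-keep jobs) delete and recreate the cache.
git commit --allow-empty -m "travis_clean_cache"
git push
```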

64
bin/debug_local_sign.js Normal file
View File

@@ -0,0 +1,64 @@
var ripple = require('ripple-lib');
var v = {
seed: "snoPBrXtMeMyMHUVTgbuqAfg1SUTb",
addr: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh"
};
var remote = ripple.Remote.from_config({
"trusted" : true,
"websocket_ip" : "127.0.0.1",
"websocket_port" : 5006,
"websocket_ssl" : false,
"local_signing" : true
});
var tx_json = {
"Account" : v.addr,
"Amount" : "10000000",
"Destination" : "rEu2ULPiEQm1BAL8pYzmXnNX1aFX9sCks",
"Fee" : "10",
"Flags" : 0,
"Sequence" : 3,
"TransactionType" : "Payment"
//"SigningPubKey": '0396941B22791A448E5877A44CE98434DB217D6FB97D63F0DAD23BE49ED45173C9'
};
remote.on('connected', function () {
var req = remote.request_sign(v.seed, tx_json);
req.message.debug_signing = true;
req.on('success', function (result) {
console.log("SERVER RESULT");
console.log(result);
var sim = {};
var tx = remote.transaction();
tx.tx_json = tx_json;
tx._secret = v.seed;
tx.complete();
var unsigned = tx.serialize().to_hex();
tx.sign();
sim.tx_blob = tx.serialize().to_hex();
sim.tx_json = tx.tx_json;
sim.tx_signing_hash = tx.signing_hash().to_hex();
sim.tx_unsigned = unsigned;
console.log("\nLOCAL RESULT");
console.log(sim);
remote.connect(false);
});
req.on('error', function (err) {
if (err.error === "remoteError" && err.remote.error === "srcActNotFound") {
console.log("Please fund account "+v.addr+" to run this test.");
} else {
console.log('error', err);
}
remote.connect(false);
});
req.request();
});
remote.connect();
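A hedged sketch of running this check; the seed, address, and WebSocket port 5006 are the values hard-coded above, and the script expects the account to already be funded on the target server:
```
# Requires a rippled with an unencrypted WebSocket endpoint on 127.0.0.1:5006
# and the ripple-lib node module installed next to the script.
node bin/debug_local_sign.js
```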

Some files were not shown because too many files have changed in this diff.