diff --git a/.codecov.yml b/.codecov.yml
deleted file mode 100644
index b2bc67814..000000000
--- a/.codecov.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-
-codecov:
-  ci:
-    - !appveyor
-    - travis
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index ca7367b87..553fa2be1 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -33,10 +33,27 @@ Please check [x] relevant options, delete irrelevant ones.
 - [ ] New feature (non-breaking change which adds functionality)
 - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
 - [ ] Refactor (non-breaking change that only restructures code)
-- [ ] Tests (You added tests for code that already exists, or your new feature included in this PR)
-- [ ] Documentation Updates
+- [ ] Tests (you added tests for code that already exists, or your new feature included in this PR)
+- [ ] Documentation update
+- [ ] Chore (no impact to binary, e.g. `.gitignore`, formatting, dropping support for older tooling)
 - [ ] Release
 
+### API Impact
+
+<!--
+Please check [x] relevant options, delete irrelevant ones.
+-->
+- [ ] Public API: New feature (new methods and/or new fields)
+- [ ] Public API: Breaking change (in general, breaking changes should only impact the next api_version)
+- [ ] `libxrpl` change (any change that may affect `libxrpl` or dependents of `libxrpl`)
+- [ ] Peer protocol change (must be backward compatible or bump the peer protocol version)
+
 
+-->
\ No newline at end of file
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
deleted file mode 100644
index 02475adf0..000000000
--- a/.gitlab-ci.yml
+++ /dev/null
@@ -1,169 +0,0 @@
-# I don't know what the minimum size is, but we cannot build on t3.micro.
-
-# TODO: Factor common builds between different tests.
-
-# The parameters for our job matrix:
-#
-# 1. Generator (Make, Ninja, MSBuild)
-# 2. Compiler (GCC, Clang, MSVC)
-# 3. Build type (Debug, Release)
-# 4. Definitions (-Dunity=OFF, -Dassert=ON, ...)
-
-
-.job_linux_build_test:
-  only:
-    variables:
-      - $CI_PROJECT_URL =~ /^https?:\/\/gitlab.com\//
-  stage: build
-  tags:
-    - linux
-    - c5.2xlarge
-  image: thejohnfreeman/rippled-build-ubuntu:4b73694e07f0
-  script:
-    - bin/ci/build.sh
-    - bin/ci/test.sh
-  cache:
-    # Use a different key for each unique combination of (generator, compiler,
-    # build type). Caches are stored as `.zip` files; they are not merged.
-    # Generate a new key whenever you want to bust the cache, e.g. when the
-    # dependency versions have been bumped.
-    # By default, jobs pull the cache. Only a few specially chosen jobs update
-    # the cache (with policy `pull-push`); one for each unique combination of
-    # (generator, compiler, build type).
-    policy: pull
-    paths:
-      - .nih_c/
-
-'build+test Make GCC Debug':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Unix Makefiles
-    COMPILER: gcc
-    BUILD_TYPE: Debug
-  cache:
-    key: 62ada41c-fc9e-4949-9533-736d4d6512b6
-    policy: pull-push
-
-'build+test Ninja GCC Debug':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: gcc
-    BUILD_TYPE: Debug
-  cache:
-    key: 1665d3eb-6233-4eef-9f57-172636899faa
-    policy: pull-push
-
-'build+test Ninja GCC Debug -Dstatic=OFF':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: gcc
-    BUILD_TYPE: Debug
-    CMAKE_ARGS: '-Dstatic=OFF'
-  cache:
-    key: 1665d3eb-6233-4eef-9f57-172636899faa
-
-'build+test Ninja GCC Debug -Dstatic=OFF -DBUILD_SHARED_LIBS=ON':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: gcc
-    BUILD_TYPE: Debug
-    CMAKE_ARGS: '-Dstatic=OFF -DBUILD_SHARED_LIBS=ON'
-  cache:
-    key: 1665d3eb-6233-4eef-9f57-172636899faa
-
-'build+test Ninja GCC Debug -Dunity=OFF':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: gcc
-    BUILD_TYPE: Debug
-    CMAKE_ARGS: '-Dunity=OFF'
-  cache:
-    key: 1665d3eb-6233-4eef-9f57-172636899faa
-
-'build+test Ninja GCC Release -Dassert=ON':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: gcc
-    BUILD_TYPE: Release
-    CMAKE_ARGS: '-Dassert=ON'
-  cache:
-    key: c45ec125-9625-4c19-acf7-4e889d5f90bd
-    policy: pull-push
-
-'build+test(manual) Ninja GCC Release -Dassert=ON':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: gcc
-    BUILD_TYPE: Release
-    CMAKE_ARGS: '-Dassert=ON'
-    MANUAL_TEST: 'true'
-  cache:
-    key: c45ec125-9625-4c19-acf7-4e889d5f90bd
-
-'build+test Make clang Debug':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Unix Makefiles
-    COMPILER: clang
-    BUILD_TYPE: Debug
-  cache:
-    key: bf578dc2-5277-4580-8de5-6b9523118b19
-    policy: pull-push
-
-'build+test Ninja clang Debug':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: clang
-    BUILD_TYPE: Debug
-  cache:
-    key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe
-    policy: pull-push
-
-'build+test Ninja clang Debug -Dunity=OFF':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: clang
-    BUILD_TYPE: Debug
-    CMAKE_ARGS: '-Dunity=OFF'
-  cache:
-    key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe
-
-'build+test Ninja clang Debug -Dunity=OFF -Dsan=address':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: clang
-    BUILD_TYPE: Debug
-    CMAKE_ARGS: '-Dunity=OFF -Dsan=address'
-    CONCURRENT_TESTS: 1
-  cache:
-    key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe
-
-'build+test Ninja clang Debug -Dunity=OFF -Dsan=undefined':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: clang
-    BUILD_TYPE: Debug
-    CMAKE_ARGS: '-Dunity=OFF -Dsan=undefined'
-  cache:
-    key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe
-
-'build+test Ninja clang Release -Dassert=ON':
-  extends: .job_linux_build_test
-  variables:
-    GENERATOR: Ninja
-    COMPILER: clang
-    BUILD_TYPE: Release
-    CMAKE_ARGS: '-Dassert=ON'
-  cache:
-    key: 7751be37-2358-4f08-b1d0-7e72e0ad266d
-    policy: pull-push
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..04893e956
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,6 @@
+# .pre-commit-config.yaml
+repos:
+- repo: https://github.com/pre-commit/mirrors-clang-format
+  rev: v10.0.1
+  hooks:
+  - id: clang-format
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index d8cbf4344..000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,460 +0,0 @@
-# There is a known issue where Travis will have trouble fetching the cache,
-# particularly on non-linux builds. Try restarting the individual build
-# (probably will not be necessary in the "windep" stages) if the end of the
-# log looks like:
-#
-#---------------------------------------
-# attempting to download cache archive
-# fetching travisorder/cache--windows-1809-containers-f2bf1c76c7fb4095c897a4999bd7c9b3fb830414dfe91f33d665443b52416d39--compiler-gpp.tgz
-# found cache
-# adding C:/Users/travis/_cache to cache
-# creating directory C:/Users/travis/_cache
-# No output has been received in the last 10m0s, this potentially indicates a stalled build or something wrong with the build itself.
-# Check the details on how to adjust your build configuration on: https://docs.travis-ci.com/user/common-build-problems/#build-times-out-because-no-output-was-received
-# The build has been terminated
-#---------------------------------------
-
-language: cpp
-dist: bionic
-
-services:
-  - docker
-
-stages:
-  - windep-vcpkg
-  - windep-boost
-  - build
-
-env:
-  global:
-    - DOCKER_IMAGE="rippleci/rippled-ci-builder:2020-01-08"
-    - CMAKE_EXTRA_ARGS="-Dwerr=ON -Dwextra=ON"
-    - NINJA_BUILD=true
-    # change this if we get more VM capacity
-    - MAX_TIME_MIN=80
-    - CACHE_DIR=${TRAVIS_HOME}/_cache
-    - NIH_CACHE_ROOT=${CACHE_DIR}/nih_c
-    - PARALLEL_TESTS=true
-    # this is NOT used by linux container based builds (which already have boost installed)
-    - BOOST_URL='https://boostorg.jfrog.io/artifactory/main/release/1.75.0/source/boost_1_75_0.tar.gz'
-    # Alternate dowload location
-    - BOOST_URL2='https://downloads.sourceforge.net/project/boost/boost/1.75.0/boost_1_75_0.tar.bz2?r=&ts=1594393912&use_mirror=newcontinuum'
-    # Travis downloader doesn't seem to have updated certs. Using this option
-    # introduces obvious security risks, but they're Travis's risks.
-    # Note that this option is only used if the "normal" build fails.
-    - BOOST_WGET_OPTIONS='--no-check-certificate'
-    - VCPKG_DIR=${CACHE_DIR}/vcpkg
-    - USE_CCACHE=true
-    - CCACHE_BASEDIR=${TRAVIS_HOME}"
-    - CCACHE_NOHASHDIR=true
-    - CCACHE_DIR=${CACHE_DIR}/ccache
-
-before_install:
-  - export NUM_PROCESSORS=$(nproc)
-  - echo "NUM PROC is ${NUM_PROCESSORS}"
-  - if [ "$(uname)" = "Linux" ] ; then docker pull ${DOCKER_IMAGE}; fi
-  - if [ "${MATRIX_EVAL}" != "" ] ; then eval "${MATRIX_EVAL}"; fi
-  - if [ "${CMAKE_ADD}" != "" ] ; then export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} ${CMAKE_ADD}"; fi
-  - bin/ci/ubuntu/travis-cache-start.sh
-
-matrix:
-  fast_finish: true
-  allow_failures:
-    # TODO these need more investigation
-    #
-    # there are a number of UBs caught currently that need triage
-    - name: ubsan, clang-8
-    # this one often runs out of memory:
-    - name: manual tests, gcc-8, release
-    # The Windows build may fail if any of the dependencies fail, but
-    # allow the rest of the builds to continue. They may succeed if the
-    # dependency is already cached. These do not need to be retried if
-    # _any_ of the Windows builds succeed.
-    - stage: windep-vcpkg
-    - stage: windep-boost
-
-  # https://docs.travis-ci.com/user/build-config-yaml#usage-of-yaml-anchors-and-aliases
-  include:
-    # debug builds
-    - &linux
-      stage: build
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/
-      compiler: gcc-8
-      name: gcc-8, debug
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Debug
-      script:
-        - sudo chmod -R a+rw ${CACHE_DIR}
-        - ccache -s
-        - travis_wait ${MAX_TIME_MIN} bin/ci/ubuntu/build-in-docker.sh
-        - ccache -s
-    - <<: *linux
-      compiler: clang-8
-      name: clang-8, debug
-      env:
-        - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
-        - BUILD_TYPE=Debug
-    - <<: *linux
-      compiler: clang-8
-      name: reporting, clang-8, debug
-      env:
-        - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_ADD="-Dreporting=ON"
-    # coverage builds
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_cov/
-      compiler: gcc-8
-      name: coverage, gcc-8
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_ADD="-Dcoverage=ON"
-        - TARGET=coverage_report
-        - SKIP_TESTS=true
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_cov/
-      compiler: clang-8
-      name: coverage, clang-8
-      env:
-        - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_ADD="-Dcoverage=ON"
-        - TARGET=coverage_report
-        - SKIP_TESTS=true
-    # test-free builds
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/
-      compiler: gcc-8
-      name: no-tests-unity, gcc-8
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_ADD="-Dtests=OFF"
-        - SKIP_TESTS=true
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/
-      compiler: clang-8
-      name: no-tests-non-unity, clang-8
-      env:
-        - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_ADD="-Dtests=OFF -Dunity=OFF"
-        - SKIP_TESTS=true
-    # nounity
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_nounity/
-      compiler: gcc-8
-      name: non-unity, gcc-8
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_ADD="-Dunity=OFF"
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_nounity/
-      compiler: clang-8
-      name: non-unity, clang-8
-      env:
-        - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_ADD="-Dunity=OFF"
-    # manual tests
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_man/
-      compiler: gcc-8
-      name: manual tests, gcc-8, debug
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Debug
-        - MANUAL_TESTS=true
-    # manual tests
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_man/
-      compiler: gcc-8
-      name: manual tests, gcc-8, release
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Release
-        - CMAKE_ADD="-Dassert=ON -Dunity=OFF"
-        - MANUAL_TESTS=true
-    # release builds
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_release/
-      compiler: gcc-8
-      name: gcc-8, release
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Release
-        - CMAKE_ADD="-Dassert=ON -Dunity=OFF"
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_release/
-      compiler: clang-8
-      name: clang-8, release
-      env:
-        - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
-        - BUILD_TYPE=Release
-        - CMAKE_ADD="-Dassert=ON"
-    # asan
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_san/
-      compiler: clang-8
-      name: asan, clang-8
-      env:
-        - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
-        - BUILD_TYPE=Release
-        - CMAKE_ADD="-Dsan=address"
-        - ASAN_OPTIONS="print_stats=true:atexit=true"
-        #- LSAN_OPTIONS="verbosity=1:log_threads=1"
-        - PARALLEL_TESTS=false
-    # ubsan
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_san/
-      compiler: clang-8
-      name: ubsan, clang-8
-      env:
-        - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
-        - BUILD_TYPE=Release
-        - CMAKE_ADD="-Dsan=undefined"
-        # once we can run clean under ubsan, add halt_on_error=1 to options below
-        - UBSAN_OPTIONS="print_stacktrace=1:report_error_type=1"
-        - PARALLEL_TESTS=false
-    # tsan
-    # current tsan failure *might* be related to:
-    # https://github.com/google/sanitizers/issues/1104
-    #  but we can't get it to run, so leave it disabled for now
-    #    - <<: *linux
-    #      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_san/
-    #      compiler: clang-8
-    #      name: tsan, clang-8
-    #      env:
-    #        - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
-    #        - BUILD_TYPE=Release
-    #        - CMAKE_ADD="-Dsan=thread"
-    #        - TSAN_OPTIONS="history_size=3 external_symbolizer_path=/usr/bin/llvm-symbolizer verbosity=1"
-    #        - PARALLEL_TESTS=false
-    # dynamic lib builds
-    - <<: *linux
-      compiler: gcc-8
-      name: non-static, gcc-8
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_ADD="-Dstatic=OFF"
-    - <<: *linux
-      compiler: gcc-8
-      name: non-static + BUILD_SHARED_LIBS, gcc-8
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_ADD="-Dstatic=OFF -DBUILD_SHARED_LIBS=ON"
-    # makefile
-    - <<: *linux
-      compiler: gcc-8
-      name: makefile generator, gcc-8
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Debug
-        - NINJA_BUILD=false
-    # misc alternative compilers
-    - <<: *linux
-      compiler: gcc-9
-      name: gcc-9
-      env:
-        - MATRIX_EVAL="CC=gcc-9 && CXX=g++-9"
-        - BUILD_TYPE=Debug
-    - <<: *linux
-      compiler: clang-9
-      name: clang-9, debug
-      env:
-        - MATRIX_EVAL="CC=clang-9 && CXX=clang++-9"
-        - BUILD_TYPE=Debug
-    - <<: *linux
-      compiler: clang-9
-      name: clang-9, release
-      env:
-        - MATRIX_EVAL="CC=clang-9 && CXX=clang++-9"
-        - BUILD_TYPE=Release
-    # verify build with min version of cmake
-    - <<: *linux
-      compiler: gcc-8
-      name: min cmake version
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_EXE=/opt/local/cmake/bin/cmake
-        - SKIP_TESTS=true
-    # validator keys project as subproj of rippled
-    - <<: *linux
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_vkeys/
-      compiler: gcc-8
-      name: validator-keys
-      env:
-        - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
-        - BUILD_TYPE=Debug
-        - CMAKE_ADD="-Dvalidator_keys=ON"
-        - TARGET=validator-keys
-    # macos
-    - &macos
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_mac/
-      stage: build
-      os: osx
-      osx_image: xcode13.1
-      name: xcode13.1, debug
-      env:
-        # put NIH in non-cache location since it seems to
-        # cause failures when homebrew updates
-        - NIH_CACHE_ROOT=${TRAVIS_BUILD_DIR}/nih_c
-        - BLD_CONFIG=Debug
-        - TEST_EXTRA_ARGS=""
-        - BOOST_ROOT=${CACHE_DIR}/boost_1_75_0
-        - >-
-          CMAKE_ADD="
-          -DBOOST_ROOT=${BOOST_ROOT}/_INSTALLED_
-          -DBoost_ARCHITECTURE=-x64
-          -DBoost_NO_SYSTEM_PATHS=ON
-          -DCMAKE_VERBOSE_MAKEFILE=ON"
-      addons:
-        homebrew:
-          packages:
-            - protobuf
-            - grpc
-            - pkg-config
-            - bash
-            - ninja
-            - cmake
-            - wget
-            - zstd
-            - libarchive
-            - openssl@1.1
-          update: true
-      install:
-        - export OPENSSL_ROOT=$(brew --prefix openssl@1.1)
-        - travis_wait ${MAX_TIME_MIN} Builds/containers/shared/install_boost.sh
-        - brew uninstall --ignore-dependencies boost
-      script:
-        - mkdir -p build.macos && cd build.macos
-        - cmake -G Ninja ${CMAKE_EXTRA_ARGS} -DCMAKE_BUILD_TYPE=${BLD_CONFIG} ..
-        - travis_wait ${MAX_TIME_MIN} cmake --build . --parallel --verbose
-        - ./rippled --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS} ${TEST_EXTRA_ARGS}
-    - <<: *macos
-      name: xcode13.1, release
-      before_script:
-        - export BLD_CONFIG=Release
-        - export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} -Dassert=ON"
-    - <<: *macos
-      name: ipv6 (macos)
-      before_script:
-        - export TEST_EXTRA_ARGS="--unittest-ipv6"
-    - <<: *macos
-      osx_image: xcode13.1
-      name: xcode13.1, debug
-    # windows
-    - &windows
-      if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_win/
-      os: windows
-      env:
-        # put NIH in a non-cached location until
-        # we come up with a way to stabilize that
-        # cache on windows (minimize incremental changes)
-        - CACHE_NAME=win_01
-        - NIH_CACHE_ROOT=${TRAVIS_BUILD_DIR}/nih_c
-        - VCPKG_DEFAULT_TRIPLET="x64-windows-static"
-        - MATRIX_EVAL="CC=cl.exe && CXX=cl.exe"
-        - BOOST_ROOT=${CACHE_DIR}/boost_1_75
-        - >-
-          CMAKE_ADD="
-          -DCMAKE_PREFIX_PATH=${BOOST_ROOT}/_INSTALLED_
-          -DBOOST_ROOT=${BOOST_ROOT}/_INSTALLED_
-          -DBoost_ROOT=${BOOST_ROOT}/_INSTALLED_
-          -DBoost_DIR=${BOOST_ROOT}/_INSTALLED_/lib/cmake/Boost-1.75.0
-          -DBoost_COMPILER=vc141
-          -DCMAKE_VERBOSE_MAKEFILE=ON
-          -DCMAKE_TOOLCHAIN_FILE=${VCPKG_DIR}/scripts/buildsystems/vcpkg.cmake
-          -DVCPKG_TARGET_TRIPLET=x64-windows-static"
-      stage: windep-vcpkg
-      name: prereq-vcpkg
-      install:
-        - choco upgrade cmake.install
-        - choco install ninja visualstudio2017-workload-vctools -y
-      script:
-        - df -h
-        - env
-        - travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh openssl
-        - travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh grpc
-        - travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh libarchive[lz4]
-        # TBD consider rocksdb via vcpkg if/when we can build with the
-        # vcpkg version
-        # - travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh rocksdb[snappy,lz4,zlib]
-    - <<: *windows
-      stage: windep-boost
-      name: prereq-keep-boost
-      install:
-        - choco upgrade cmake.install
-        - choco install ninja visualstudio2017-workload-vctools -y
-        - choco install visualstudio2019buildtools visualstudio2019community visualstudio2019-workload-vctools -y
-      script:
-        - export BOOST_TOOLSET=msvc-14.1
-        - travis_wait ${MAX_TIME_MIN} Builds/containers/shared/install_boost.sh
-    - &windows-bld
-      <<: *windows
-      stage: build
-      name: windows, debug
-      before_script:
-        - export BLD_CONFIG=Debug
-      script:
-        - df -h
-        - . ./bin/sh/setup-msvc.sh
-        - mkdir -p build.ms && cd build.ms
-        - cmake -G Ninja ${CMAKE_EXTRA_ARGS} -DCMAKE_BUILD_TYPE=${BLD_CONFIG} ..
-        - travis_wait ${MAX_TIME_MIN} cmake --build . --parallel --verbose
-        # override num procs to force fewer unit test jobs
-        - export NUM_PROCESSORS=2
-        - travis_wait ${MAX_TIME_MIN} ./rippled.exe --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS}
-    - <<: *windows-bld
-      name: windows, release
-      before_script:
-        - export BLD_CONFIG=Release
-    - <<: *windows-bld
-      name: windows, visual studio, debug
-      script:
-        - mkdir -p build.ms && cd build.ms
-        - export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} -DCMAKE_GENERATOR_TOOLSET=host=x64"
-        - cmake -G "Visual Studio 15 2017 Win64" ${CMAKE_EXTRA_ARGS} ..
-        - export DESTDIR=${PWD}/_installed_
-        - travis_wait ${MAX_TIME_MIN} cmake --build . --parallel --verbose --config ${BLD_CONFIG} --target install
-        # override num procs to force fewer unit test jobs
-        - export NUM_PROCESSORS=2
-        - >-
-          travis_wait ${MAX_TIME_MIN} "./_installed_/Program Files/rippled/bin/rippled.exe" --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS}
-    - <<: *windows-bld
-      name: windows, vc2019
-      install:
-        - choco upgrade cmake.install
-        - choco install ninja -y
-        - choco install visualstudio2019buildtools visualstudio2019community visualstudio2019-workload-vctools -y
-      before_script:
-        - export BLD_CONFIG=Release
-        # we want to use the boost build from cache, which was built using the
-        # vs2017 compiler so we need to specify the Boost_COMPILER. BUT, we
-        # can't use the cmake config files generated by boost b/c they are
-        # broken for Boost_COMPILER override, so we need to specify both
-        # Boost_NO_BOOST_CMAKE and a slightly different Boost_COMPILER string
-        # to make the legacy find module work for us. If the cmake configs are
-        # fixed in the future, it should be possible to remove these
-        # workarounds.
-        - export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} -DBoost_NO_BOOST_CMAKE=ON -DBoost_COMPILER=-vc141"
-
-before_cache:
-  - if [ $(uname) = "Linux" ] ; then SUDO="sudo"; else SUDO=""; fi
-  - cd ${TRAVIS_HOME}
-  - if [ -f cache_ignore.tar ] ; then $SUDO tar xvf cache_ignore.tar; fi
-  - cd ${TRAVIS_BUILD_DIR}
-
-cache:
-  timeout: 900
-  directories:
-    - $CACHE_DIR
-
-notifications:
-  email: false
diff --git a/Builds/Test.py b/Builds/Test.py
deleted file mode 100755
index 29cf8ea8a..000000000
--- a/Builds/Test.py
+++ /dev/null
@@ -1,405 +0,0 @@
-#!/usr/bin/env python
-
-#    This file is part of rippled: https://github.com/ripple/rippled
-#    Copyright (c) 2012 - 2017 Ripple Labs Inc.
-#
-#    Permission to use, copy, modify, and/or distribute this software for any
-#    purpose  with  or without fee is hereby granted, provided that the above
-#    copyright notice and this permission notice appear in all copies.
-#
-#    THE  SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-#    WITH  REGARD  TO  THIS  SOFTWARE  INCLUDING  ALL  IMPLIED  WARRANTIES  OF
-#    MERCHANTABILITY  AND  FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-#    ANY  SPECIAL ,  DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-#    WHATSOEVER  RESULTING  FROM  LOSS  OF USE, DATA OR PROFITS, WHETHER IN AN
-#    ACTION  OF  CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-#    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""
-Invocation:
-
-    ./Builds/Test.py - builds and tests all configurations
-
-The build must succeed without shell aliases for this to work.
-
-To pass flags to cmake, put them at the very end of the command line, after
-the -- flag - like this:
-
-    ./Builds/Test.py -- -j4  # Pass -j4 to cmake --build
-
-
-Common problems:
-
-1) Boost not found. Solution: export BOOST_ROOT=[path to boost folder]
-
-2) OpenSSL not found. Solution: export OPENSSL_ROOT=[path to OpenSSL folder]
-
-3) cmake is not found. Solution: Be sure cmake directory is on your $PATH
-
-"""
-from __future__ import absolute_import, division, print_function, unicode_literals
-
-import argparse
-import itertools
-import os
-import platform
-import re
-import shutil
-import sys
-import subprocess
-
-
-def powerset(iterable):
-    """powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"""
-    s = list(iterable)
-    return itertools.chain.from_iterable(itertools.combinations(s, r) for r in range(len(s) + 1))
-
-IS_WINDOWS = platform.system().lower() == 'windows'
-IS_OS_X = platform.system().lower() == 'darwin'
-
-# CMake
-if IS_WINDOWS:
-    CMAKE_UNITY_CONFIGS = ['Debug', 'Release']
-    CMAKE_NONUNITY_CONFIGS = ['Debug', 'Release']
-else:
-    CMAKE_UNITY_CONFIGS = []
-    CMAKE_NONUNITY_CONFIGS = []
-CMAKE_UNITY_COMBOS = { '' : [['rippled'], CMAKE_UNITY_CONFIGS],
-    '.nounity' : [['rippled'], CMAKE_NONUNITY_CONFIGS] }
-
-if IS_WINDOWS:
-    CMAKE_DIR_TARGETS = { ('msvc' + unity,) : targets for unity, targets in
-        CMAKE_UNITY_COMBOS.items() }
-elif IS_OS_X:
-    CMAKE_DIR_TARGETS = { (build + unity,) : targets
-                   for build in ['debug', 'release']
-                   for unity, targets in CMAKE_UNITY_COMBOS.items() }
-else:
-    CMAKE_DIR_TARGETS = { (cc + "." + build + unity,) : targets
-                   for cc in ['gcc', 'clang']
-                   for build in ['debug', 'release', 'coverage', 'profile']
-                   for unity, targets in CMAKE_UNITY_COMBOS.items() }
-
-# list of tuples of all possible options
-if IS_WINDOWS or IS_OS_X:
-    CMAKE_ALL_GENERATE_OPTIONS = [tuple(x) for x in powerset(['-GNinja', '-Dassert=true'])]
-else:
-    CMAKE_ALL_GENERATE_OPTIONS = list(set(
-        [tuple(x) for x in powerset(['-GNinja', '-Dstatic=true', '-Dassert=true', '-Dsan=address'])] +
-        [tuple(x) for x in powerset(['-GNinja', '-Dstatic=true', '-Dassert=true', '-Dsan=thread'])]))
-
-parser = argparse.ArgumentParser(
-    description='Test.py - run ripple tests'
-)
-
-parser.add_argument(
-    '--all', '-a',
-    action='store_true',
-    help='Build all configurations.',
-)
-
-parser.add_argument(
-    '--keep_going', '-k',
-    action='store_true',
-    help='Keep going after one configuration has failed.',
-)
-
-parser.add_argument(
-    '--silent', '-s',
-    action='store_true',
-    help='Silence all messages except errors',
-)
-
-parser.add_argument(
-    '--verbose', '-v',
-    action='store_true',
-    help=('Report more information about which commands are executed and the '
-          'results.'),
-)
-
-parser.add_argument(
-    '--test', '-t',
-    default='',
-    help='Add a prefix for unit tests',
-)
-
-parser.add_argument(
-    '--testjobs',
-    default='0',
-    type=int,
-    help='Run tests in parallel'
-)
-
-parser.add_argument(
-    '--ipv6',
-    action='store_true',
-    help='Use IPv6 localhost when running unit tests.',
-)
-
-parser.add_argument(
-    '--clean', '-c',
-    action='store_true',
-    help='delete all build artifacts after testing',
-)
-
-parser.add_argument(
-    '--quiet', '-q',
-    action='store_true',
-    help='Reduce output where possible (unit tests)',
-)
-
-parser.add_argument(
-    '--dir', '-d',
-    default=(),
-    nargs='*',
-    help='Specify one or more CMake dir names. '
-        'Will also be used as -Dtarget=<dir> running cmake.'
-)
-
-parser.add_argument(
-    '--target',
-    default=(),
-    nargs='*',
-    help='Specify one or more CMake build targets. '
-        'Will be used as --target <tgt> running cmake --build.'
-    )
-
-parser.add_argument(
-    '--config',
-    default=(),
-    nargs='*',
-    help='Specify one or more CMake build configs. '
-        'Will be used as --config <cfg> running cmake --build.'
-    )
-
-parser.add_argument(
-    '--generator_option',
-    action='append',
-    help='Specify a CMake generator option. Repeat for multiple options. '
-        'Will be passed to the cmake generator. '
-        'Due to limits of the argument parser, arguments starting with \'-\' '
-        'must be attached to this option. e.g. --generator_option=-GNinja.')
-
-parser.add_argument(
-    '--build_option',
-    action='append',
-    help='Specify a build option. Repeat for multiple options. '
-        'Will be passed to the build tool via cmake --build. '
-        'Due to limits of the argument parser, arguments starting with \'-\' '
-        'must be attached to this option. e.g. --build_option=-j8.')
-
-parser.add_argument(
-    'extra_args',
-    default=(),
-    nargs='*',
-    help='Extra arguments are passed through to the tools'
-)
-
-ARGS = parser.parse_args()
-
-def decodeString(line):
-    # Python 2 vs. Python 3
-    if isinstance(line, str):
-        return line
-    else:
-        return line.decode()
-
-def shell(cmd, args=(), silent=False, cust_env=None):
-    """"Execute a shell command and return the output."""
-    silent = ARGS.silent or silent
-    verbose = not silent and ARGS.verbose
-    if verbose:
-        print('$' + cmd, *args)
-
-    command = (cmd,) + args
-
-    # shell is needed in Windows to find executable in the path
-    process = subprocess.Popen(
-        command,
-        stdin=subprocess.PIPE,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        env=cust_env,
-        shell=IS_WINDOWS)
-    lines = []
-    count = 0
-    # readline returns '' at EOF
-    for line in iter(process.stdout.readline, ''):
-        if process.poll() is None:
-            decoded = decodeString(line)
-            lines.append(decoded)
-            if verbose:
-                print(decoded, end='')
-            elif not silent:
-                count += 1
-                if count >= 80:
-                    print()
-                    count = 0
-                else:
-                    print('.', end='')
-        else:
-            break
-
-    if not verbose and count:
-        print()
-    process.wait()
-    return process.returncode, lines
-
-def get_cmake_dir(cmake_dir):
-    return os.path.join('build' , 'cmake' , cmake_dir)
-
-def run_cmake(directory, cmake_dir, args):
-    print('Generating build in', directory, 'with', *args or ('default options',))
-    old_dir = os.getcwd()
-    if not os.path.exists(directory):
-        os.makedirs(directory)
-    os.chdir(directory)
-    if IS_WINDOWS and not any(arg.startswith("-G") for arg in args) and not os.path.exists("CMakeCache.txt"):
-        if '--ninja' in args:
-            args += ( '-GNinja', )
-        else:
-            args += ( '-GVisual Studio 14 2015 Win64', )
-    # hack to extract cmake options/args from the legacy target format
-    if re.search('\.unity', cmake_dir):
-        args += ( '-Dunity=ON', )
-    if re.search('\.nounity', cmake_dir):
-        args += ( '-Dunity=OFF', )
-    if re.search('coverage', cmake_dir):
-        args += ( '-Dcoverage=ON', )
-    if re.search('profile', cmake_dir):
-        args += ( '-Dprofile=ON', )
-    if re.search('debug', cmake_dir):
-        args += ( '-DCMAKE_BUILD_TYPE=Debug', )
-    if re.search('release', cmake_dir):
-        args += ( '-DCMAKE_BUILD_TYPE=Release', )
-    m = re.search('gcc(-[^.]*)', cmake_dir)
-    if m:
-        args += ( '-DCMAKE_C_COMPILER=' + m.group(0),
-          '-DCMAKE_CXX_COMPILER=g++' + m.group(1), )
-    elif re.search('gcc', cmake_dir):
-        args += ( '-DCMAKE_C_COMPILER=gcc', '-DCMAKE_CXX_COMPILER=g++', )
-    m = re.search('clang(-[^.]*)', cmake_dir)
-    if m:
-        args += ( '-DCMAKE_C_COMPILER=' + m.group(0),
-          '-DCMAKE_CXX_COMPILER=clang++' + m.group(1), )
-    elif re.search('clang', cmake_dir):
-        args += ( '-DCMAKE_C_COMPILER=clang', '-DCMAKE_CXX_COMPILER=clang++', )
-
-    args += ( os.path.join('..', '..', '..'), )
-    resultcode, lines = shell('cmake', args)
-
-    if resultcode:
-        print('Generating FAILED:')
-        if not ARGS.verbose:
-            print(*lines, sep='')
-        sys.exit(1)
-
-    os.chdir(old_dir)
-
-def run_cmake_build(directory, target, config, args):
-    print('Building', target, config, 'in', directory, 'with', *args or ('default options',))
-    build_args=('--build', directory)
-    if target:
-      build_args += ('--target', target)
-    if config:
-      build_args += ('--config', config)
-    if args:
-        build_args += ('--',)
-        build_args += tuple(args)
-    resultcode, lines = shell('cmake', build_args)
-
-    if resultcode:
-        print('Build FAILED:')
-        if not ARGS.verbose:
-            print(*lines, sep='')
-        sys.exit(1)
-
-def run_cmake_tests(directory, target, config):
-    failed = []
-    if IS_WINDOWS:
-        target += '.exe'
-    executable = os.path.join(directory, config if config else 'Debug', target)
-    if(not os.path.exists(executable)):
-        executable = os.path.join(directory, target)
-    print('Unit tests for', executable)
-    testflag = '--unittest'
-    quiet = ''
-    testjobs = ''
-    ipv6 = ''
-    if ARGS.test:
-        testflag += ('=' + ARGS.test)
-    if ARGS.quiet:
-        quiet = '-q'
-    if ARGS.ipv6:
-        ipv6 = '--unittest-ipv6'
-    if ARGS.testjobs:
-        testjobs = ('--unittest-jobs=' + str(ARGS.testjobs))
-    resultcode, lines = shell(executable, (testflag, quiet, testjobs, ipv6))
-
-    if resultcode:
-        if not ARGS.verbose:
-            print('ERROR:', *lines, sep='')
-        failed.append([target, 'unittest'])
-
-    return failed
-
-def main():
-    all_failed = []
-    if ARGS.all:
-        build_dir_targets = CMAKE_DIR_TARGETS
-        generator_options = CMAKE_ALL_GENERATE_OPTIONS
-    else:
-        build_dir_targets = { tuple(ARGS.dir) : [ARGS.target, ARGS.config] }
-        if ARGS.generator_option:
-            generator_options = [tuple(ARGS.generator_option)]
-        else:
-            generator_options = [tuple()]
-
-    if not build_dir_targets:
-        # Let CMake choose the build tool.
-        build_dir_targets = { () : [] }
-
-    if ARGS.build_option:
-        ARGS.build_option = ARGS.build_option + list(ARGS.extra_args)
-    else:
-        ARGS.build_option = list(ARGS.extra_args)
-
-    for args in generator_options:
-        for build_dirs, (build_targets, build_configs) in build_dir_targets.items():
-            if not build_dirs:
-                build_dirs = ('default',)
-            if not build_targets:
-                build_targets = ('rippled',)
-            if not build_configs:
-                build_configs = ('',)
-            for cmake_dir in build_dirs:
-                cmake_full_dir = get_cmake_dir(cmake_dir)
-                run_cmake(cmake_full_dir, cmake_dir, args)
-
-                for target in build_targets:
-                    for config in build_configs:
-                        run_cmake_build(cmake_full_dir, target, config, ARGS.build_option)
-                        failed = run_cmake_tests(cmake_full_dir, target, config)
-
-                        if failed:
-                            print('FAILED:', *(':'.join(f) for f in failed))
-                            if not ARGS.keep_going:
-                                sys.exit(1)
-                            else:
-                                all_failed.extend([decodeString(cmake_dir +
-                                        "." + target + "." + config), ':'.join(f)]
-                                    for f in failed)
-                        else:
-                            print('Success')
-                if ARGS.clean:
-                    shutil.rmtree(cmake_full_dir)
-
-    if all_failed:
-        if len(all_failed) > 1:
-            print()
-            print('FAILED:', *(':'.join(f) for f in all_failed))
-        sys.exit(1)
-
-if __name__ == '__main__':
-    main()
-    sys.exit(0)
diff --git a/Builds/VisualStudio2017/README.md b/Builds/VisualStudio2017/README.md
deleted file mode 100644
index fce8e824a..000000000
--- a/Builds/VisualStudio2017/README.md
+++ /dev/null
@@ -1 +0,0 @@
-[Build instructions are currently located in `BUILD.md`](../../BUILD.md)
diff --git a/Builds/VisualStudio2019/CMakeSettings-example.json b/Builds/VisualStudio2019/CMakeSettings-example.json
deleted file mode 100644
index b90bfce6b..000000000
--- a/Builds/VisualStudio2019/CMakeSettings-example.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
-  // See https://go.microsoft.com//fwlink//?linkid=834763 for more information about this file.
-  "configurations": [
-    {
-      "name": "x64-Debug",
-      "generator": "Visual Studio 16 2019",
-      "configurationType": "Debug",
-      "inheritEnvironments": [ "msvc_x64_x64" ],
-      "buildRoot": "${thisFileDir}\\build\\${name}",
-      "cmakeCommandArgs": "",
-      "buildCommandArgs": "-v:minimal",
-      "ctestCommandArgs": "",
-      "variables": [
-        {
-          "name": "BOOST_ROOT",
-          "value": "C:\\lib\\boost"
-        },
-        {
-          "name": "OPENSSL_ROOT",
-          "value": "C:\\lib\\OpenSSL-Win64"
-        }
-      ]
-    },
-    {
-      "name": "x64-Release",
-      "generator": "Visual Studio 16 2019",
-      "configurationType": "Release",
-      "inheritEnvironments": [ "msvc_x64_x64" ],
-      "buildRoot": "${thisFileDir}\\build\\${name}",
-      "cmakeCommandArgs": "",
-      "buildCommandArgs": "-v:minimal",
-      "ctestCommandArgs": "",
-      "variables": [
-        {
-          "name": "BOOST_ROOT",
-          "value": "C:\\lib\\boost"
-        },
-        {
-          "name": "OPENSSL_ROOT",
-          "value": "C:\\lib\\OpenSSL-Win64"
-        }
-      ]
-    }
-  ]
-}
diff --git a/Builds/VisualStudio2019/README.md b/Builds/VisualStudio2019/README.md
deleted file mode 100644
index e369eac67..000000000
--- a/Builds/VisualStudio2019/README.md
+++ /dev/null
@@ -1,263 +0,0 @@
-# Visual Studio 2019 Build Instructions
-
-## Important
-
-We do not recommend Windows for rippled production use at this time. Currently,
-the Ubuntu platform has received the highest level of quality assurance,
-testing, and support. Additionally, 32-bit Windows versions are not supported.
-
-## Prerequisites
-
-To clone the source code repository, create branches for inspection or
-modification, build rippled under Visual Studio, and run the unit tests you will
-need these software components
-
-| Component | Minimum Recommended Version |
-|-----------|-----------------------|
-| [Visual Studio 2019](README.md#install-visual-studio-2019)| 15.5.4 |
-| [Git for Windows](README.md#install-git-for-windows)| 2.16.1 |
-| [OpenSSL Library](README.md#install-openssl) | 1.1.1L |
-| [Boost library](README.md#build-boost) | 1.70.0 |
-| [CMake for Windows](README.md#optional-install-cmake-for-windows)* | 3.12 |
-
-\* Only needed if not using the integrated CMake in VS 2019 and prefer generating dedicated project/solution files.
-
-## Install Software
-
-### Install Visual Studio 2019
-
-If not already installed on your system, download your choice of installer from
-the [Visual Studio 2019
-Download](https://www.visualstudio.com/downloads/download-visual-studio-vs)
-page, run the installer, and follow the directions. **You may need to choose the
-`Desktop development with C++` workload to install all necessary C++ features.**
-
-Any version of Visual Studio 2019 may be used to build rippled. The **Visual
-Studio 2019 Community** edition is available free of charge (see [the product
-page](https://www.visualstudio.com/products/visual-studio-community-vs) for
-licensing details), while paid editions may be used for an initial free-trial
-period.
-
-### Install Git for Windows
-
-Git is a distributed revision control system. The Windows version also provides
-the bash shell and many Windows versions of Unix commands. While there are other
-varieties of Git (such as TortoiseGit, which has a native Windows interface and
-integrates with the Explorer shell), we recommend installing [Git for
-Windows](https://git-scm.com/) since it provides a Unix-like command line
-environment useful for running shell scripts. Use of the bash shell under
-Windows is mandatory for running the unit tests.
-
-### Install OpenSSL
-
-[Download the latest version of
-OpenSSL.](http://slproweb.com/products/Win32OpenSSL.html) There will
-several `Win64` bit variants available, you want the non-light
-`v1.1` line. As of this writing, you **should** select
-
-* Win64 OpenSSL v1.1.1q
-
-and should **not** select
-
-* Anything with "Win32" in the name
-* Anything with "light" in the name
-* Anything with "EXPERIMENTAL" in the name
-* Anything in the 3.0 line - rippled won't currently build with this version.
-
-Run the installer, and choose an appropriate location for your OpenSSL
-installation. In this guide we use `C:\lib\OpenSSL-Win64` as the destination
-location.
-
-You may be informed on running the installer that "Visual C++ 2008
-Redistributables" must first be installed first. If so, download it from the
-[same page](http://slproweb.com/products/Win32OpenSSL.html), again making sure
-to get the correct 32-/64-bit variant.
-
-* NOTE: Since rippled links statically to OpenSSL, it does not matter where the
-  OpenSSL .DLL files are placed, or what version they are. rippled does not use
-  or require any external .DLL files to run other than the standard operating
-  system ones.
-
-### Build Boost
-
-Boost 1.70 or later is required.
-
-[Download boost](http://www.boost.org/users/download/) and unpack it
-to `c:\lib`. As of this writing, the most recent version of boost is 1.80.0,
-which will unpack into a directory named `boost_1_80_0`. We recommended either
-renaming this directory to `boost`, or creating a junction link `mklink /J boost
-boost_1_80_0`, so that you can more easily switch between versions.
-
-Next, open **Developer Command Prompt** and type the following commands
-
-```powershell
-cd C:\lib\boost
-bootstrap
-```
-
-The rippled application is linked statically to the standard runtimes and
-external dependencies on Windows, to ensure that the behavior of the executable
-is not affected by changes in outside files. Therefore, it is necessary to build
-the required boost static libraries using this command:
-
-```powershell
-b2 -j --toolset=msvc-14.2 address-model=64 architecture=x86 link=static threading=multi runtime-link=shared,static stage
-```
-
-where you should replace `` with the number of parallel
-invocations to use build, e.g. `bjam -j8 ...` would use up to 8 concurrent build
-shell commands for the build.
-
-Building the boost libraries may take considerable time. When the build process
-is completed, take note of both the reported compiler include paths and linker
-library paths as they will be required later.
-
-### (Optional) Install CMake for Windows
-
-[CMake](http://cmake.org) is a cross platform build system generator. Visual
-Studio 2019 includes an integrated version of CMake that avoids having to
-manually run CMake, but it is undergoing continuous improvement. Users that
-prefer to use standard Visual Studio project and solution files need to install
-a dedicated version of CMake to generate them.  The latest version can be found
-at the [CMake download site](https://cmake.org/download/). It is recommended you
-select the install option to add CMake to your path.
-
-## Clone the rippled repository
-
-If you are familiar with cloning github repositories, just follow your normal
-process and clone `git@github.com:ripple/rippled.git`. Otherwise follow this
-section for instructions.
-
-1. If you don't have a github account, sign up for one at
-   [github.com](https://github.com/).
-2. Make sure you have Github ssh keys. For help see
-   [generating-ssh-keys](https://help.github.com/articles/generating-ssh-keys).
-
-Open the "Git Bash" shell that was installed with "Git for Windows" in the step
-above. Navigate to the directory where you want to clone rippled (git bash uses
-`/c` for windows's `C:` and forward slash where windows uses backslash, so
-`C:\Users\joe\projs` would be `/c/Users/joe/projs` in git bash). Now clone the
-repository and optionally switch to the *master* branch. Type the following at
-the bash prompt:
-
-```powershell
-git clone git@github.com:XRPLF/rippled.git
-cd rippled
-```
-If you receive an error about not having the "correct access rights" make sure
-you have Github ssh keys, as described above.
-
-For a stable release, choose the `master` branch or one of the tagged releases
-listed on [rippled's GitHub page](https://github.com/ripple/rippled/releases).
-
-```
-git checkout master
-```
-
-To test the latest release candidate, choose the `release` branch.
-
-```
-git checkout release
-```
-
-If you are doing development work and want the latest set of beta features,
-you can consider using the `develop` branch instead.
-
-```
-git checkout develop
-```
-
-# Build using Visual Studio integrated CMake
-
-In Visual Studio 2017, Microsoft added [integrated IDE support for
-cmake](https://blogs.msdn.microsoft.com/vcblog/2016/10/05/cmake-support-in-visual-studio/).
-To begin, simply:
-
-1. Launch Visual Studio and choose **File | Open | Folder**, navigating to the
-   cloned rippled folder.
-2. Right-click on `CMakeLists.txt` in the **Solution Explorer - Folder View** to
-   generate a `CMakeSettings.json` file. A sample settings file is provided
-   [here](/Builds/VisualStudio2019/CMakeSettings-example.json). Customize the
-   settings for `BOOST_ROOT`, `OPENSSL_ROOT` to match the install paths if they
-   differ from those in the file.
-4. Select either the `x64-Release` or `x64-Debug` configuration from the
-   **Project Settings** drop-down. This should invoke the built-in CMake project
-   generator. If not, you can right-click on the `CMakeLists.txt` file and
-   choose **Configure rippled**.
-5. Select the `rippled.exe`
-   option in the **Select Startup Item** drop-down. This will be the target
-   built when you press F7. Alternatively, you can choose a target to build from
-   the top-level **CMake | Build** menu. Note that at this time, there are other
-   targets listed that come from third party visual studio files embedded in the
-   rippled repo, e.g. `datagen.vcxproj`. Please ignore them.
-
-For details on configuring debugging sessions or further customization of CMake,
-please refer to the [CMake tools for VS
-documentation](https://docs.microsoft.com/en-us/cpp/ide/cmake-tools-for-visual-cpp).
-
-If using the provided `CMakeSettings.json` file, the executable will be in
-```
-.\build\x64-Release\Release\rippled.exe
-```
-or
-```
-.\build\x64-Debug\Debug\rippled.exe
-```
-These paths are relative to your cloned git repository.
-
-# Build using stand-alone CMake
-
-This requires having installed [CMake for
-Windows](README.md#optional-install-cmake-for-windows). We do not recommend
-mixing this method with the integrated CMake method for the same repository
-clone. Assuming you included the cmake executable folder in your path,
-execute the following commands within your `rippled` cloned repository:
-
-```
-mkdir build\cmake
-cd build\cmake
-cmake ..\.. -G"Visual Studio 16 2019" -Ax64 -DBOOST_ROOT="C:\lib\boost" -DOPENSSL_ROOT="C:\lib\OpenSSL-Win64" -DCMAKE_GENERATOR_TOOLSET=host=x64
-```
-Now launch Visual Studio 2019 and select **File | Open | Project/Solution**.
-Navigate to the `build\cmake` folder created above and select the `rippled.sln`
-file. You can then choose whether to build the `Debug` or `Release` solution
-configuration.
-
-The executable will be in
-```
-.\build\cmake\Release\rippled.exe
-```
-or
-```
-.\build\cmake\Debug\rippled.exe
-```
-These paths are relative to your cloned git repository.
-
-# Unity/No-Unity Builds
-
-The rippled build system defaults to using
-[unity source files](http://onqtam.com/programming/2018-07-07-unity-builds/)
-to improve build times. In some cases it might be desirable to disable the
-unity build and compile individual translation units. Here is how you can
-switch to a "no-unity" build configuration:
-
-## Visual Studio Integrated CMake
-
-Edit your `CmakeSettings.json` (described above) by adding `-Dunity=OFF`
-to the `cmakeCommandArgs` entry for each build configuration.
-
-## Standalone CMake Builds
-
-When running cmake to generate the Visual Studio project files, add
-`-Dunity=OFF` to the command line options passed to cmake.
-
-**Note:** you will need to re-run the cmake configuration step anytime you
-want to switch between unity/no-unity builds.
-
-# Unit Test (Recommended)
-
-`rippled` builds a set of unit tests into the server executable. To run these
-unit tests after building, pass the `--unittest` option to the compiled
-`rippled` executable. The executable will exit with summary info after running
-the unit tests.
-
diff --git a/Builds/build_all.sh b/Builds/build_all.sh
deleted file mode 100755
index 3a08e3b5a..000000000
--- a/Builds/build_all.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env bash
-
-num_procs=$(lscpu -p | grep -v '^#' | sort -u -t, -k 2,4 | wc -l) # number of physical cores
-
-path=$(cd $(dirname $0) && pwd)
-cd $(dirname $path)
-${path}/Test.py -a -c --testjobs=${num_procs} -- -j${num_procs}
diff --git a/Builds/containers/README.md b/Builds/containers/README.md
deleted file mode 100644
index 9d96eb771..000000000
--- a/Builds/containers/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-
-# rippled Packaging and Containers
-
-This folder contains docker container definitions and configuration
-files to support building rpm and deb packages of rippled. The container
-definitions include some additional software/packages that are used
-for general build/test CI workflows of rippled but are not explicitly
-needed for the package building workflow.
-
-## CMake Targets
-
-If you have docker installed on your local system, then the main 
-CMake file will enable several targets related to building packages:
-`rpm_container`, `rpm`, `dpkg_container`, and `dpkg`. The package targets
-depend on the container targets and will trigger a build of those first.
-The container builds can take several dozen minutes to complete (depending
-on hardware specs), so quick build cycles are not possible currently. As
-such, these targets are often best suited to CI/automated build systems.
-
-The package build can be invoked like any other cmake target from the 
-rippled root folder:
-```
-mkdir -p build/pkg && cd build/pkg
-cmake -Dpackages_only=ON ../..
-cmake --build . --target rpm
-```
-Upon successful completion, the generated package files will be in 
-the `build/pkg/packages` directory. For deb packages, simply replace
-`rpm` with `dpkg` in the build command above.
-
-
diff --git a/Builds/containers/centos-builder/Dockerfile b/Builds/containers/centos-builder/Dockerfile
deleted file mode 100644
index ef5f80f7b..000000000
--- a/Builds/containers/centos-builder/Dockerfile
+++ /dev/null
@@ -1,26 +0,0 @@
-FROM rippleci/centos:7
-ARG GIT_COMMIT=unknown
-ARG CI_USE=false
-
-LABEL git-commit=$GIT_COMMIT
-
-COPY centos-builder/centos_setup.sh /tmp/
-COPY shared/install_cmake.sh /tmp/
-RUN chmod +x /tmp/centos_setup.sh && \
-    chmod +x /tmp/install_cmake.sh
-RUN /tmp/centos_setup.sh
-
-RUN /tmp/install_cmake.sh 3.16.3 /opt/local/cmake-3.16
-RUN ln -s /opt/local/cmake-3.16 /opt/local/cmake
-ENV PATH="/opt/local/cmake/bin:$PATH"
-# TODO: Install latest CMake for testing
-RUN if [ "${CI_USE}" = true ] ; then /tmp/install_cmake.sh 3.16.3 /opt/local/cmake-3.16; fi
-
-RUN mkdir -m 777 -p /opt/rippled_bld/pkg
-
-WORKDIR /opt/rippled_bld/pkg
-RUN mkdir -m 777 ./rpmbuild
-RUN mkdir -m 777 ./rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
-
-COPY packaging/rpm/build_rpm.sh ./
-CMD ./build_rpm.sh
diff --git a/Builds/containers/centos-builder/centos_setup.sh b/Builds/containers/centos-builder/centos_setup.sh
deleted file mode 100755
index ea110058b..000000000
--- a/Builds/containers/centos-builder/centos_setup.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-set -ex
-
-source /etc/os-release
-
-yum -y upgrade
-yum -y update
-yum -y install epel-release centos-release-scl
-yum -y install \
-    wget curl time gcc-c++ yum-utils autoconf automake pkgconfig libtool \
-    libstdc++-static rpm-build gnupg which make cmake \
-    devtoolset-11 devtoolset-11-gdb devtoolset-11-binutils devtoolset-11-libstdc++-devel \
-    devtoolset-11-libasan-devel devtoolset-11-libtsan-devel devtoolset-11-libubsan-devel devtoolset-11-liblsan-devel \
-    flex flex-devel bison bison-devel parallel \
-    ncurses ncurses-devel ncurses-libs graphviz graphviz-devel \
-    lzip p7zip bzip2 bzip2-devel lzma-sdk lzma-sdk-devel xz-devel \
-    zlib zlib-devel zlib-static texinfo openssl openssl-static \
-    jemalloc jemalloc-devel \
-    libicu-devel htop \
-    rh-python38 \
-    ninja-build git svn \
-    swig perl-Digest-MD5
diff --git a/Builds/containers/gitlab-ci/build_container.sh b/Builds/containers/gitlab-ci/build_container.sh
deleted file mode 100644
index ea47b4a11..000000000
--- a/Builds/containers/gitlab-ci/build_container.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env sh
-set -ex
-pkgtype=$1
-if [ "${pkgtype}" = "rpm" ] ; then
-    container_name="${RPM_CONTAINER_NAME}"
-elif [ "${pkgtype}" = "dpkg" ] ; then
-    container_name="${DPKG_CONTAINER_NAME}"
-else
-    echo "invalid package type"
-    exit 1
-fi
-
-if docker pull "${ARTIFACTORY_HUB}/${container_name}:latest_${CI_COMMIT_REF_SLUG}"; then
-    echo "found container for latest - using as cache."
-    docker tag \
-       "${ARTIFACTORY_HUB}/${container_name}:latest_${CI_COMMIT_REF_SLUG}" \
-       "${container_name}:latest_${CI_COMMIT_REF_SLUG}"
-    CMAKE_EXTRA="-D${pkgtype}_cache_from=${container_name}:latest_${CI_COMMIT_REF_SLUG}"
-fi
-
-cmake --version
-test -d build && rm -rf build
-mkdir -p build/container && cd build/container
-eval time \
-    cmake -Dpackages_only=ON -DCMAKE_VERBOSE_MAKEFILE=ON ${CMAKE_EXTRA} \
-    -G Ninja ../..
-time cmake --build . --target "${pkgtype}_container" -- -v
-
diff --git a/Builds/containers/gitlab-ci/build_package.sh b/Builds/containers/gitlab-ci/build_package.sh
deleted file mode 100644
index 4c591677f..000000000
--- a/Builds/containers/gitlab-ci/build_package.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env sh
-set -ex
-pkgtype=$1
-if [ "${pkgtype}" = "rpm" ] ; then
-    container_name="${RPM_CONTAINER_FULLNAME}"
-    container_tag="${RPM_CONTAINER_TAG}"
-elif [ "${pkgtype}" = "dpkg" ] ; then
-    container_name="${DPKG_CONTAINER_FULLNAME}"
-    container_tag="${DPKG_CONTAINER_TAG}"
-else
-    echo "invalid package type"
-    exit 1
-fi
-time docker pull "${ARTIFACTORY_HUB}/${container_name}"
-docker tag \
-  "${ARTIFACTORY_HUB}/${container_name}" \
-  "${container_name}"
-docker images
-test -d build && rm -rf build
-mkdir -p build/${pkgtype} && cd build/${pkgtype}
-time cmake \
-  -Dpackages_only=ON \
-  -Dcontainer_label="${container_tag}" \
-  -Dhave_package_container=ON \
-  -DCMAKE_VERBOSE_MAKEFILE=ON \
-  -Dunity=OFF \
-  -G Ninja ../..
-time cmake --build . --target ${pkgtype} -- -v
diff --git a/Builds/containers/gitlab-ci/docker_alpine_setup.sh b/Builds/containers/gitlab-ci/docker_alpine_setup.sh
deleted file mode 100644
index a36543192..000000000
--- a/Builds/containers/gitlab-ci/docker_alpine_setup.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env sh
-set -e
-# used as a before/setup script for docker steps in gitlab-ci
-# expects to be run in standard alpine/dind image
-echo $(nproc)
-docker login -u rippled \
-    -p ${ARTIFACTORY_DEPLOY_KEY_RIPPLED} ${ARTIFACTORY_HUB}
-apk add --update py-pip
-apk add \
-    bash util-linux coreutils binutils grep \
-    make ninja cmake build-base gcc g++ abuild git \
-    python3 python3-dev
-pip3 install awscli
-# list curdir contents to build log:
-ls -la
diff --git a/Builds/containers/gitlab-ci/get_component.sh b/Builds/containers/gitlab-ci/get_component.sh
deleted file mode 100644
index 99963f408..000000000
--- a/Builds/containers/gitlab-ci/get_component.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env sh
-case ${CI_COMMIT_REF_NAME} in
-    develop)
-        export COMPONENT="nightly"
-        ;;
-    release)
-        export COMPONENT="unstable"
-        ;;
-    master)
-        export COMPONENT="stable"
-        ;;
-    *)
-        export COMPONENT="_unknown_"
-        ;;
-esac
-
diff --git a/Builds/containers/gitlab-ci/pkgbuild.yml b/Builds/containers/gitlab-ci/pkgbuild.yml
deleted file mode 100644
index f35186acb..000000000
--- a/Builds/containers/gitlab-ci/pkgbuild.yml
+++ /dev/null
@@ -1,646 +0,0 @@
-#########################################################################
-##                                                                     ##
-##  gitlab CI defintition for rippled build containers and distro      ##
-##  packages (rpm and dpkg).                                           ##
-##                                                                     ##
-#########################################################################
-
-# NOTE: these are sensible defaults for Ripple pipelines. These
-# can be overridden by project or group variables as needed.
-variables:
-  # these containers are built manually using the rippled
-  # cmake build (container targets) and tagged/pushed so they
-  # can be used here
-  RPM_CONTAINER_TAG: "2023-02-13"
-  RPM_CONTAINER_NAME: "rippled-rpm-builder"
-  RPM_CONTAINER_FULLNAME: "${RPM_CONTAINER_NAME}:${RPM_CONTAINER_TAG}"
-  DPKG_CONTAINER_TAG: "2023-03-20"
-  DPKG_CONTAINER_NAME: "rippled-dpkg-builder"
-  DPKG_CONTAINER_FULLNAME: "${DPKG_CONTAINER_NAME}:${DPKG_CONTAINER_TAG}"
-  ARTIFACTORY_HOST: "artifactory.ops.ripple.com"
-  ARTIFACTORY_HUB: "${ARTIFACTORY_HOST}:6555"
-  GIT_SIGN_PUBKEYS_URL: "https://gitlab.ops.ripple.com/xrpledger/rippled-packages/snippets/49/raw"
-  PUBLIC_REPO_ROOT: "https://repos.ripple.com/repos"
-  # also need to define this variable ONLY for the primary
-  # build/publish pipeline on the mainline repo:
-  #   IS_PRIMARY_REPO = "true"
-
-stages:
-  - build_packages
-  - sign_packages
-  - smoketest
-  - verify_sig
-  - tag_images
-  - push_to_test
-  - verify_from_test
-  - wait_approval_prod
-  - push_to_prod
-  - verify_from_prod
-  - get_final_hashes
-  - build_containers
-
-.dind_template: &dind_param
-  before_script:
-    - . ./Builds/containers/gitlab-ci/docker_alpine_setup.sh
-  variables:
-    docker_driver: overlay2
-    DOCKER_TLS_CERTDIR: ""
-  image:
-    name: artifactory.ops.ripple.com/docker:latest
-  services:
-    # workaround for TLS issues - consider going back
-    # back to unversioned `dind` when issues are resolved
-    - name: artifactory.ops.ripple.com/docker:stable-dind
-      alias: docker
-  tags:
-    - 4xlarge
-
-.only_primary_template: &only_primary
-  only:
-    refs:
-      - /^(master|release|develop)$/
-    variables:
-      - $IS_PRIMARY_REPO == "true"
-
-.smoketest_local_template: &run_local_smoketest
-  tags:
-    - xlarge
-  script:
-    - . ./Builds/containers/gitlab-ci/smoketest.sh local
-
-.smoketest_repo_template: &run_repo_smoketest
-  tags:
-    - xlarge
-  script:
-    - . ./Builds/containers/gitlab-ci/smoketest.sh repo
-
-#########################################################################
-##                                                                     ##
-##  stage: build_packages                                              ##
-##                                                                     ##
-##  build packages using containers from previous stage.               ##
-##                                                                     ##
-#########################################################################
-
-rpm_build:
-  timeout: "1h 30m"
-  stage: build_packages
-  <<: *dind_param
-  artifacts:
-    paths:
-      - build/rpm/packages/
-  script:
-    - . ./Builds/containers/gitlab-ci/build_package.sh rpm
-
-dpkg_build:
-  timeout: "1h 30m"
-  stage: build_packages
-  <<: *dind_param
-  artifacts:
-    paths:
-      - build/dpkg/packages/
-  script:
-    - . ./Builds/containers/gitlab-ci/build_package.sh dpkg
-
-#########################################################################
-##                                                                     ##
-##  stage: sign_packages                                               ##
-##                                                                     ##
-##  build packages using containers from previous stage.               ##
-##                                                                     ##
-#########################################################################
-
-rpm_sign:
-  stage: sign_packages
-  dependencies:
-    - rpm_build
-  image:
-    name: artifactory.ops.ripple.com/centos:7
-  <<: *only_primary
-  before_script:
-  - |
-    # Make sure GnuPG is installed
-    yum -y install gnupg rpm-sign
-    # checking GPG signing support
-    if [ -n "$GPG_KEY_B64" ]; then
-      echo "$GPG_KEY_B64"| base64 -d | gpg --batch --no-tty --allow-secret-key-import --import -
-      unset GPG_KEY_B64
-      export GPG_PASSPHRASE=$(echo $GPG_KEY_PASS_B64 | base64 -di)
-      unset GPG_KEY_PASS_B64
-      export GPG_KEYID=$(gpg --with-colon --list-secret-keys | head -n1 | cut -d : -f 5)
-    else
-      echo -e "\033[0;31m****** GPG signing disabled ******\033[0m"
-      exit 1
-    fi
-  artifacts:
-    paths:
-      - build/rpm/packages/
-  script:
-    - ls -alh build/rpm/packages
-    - . ./Builds/containers/gitlab-ci/sign_package.sh rpm
-
-dpkg_sign:
-  stage: sign_packages
-  dependencies:
-    - dpkg_build
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:18.04
-  <<: *only_primary
-  before_script:
-  - |
-    # make sure we have GnuPG
-    apt update
-    apt install -y gpg dpkg-sig
-    # checking GPG signing support
-    if [ -n "$GPG_KEY_B64" ]; then
-      echo "$GPG_KEY_B64"| base64 -d | gpg --batch --no-tty --allow-secret-key-import --import -
-      unset GPG_KEY_B64
-      export GPG_PASSPHRASE=$(echo $GPG_KEY_PASS_B64 | base64 -di)
-      unset GPG_KEY_PASS_B64
-      export GPG_KEYID=$(gpg --with-colon --list-secret-keys | head -n1 | cut -d : -f 5)
-    else
-      echo -e "\033[0;31m****** GPG signing disabled ******\033[0m"
-      exit 1
-    fi
-  artifacts:
-    paths:
-      - build/dpkg/packages/
-  script:
-    - ls -alh build/dpkg/packages
-    - . ./Builds/containers/gitlab-ci/sign_package.sh dpkg
-
-#########################################################################
-##                                                                     ##
-##  stage: smoketest                                                   ##
-##                                                                     ##
-##  install unsigned packages from previous step and run unit tests.   ##
-##                                                                     ##
-#########################################################################
-
-centos_7_smoketest:
-  stage: smoketest
-  dependencies:
-    - rpm_build
-  image:
-    name: artifactory.ops.ripple.com/centos:7
-  <<: *run_local_smoketest
-
-rocky_8_smoketest:
-  stage: smoketest
-  dependencies:
-    - rpm_build
-  image:
-    name: artifactory.ops.ripple.com/rockylinux/rockylinux:8
-  <<: *run_local_smoketest
-
-fedora_37_smoketest:
-  stage: smoketest
-  dependencies:
-    - rpm_build
-  image:
-    name: artifactory.ops.ripple.com/fedora:37
-  <<: *run_local_smoketest
-
-fedora_38_smoketest:
-  stage: smoketest
-  dependencies:
-    - rpm_build
-  image:
-    name: artifactory.ops.ripple.com/fedora:38
-  <<: *run_local_smoketest
-
-ubuntu_18_smoketest:
-  stage: smoketest
-  dependencies:
-    - dpkg_build
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:18.04
-  <<: *run_local_smoketest
-
-ubuntu_20_smoketest:
-  stage: smoketest
-  dependencies:
-    - dpkg_build
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:20.04
-  <<: *run_local_smoketest
-
-ubuntu_22_smoketest:
-  stage: smoketest
-  dependencies:
-    - dpkg_build
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:22.04
-  <<: *run_local_smoketest
-
-debian_10_smoketest:
-  stage: smoketest
-  dependencies:
-    - dpkg_build
-  image:
-    name: artifactory.ops.ripple.com/debian:10
-  <<: *run_local_smoketest
-
-debian_11_smoketest:
-  stage: smoketest
-  dependencies:
-    - dpkg_build
-  image:
-    name: artifactory.ops.ripple.com/debian:11
-  <<: *run_local_smoketest
-
-#########################################################################
-##                                                                     ##
-##  stage: verify_sig                                                  ##
-##                                                                     ##
-##  use git/gpg to verify that HEAD is signed by an approved           ##
-##  committer. The whitelist of pubkeys is manually mantained          ##
-##  and fetched from GIT_SIGN_PUBKEYS_URL (currently a snippet         ##
-##  link).                                                             ##
-##  ONLY RUNS FOR PRIMARY BRANCHES/REPO                                ##
-##                                                                     ##
-#########################################################################
-
-verify_head_signed:
-  stage: verify_sig
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:latest
-  <<: *only_primary
-  script:
-    - . ./Builds/containers/gitlab-ci/verify_head_commit.sh
-
-#########################################################################
-##                                                                     ##
-##  stage: tag_images                                                  ##
-##                                                                     ##
-##  apply rippled version tag to containers from previous stage.       ##
-##  ONLY RUNS FOR PRIMARY BRANCHES/REPO                                ##
-##                                                                     ##
-#########################################################################
-
-tag_bld_images:
-  stage: tag_images
-  variables:
-    docker_driver: overlay2
-    DOCKER_TLS_CERTDIR: ""
-  image:
-    name: artifactory.ops.ripple.com/docker:latest
-  services:
-    # workaround for TLS issues - consider going back
-    # back to unversioned `dind` when issues are resolved
-    - name: artifactory.ops.ripple.com/docker:stable-dind
-      alias: docker
-  tags:
-    - large
-  dependencies:
-    - rpm_sign
-    - dpkg_sign
-  <<: *only_primary
-  script:
-    - . ./Builds/containers/gitlab-ci/tag_docker_image.sh
-
-#########################################################################
-##                                                                     ##
-##  stage: push_to_test                                                ##
-##                                                                     ##
-##  push packages to artifactory repositories (test)                   ##
-##  ONLY RUNS FOR PRIMARY BRANCHES/REPO                                ##
-##                                                                     ##
-#########################################################################
-
-push_test:
-  stage: push_to_test
-  variables:
-    DEB_REPO: "rippled-deb-test-mirror"
-    RPM_REPO: "rippled-rpm-test-mirror"
-  image:
-    name: artifactory.ops.ripple.com/alpine:latest
-  artifacts:
-    paths:
-      - files.info
-  dependencies:
-    - rpm_sign
-    - dpkg_sign
-  <<: *only_primary
-  script:
-    - . ./Builds/containers/gitlab-ci/push_to_artifactory.sh "PUT" "."
-
-#########################################################################
-##                                                                     ##
-##  stage: verify_from_test                                            ##
-##                                                                     ##
-##  install/test packages from test repos.                             ##
-##  ONLY RUNS FOR PRIMARY BRANCHES/REPO                                ##
-##                                                                     ##
-#########################################################################
-
-centos_7_verify_repo_test:
-  stage: verify_from_test
-  variables:
-    RPM_REPO: "rippled-rpm-test-mirror"
-  image:
-    name: artifactory.ops.ripple.com/centos:7
-  dependencies:
-    - rpm_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-
-rocky_8_verify_repo_test:
-  stage: verify_from_test
-  variables:
-    RPM_REPO: "rippled-rpm-test-mirror"
-  image:
-    name: artifactory.ops.ripple.com/rockylinux/rockylinux:8
-  dependencies:
-    - rpm_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-fedora_37_verify_repo_test:
-  stage: verify_from_test
-  variables:
-    RPM_REPO: "rippled-rpm-test-mirror"
-  image:
-    name: artifactory.ops.ripple.com/fedora:37
-  dependencies:
-    - rpm_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-fedora_38_verify_repo_test:
-  stage: verify_from_test
-  variables:
-    RPM_REPO: "rippled-rpm-test-mirror"
-  image:
-    name: artifactory.ops.ripple.com/fedora:38
-  dependencies:
-    - rpm_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-ubuntu_18_verify_repo_test:
-  stage: verify_from_test
-  variables:
-    DISTRO: "bionic"
-    DEB_REPO: "rippled-deb-test-mirror"
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:18.04
-  dependencies:
-    - dpkg_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-ubuntu_20_verify_repo_test:
-  stage: verify_from_test
-  variables:
-    DISTRO: "focal"
-    DEB_REPO: "rippled-deb-test-mirror"
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:20.04
-  dependencies:
-    - dpkg_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-ubuntu_22_verify_repo_test:
-  stage: verify_from_test
-  variables:
-    DISTRO: "jammy"
-    DEB_REPO: "rippled-deb-test-mirror"
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:22.04
-  dependencies:
-    - dpkg_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-debian_10_verify_repo_test:
-  stage: verify_from_test
-  variables:
-    DISTRO: "buster"
-    DEB_REPO: "rippled-deb-test-mirror"
-  image:
-    name: artifactory.ops.ripple.com/debian:10
-  dependencies:
-    - dpkg_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-debian_11_verify_repo_test:
-  stage: verify_from_test
-  variables:
-    DISTRO: "bullseye"
-    DEB_REPO: "rippled-deb-test-mirror"
-  image:
-    name: artifactory.ops.ripple.com/debian:11
-  dependencies:
-    - dpkg_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-#########################################################################
-##                                                                     ##
-##  stage: wait_approval_prod                                          ##
-##                                                                     ##
-##  wait for manual approval before proceeding to next stage           ##
-##  which pushes to prod repo.                                         ##
-##  ONLY RUNS FOR PRIMARY BRANCHES/REPO                                ##
-##                                                                     ##
-#########################################################################
-wait_before_push_prod:
-  stage: wait_approval_prod
-  image:
-    name: artifactory.ops.ripple.com/alpine:latest
-  <<: *only_primary
-  script:
-    - echo "proceeding to next stage"
-  when: manual
-  allow_failure: false
-
-#########################################################################
-##                                                                     ##
-##  stage: push_to_prod                                                ##
-##                                                                     ##
-##  push packages to artifactory repositories (prod)                   ##
-##  ONLY RUNS FOR PRIMARY BRANCHES/REPO                                ##
-##                                                                     ##
-#########################################################################
-
-push_prod:
-  variables:
-    DEB_REPO: "rippled-deb"
-    RPM_REPO: "rippled-rpm"
-  image:
-    name: artifactory.ops.ripple.com/alpine:latest
-  stage: push_to_prod
-  artifacts:
-    paths:
-      - files.info
-  dependencies:
-    - rpm_sign
-    - dpkg_sign
-  <<: *only_primary
-  script:
-    - . ./Builds/containers/gitlab-ci/push_to_artifactory.sh "PUT" "."
-
-#########################################################################
-##                                                                     ##
-##  stage: verify_from_prod                                            ##
-##                                                                     ##
-##  install/test packages from prod repos.                             ##
-##  ONLY RUNS FOR PRIMARY BRANCHES/REPO                                ##
-##                                                                     ##
-#########################################################################
-
-centos_7_verify_repo_prod:
-  stage: verify_from_prod
-  variables:
-    RPM_REPO: "rippled-rpm"
-  image:
-    name: artifactory.ops.ripple.com/centos:7
-  dependencies:
-    - rpm_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-rocky_8_verify_repo_prod:
-  stage: verify_from_prod
-  variables:
-    RPM_REPO: "rippled-rpm"
-  image:
-    name: artifactory.ops.ripple.com/rockylinux/rockylinux:8
-  dependencies:
-    - rpm_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-fedora_37_verify_repo_prod:
-  stage: verify_from_prod
-  variables:
-    RPM_REPO: "rippled-rpm"
-  image:
-    name: artifactory.ops.ripple.com/fedora:37
-  dependencies:
-    - rpm_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-fedora_38_verify_repo_prod:
-  stage: verify_from_prod
-  variables:
-    RPM_REPO: "rippled-rpm"
-  image:
-    name: artifactory.ops.ripple.com/fedora:38
-  dependencies:
-    - rpm_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-ubuntu_18_verify_repo_prod:
-  stage: verify_from_prod
-  variables:
-    DISTRO: "bionic"
-    DEB_REPO: "rippled-deb"
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:18.04
-  dependencies:
-    - dpkg_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-ubuntu_20_verify_repo_prod:
-  stage: verify_from_prod
-  variables:
-    DISTRO: "focal"
-    DEB_REPO: "rippled-deb"
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:20.04
-  dependencies:
-    - dpkg_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-ubuntu_22_verify_repo_prod:
-  stage: verify_from_prod
-  variables:
-    DISTRO: "jammy"
-    DEB_REPO: "rippled-deb"
-  image:
-    name: artifactory.ops.ripple.com/ubuntu:22.04
-  dependencies:
-    - dpkg_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-debian_10_verify_repo_prod:
-  stage: verify_from_prod
-  variables:
-    DISTRO: "buster"
-    DEB_REPO: "rippled-deb"
-  image:
-    name: artifactory.ops.ripple.com/debian:10
-  dependencies:
-    - dpkg_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-debian_11_verify_repo_prod:
-  stage: verify_from_prod
-  variables:
-    DISTRO: "bullseye"
-    DEB_REPO: "rippled-deb"
-  image:
-    name: artifactory.ops.ripple.com/debian:11
-  dependencies:
-    - dpkg_sign
-  <<: *only_primary
-  <<: *run_repo_smoketest
-
-#########################################################################
-##                                                                     ##
-##  stage: get_final_hashes                                            ##
-##                                                                     ##
-##  fetch final hashes from artifactory.                               ##
-##  ONLY RUNS FOR PRIMARY BRANCHES/REPO                                ##
-##                                                                     ##
-#########################################################################
-
-get_prod_hashes:
-  variables:
-    DEB_REPO: "rippled-deb"
-    RPM_REPO: "rippled-rpm"
-  image:
-    name: artifactory.ops.ripple.com/alpine:latest
-  stage: get_final_hashes
-  artifacts:
-    paths:
-      - files.info
-  dependencies:
-    - rpm_sign
-    - dpkg_sign
-  <<: *only_primary
-  script:
-    - . ./Builds/containers/gitlab-ci/push_to_artifactory.sh "GET" ".checksums"
-
-#########################################################################
-##                                                                     ##
-##  stage: build_containers                                            ##
-##                                                                     ##
-##  build containers from docker definitions. These containers are NOT ##
-##  used for the package build. This step is only used to ensure that  ##
-##  the package build targets and files are still working properly.    ##
-##                                                                     ##
-#########################################################################
-
-build_centos_container:
-  stage: build_containers
-  <<: *dind_param
-  script:
-    - . ./Builds/containers/gitlab-ci/build_container.sh rpm
-
-build_ubuntu_container:
-  stage: build_containers
-  <<: *dind_param
-  script:
-    - . ./Builds/containers/gitlab-ci/build_container.sh dpkg
diff --git a/Builds/containers/gitlab-ci/push_to_artifactory.sh b/Builds/containers/gitlab-ci/push_to_artifactory.sh
deleted file mode 100644
index 97a9e906a..000000000
--- a/Builds/containers/gitlab-ci/push_to_artifactory.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env sh
-set -e
-action=$1
-filter=$2
-
-. ./Builds/containers/gitlab-ci/get_component.sh
-
-apk add curl jq coreutils util-linux
-TOPDIR=$(pwd)
-
-# DPKG
-
-cd $TOPDIR
-cd build/dpkg/packages
-CURLARGS="-sk -X${action} -urippled:${ARTIFACTORY_DEPLOY_KEY_RIPPLED}"
-RIPPLED_PKG=$(ls rippled_*.deb)
-RIPPLED_REPORTING_PKG=$(ls rippled-reporting_*.deb)
-RIPPLED_DBG_PKG=$(ls rippled-dbgsym_*.*deb)
-RIPPLED_REPORTING_DBG_PKG=$(ls rippled-reporting-dbgsym_*.*deb)
-# TODO - where to upload src tgz?
-RIPPLED_SRC=$(ls rippled_*.orig.tar.gz)
-DEB_MATRIX=";deb.component=${COMPONENT};deb.architecture=amd64"
-for dist in buster bullseye bionic focal jammy; do
-    DEB_MATRIX="${DEB_MATRIX};deb.distribution=${dist}"
-done
-echo "{ \"debs\": {" > "${TOPDIR}/files.info"
-for deb in ${RIPPLED_PKG} ${RIPPLED_DBG_PKG} ${RIPPLED_REPORTING_PKG} ${RIPPLED_REPORTING_DBG_PKG}; do
-    # first item doesn't get a comma separator
-    if [ $deb != $RIPPLED_PKG ] ; then
-        echo "," >> "${TOPDIR}/files.info"
-    fi
-    echo "\"${deb}\"": | tee -a "${TOPDIR}/files.info"
-    ca="${CURLARGS}"
-    if [ "${action}" = "PUT" ] ; then
-        url="https://${ARTIFACTORY_HOST}/artifactory/${DEB_REPO}/pool/${COMPONENT}/${deb}${DEB_MATRIX}"
-        ca="${ca} -T${deb}"
-    elif [ "${action}" = "GET" ] ; then
-        url="https://${ARTIFACTORY_HOST}/artifactory/api/storage/${DEB_REPO}/pool/${COMPONENT}/${deb}"
-    fi
-    echo "file info request url --> ${url}"
-    eval "curl ${ca} \"${url}\"" | jq -M "${filter}" | tee -a "${TOPDIR}/files.info"
-done
-echo "}," >> "${TOPDIR}/files.info"
-
-# RPM
-
-cd $TOPDIR
-cd build/rpm/packages
-RIPPLED_PKG=$(ls rippled-[0-9]*.x86_64.rpm)
-RIPPLED_DEV_PKG=$(ls rippled-devel*.rpm)
-RIPPLED_DBG_PKG=$(ls rippled-debuginfo*.rpm)
-RIPPLED_REPORTING_PKG=$(ls rippled-reporting*.rpm)
-# TODO - where to upload src rpm ?
-RIPPLED_SRC=$(ls rippled-[0-9]*.src.rpm)
-echo "\"rpms\": {" >> "${TOPDIR}/files.info"
-for rpm in ${RIPPLED_PKG} ${RIPPLED_DEV_PKG} ${RIPPLED_DBG_PKG} ${RIPPLED_REPORTING_PKG}; do
-    # first item doesn't get a comma separator
-    if [ $rpm != $RIPPLED_PKG ] ; then
-        echo "," >> "${TOPDIR}/files.info"
-    fi
-    echo "\"${rpm}\"": | tee -a "${TOPDIR}/files.info"
-    ca="${CURLARGS}"
-    if [ "${action}" = "PUT" ] ; then
-        url="https://${ARTIFACTORY_HOST}/artifactory/${RPM_REPO}/${COMPONENT}/"
-        ca="${ca} -T${rpm}"
-    elif [ "${action}" = "GET" ] ; then
-        url="https://${ARTIFACTORY_HOST}/artifactory/api/storage/${RPM_REPO}/${COMPONENT}/${rpm}"
-    fi
-    echo "file info request url --> ${url}"
-    eval "curl ${ca} \"${url}\"" | jq -M "${filter}" | tee -a "${TOPDIR}/files.info"
-done
-echo "}}" >> "${TOPDIR}/files.info"
-jq '.' "${TOPDIR}/files.info" > "${TOPDIR}/files.info.tmp"
-mv "${TOPDIR}/files.info.tmp" "${TOPDIR}/files.info"
-
-if [ ! -z "${SLACK_NOTIFY_URL}" ] && [ "${action}" = "GET" ] ; then
-    # extract files.info content to variable and sanitize so it can
-    # be interpolated into a slack text field below
-    finfo=$(cat ${TOPDIR}/files.info | sed -e ':a' -e 'N' -e '$!ba' -e 's/\n/\\n/g' | sed -E 's/"/\\"/g')
-    # try posting file info to slack.
-    # can add channel field to payload if the
-    # default channel is incorrect. Get rid of
-    # newlines in payload json since slack doesn't accept them
-    CONTENT=$(tr -d '[\n]' <> /etc/apt/sources.list
-        updateWithRetry
-        # uncomment this next line if you want to see the available package versions
-        # apt-cache policy rippled
-        apt-get -y install rippled=${dpkg_full_version}
-    elif [ "${install_from}" = "local" ] ; then
-        # cached pkg install
-        updateWithRetry
-        apt-get -y install libprotobuf-dev libprotoc-dev protobuf-compiler libssl-dev
-        rm -f build/dpkg/packages/rippled-dbgsym*.*
-        dpkg --no-debsig -i build/dpkg/packages/*.deb
-    else
-        echo "unrecognized pkg source!"
-        exit 1
-    fi
-else
-    yum -y update
-    if [ "${install_from}" = "repo" ] ; then
-        pkgs=("yum-utils coreutils util-linux")
-        if [ "$ID" = "rocky" ]; then
-            pkgs="${pkgs[@]/coreutils}"
-        fi
-        yum install -y $pkgs
-        REPOFILE="/etc/yum.repos.d/artifactory.repo"
-        echo "[Artifactory]" > ${REPOFILE}
-        echo "name=Artifactory" >> ${REPOFILE}
-        echo "baseurl=${REPO_ROOT}/${RPM_REPO}/${COMPONENT}/" >> ${REPOFILE}
-        echo "enabled=1" >> ${REPOFILE}
-        echo "gpgcheck=0" >> ${REPOFILE}
-        echo "gpgkey=${REPO_ROOT}/${RPM_REPO}/${COMPONENT}/repodata/repomd.xml.key" >> ${REPOFILE}
-        echo "repo_gpgcheck=1" >> ${REPOFILE}
-        yum -y update
-        # uncomment this next line if you want to see the available package versions
-        # yum --showduplicates list rippled
-        yum -y install ${rpm_version_release}
-    elif [ "${install_from}" = "local" ] ; then
-        # cached pkg install
-        pkgs=("yum-utils openssl-static zlib-static")
-        if [[ "$ID" =~ rocky|fedora ]]; then
-            if [[ "$ID" =~ "rocky" ]]; then
-                sed -i 's/enabled=0/enabled=1/g' /etc/yum.repos.d/Rocky-PowerTools.repo
-            fi
-            pkgs="${pkgs[@]/openssl-static}"
-        fi
-        yum install -y $pkgs
-        rm -f build/rpm/packages/rippled-debug*.rpm
-        rm -f build/rpm/packages/*.src.rpm
-        rpm -i build/rpm/packages/*.rpm
-    else
-        echo "unrecognized pkg source!"
-        exit 1
-    fi
-fi
-
-# verify installed version
-INSTALLED=$(/opt/ripple/bin/rippled --version | awk '{print $NF}')
-if [ "${rippled_version}" != "${INSTALLED}" ] ; then
-    echo "INSTALLED version ${INSTALLED} does not match ${rippled_version}"
-    exit 1
-fi
-# run unit tests
-/opt/ripple/bin/rippled --unittest --unittest-jobs $(nproc)
-/opt/ripple/bin/validator-keys --unittest
diff --git a/Builds/containers/gitlab-ci/tag_docker_image.sh b/Builds/containers/gitlab-ci/tag_docker_image.sh
deleted file mode 100644
index 662590595..000000000
--- a/Builds/containers/gitlab-ci/tag_docker_image.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env sh
-set -e
-docker login -u rippled \
-    -p ${ARTIFACTORY_DEPLOY_KEY_RIPPLED} "${ARTIFACTORY_HUB}"
-# this gives us rippled_version :
-source build/rpm/packages/build_vars
-docker pull "${ARTIFACTORY_HUB}/${RPM_CONTAINER_FULLNAME}"
-docker pull "${ARTIFACTORY_HUB}/${DPKG_CONTAINER_FULLNAME}"
-# tag/push two labels...one using the current rippled version and one just using "latest"
-for label in ${rippled_version} latest ; do
-    docker tag \
-        "${ARTIFACTORY_HUB}/${RPM_CONTAINER_FULLNAME}" \
-        "${ARTIFACTORY_HUB}/${RPM_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
-    docker push \
-        "${ARTIFACTORY_HUB}/${RPM_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
-    docker tag \
-        "${ARTIFACTORY_HUB}/${DPKG_CONTAINER_FULLNAME}" \
-        "${ARTIFACTORY_HUB}/${DPKG_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
-    docker push \
-        "${ARTIFACTORY_HUB}/${DPKG_CONTAINER_NAME}:${label}_${CI_COMMIT_REF_SLUG}"
-done
diff --git a/Builds/containers/gitlab-ci/verify_head_commit.sh b/Builds/containers/gitlab-ci/verify_head_commit.sh
deleted file mode 100644
index f724b7e96..000000000
--- a/Builds/containers/gitlab-ci/verify_head_commit.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env sh
-set -ex
-apt -y update
-DEBIAN_FRONTEND="noninteractive" apt-get -y install tzdata
-apt -y install software-properties-common curl git gnupg
-curl -sk -o rippled-pubkeys.txt "${GIT_SIGN_PUBKEYS_URL}"
-gpg --import rippled-pubkeys.txt
-if git verify-commit HEAD; then
-    echo "git commit signature check passed"
-else
-    echo "git commit signature check failed"
-    git log -n 5 --color \
-        --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an> [%G?]%Creset' \
-        --abbrev-commit
-    exit 1
-fi
-
diff --git a/Builds/containers/packaging/dpkg/build_dpkg.sh b/Builds/containers/packaging/dpkg/build_dpkg.sh
deleted file mode 100755
index 3cfafe9e3..000000000
--- a/Builds/containers/packaging/dpkg/build_dpkg.sh
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env bash
-set -ex
-
-# make sure pkg source files are up to date with repo
-cd /opt/rippled_bld/pkg
-cp -fpru rippled/Builds/containers/packaging/dpkg/debian/. debian/
-cp -fpu rippled/Builds/containers/shared/rippled*.service debian/
-cp -fpu rippled/Builds/containers/shared/update_sources.sh .
-source update_sources.sh
-
-# Build the dpkg
-
-#dpkg uses - as separator, so we need to change our -bN versions to tilde
-RIPPLED_DPKG_VERSION=$(echo "${RIPPLED_VERSION}" | sed 's!-!~!g')
-# TODO - decide how to handle the trailing/release
-# version here (hardcoded to 1). Does it ever need to change?
-RIPPLED_DPKG_FULL_VERSION="${RIPPLED_DPKG_VERSION}-1"
-git config --global --add safe.directory /opt/rippled_bld/pkg/rippled
-cd /opt/rippled_bld/pkg/rippled
-if [[ -n $(git status --porcelain) ]]; then
-    git status
-    error "Unstaged changes in this repo - please commit first"
-fi
-git archive --format tar.gz --prefix rippled-${RIPPLED_DPKG_VERSION}/ -o ../rippled-${RIPPLED_DPKG_VERSION}.tar.gz HEAD
-cd ..
-# dpkg debmake would normally create this link, but we do it manually
-ln -s ./rippled-${RIPPLED_DPKG_VERSION}.tar.gz rippled_${RIPPLED_DPKG_VERSION}.orig.tar.gz
-tar xvf rippled-${RIPPLED_DPKG_VERSION}.tar.gz
-cd rippled-${RIPPLED_DPKG_VERSION}
-cp -pr ../debian .
-
-# dpkg requires a changelog. We don't currently maintain
-# a useable one, so let's just fake it with our current version
-# TODO : not sure if the "unstable" will need to change for
-# release packages (?)
-NOWSTR=$(TZ=UTC date -R)
-cat << CHANGELOG > ./debian/changelog
-rippled (${RIPPLED_DPKG_FULL_VERSION}) unstable; urgency=low
-
-  * see RELEASENOTES
-
- -- Ripple Labs Inc.   ${NOWSTR}
-CHANGELOG
-
-# PATH must be preserved for our more modern cmake in /opt/local
-# TODO : consider allowing lintian to run in future ?
-export DH_BUILD_DDEBS=1
-debuild --no-lintian --preserve-envvar PATH --preserve-env -us -uc
-rc=$?; if [[ $rc != 0 ]]; then
-    error "error building dpkg"
-fi
-cd ..
-
-# copy artifacts
-cp rippled-reporting_${RIPPLED_DPKG_FULL_VERSION}_amd64.deb ${PKG_OUTDIR}
-cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.deb ${PKG_OUTDIR}
-cp rippled_${RIPPLED_DPKG_FULL_VERSION}.dsc ${PKG_OUTDIR}
-# dbgsym suffix is ddeb under newer debuild, but just deb under earlier
-cp rippled-dbgsym_${RIPPLED_DPKG_FULL_VERSION}_amd64.* ${PKG_OUTDIR}
-cp rippled-reporting-dbgsym_${RIPPLED_DPKG_FULL_VERSION}_amd64.* ${PKG_OUTDIR}
-cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.changes ${PKG_OUTDIR}
-cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.build ${PKG_OUTDIR}
-cp rippled_${RIPPLED_DPKG_VERSION}.orig.tar.gz ${PKG_OUTDIR}
-cp rippled_${RIPPLED_DPKG_FULL_VERSION}.debian.tar.xz ${PKG_OUTDIR}
-# buildinfo is only generated by later version of debuild
-if [ -e rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.buildinfo ] ; then
-    cp rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.buildinfo ${PKG_OUTDIR}
-fi
-
-cat rippled_${RIPPLED_DPKG_FULL_VERSION}_amd64.changes
-# extract the text in the .changes file that appears between
-#    Checksums-Sha256:  ...
-# and
-#    Files: ...
-awk '/Checksums-Sha256:/{hit=1;next}/Files:/{hit=0}hit' \
-    rippled_${RIPPLED_DPKG_VERSION}-1_amd64.changes | \
-        sed -E 's!^[[:space:]]+!!' > shasums
-DEB_SHA256=$(cat shasums | \
-    grep "rippled_${RIPPLED_DPKG_VERSION}-1_amd64.deb" | cut -d " " -f 1)
-DBG_SHA256=$(cat shasums | \
-    grep "rippled-dbgsym_${RIPPLED_DPKG_VERSION}-1_amd64.*" | cut -d " " -f 1)
-REPORTING_DBG_SHA256=$(cat shasums | \
-    grep "rippled-reporting-dbgsym_${RIPPLED_DPKG_VERSION}-1_amd64.*" | cut -d " " -f 1)
-REPORTING_SHA256=$(cat shasums | \
-    grep "rippled-reporting_${RIPPLED_DPKG_VERSION}-1_amd64.deb" | cut -d " " -f 1)
-SRC_SHA256=$(cat shasums | \
-    grep "rippled_${RIPPLED_DPKG_VERSION}.orig.tar.gz" | cut -d " " -f 1)
-echo "deb_sha256=${DEB_SHA256}" >> ${PKG_OUTDIR}/build_vars
-echo "dbg_sha256=${DBG_SHA256}" >> ${PKG_OUTDIR}/build_vars
-echo "reporting_sha256=${REPORTING_SHA256}" >> ${PKG_OUTDIR}/build_vars
-echo "reporting_dbg_sha256=${REPORTING_DBG_SHA256}" >> ${PKG_OUTDIR}/build_vars
-echo "src_sha256=${SRC_SHA256}" >> ${PKG_OUTDIR}/build_vars
-echo "rippled_version=${RIPPLED_VERSION}" >> ${PKG_OUTDIR}/build_vars
-echo "dpkg_version=${RIPPLED_DPKG_VERSION}" >> ${PKG_OUTDIR}/build_vars
-echo "dpkg_full_version=${RIPPLED_DPKG_FULL_VERSION}" >> ${PKG_OUTDIR}/build_vars
diff --git a/Builds/containers/packaging/dpkg/debian/README.Debian b/Builds/containers/packaging/dpkg/debian/README.Debian
deleted file mode 100644
index 25ba6b55f..000000000
--- a/Builds/containers/packaging/dpkg/debian/README.Debian
+++ /dev/null
@@ -1,3 +0,0 @@
-rippled daemon
-
- -- Mike Ellery   Tue, 04 Dec 2018 18:19:03 +0000
diff --git a/Builds/containers/packaging/dpkg/debian/compat b/Builds/containers/packaging/dpkg/debian/compat
deleted file mode 100644
index f599e28b8..000000000
--- a/Builds/containers/packaging/dpkg/debian/compat
+++ /dev/null
@@ -1 +0,0 @@
-10
diff --git a/Builds/containers/packaging/dpkg/debian/control b/Builds/containers/packaging/dpkg/debian/control
deleted file mode 100644
index b840eb228..000000000
--- a/Builds/containers/packaging/dpkg/debian/control
+++ /dev/null
@@ -1,19 +0,0 @@
-Source: rippled
-Section: misc
-Priority: extra
-Maintainer: Ripple Labs Inc. 
-Build-Depends: cmake, debhelper (>=9), zlib1g-dev, dh-systemd, ninja-build
-Standards-Version: 3.9.7
-Homepage: http://ripple.com/
-
-Package: rippled
-Architecture: any
-Multi-Arch: foreign
-Depends: ${misc:Depends}, ${shlibs:Depends}
-Description: rippled daemon
-
-Package: rippled-reporting
-Architecture: any
-Multi-Arch: foreign
-Depends: ${misc:Depends}, ${shlibs:Depends}
-Description: rippled reporting daemon
diff --git a/Builds/containers/packaging/dpkg/debian/copyright b/Builds/containers/packaging/dpkg/debian/copyright
deleted file mode 100644
index dce318fd7..000000000
--- a/Builds/containers/packaging/dpkg/debian/copyright
+++ /dev/null
@@ -1,86 +0,0 @@
-Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
-Upstream-Name: rippled
-Source: https://github.com/ripple/rippled 
-
-Files: *
-Copyright: 2012-2019 Ripple Labs Inc.
-
-License:   __UNKNOWN__
-
-The accompanying files under various copyrights.
-
-Copyright (c) 2012, 2013, 2014 Ripple Labs Inc.
-
-Permission to use, copy, modify, and distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-The accompanying files incorporate work covered by the following copyright
-and previous license notice:
-
-Copyright (c) 2011 Arthur Britto, David Schwartz, Jed McCaleb,
-Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant
-
-Some code from Raw Material Software, Ltd., provided under the terms of the
-  ISC License. See the corresponding source files for more details.
-  Copyright (c) 2013 - Raw Material Software Ltd.
-  Please visit http://www.juce.com
-
-Some code from ASIO examples:
-// Copyright (c) 2003-2011 Christopher M. Kohlhoff (chris at kohlhoff dot com)
-//
-// Distributed under the Boost Software License, Version 1.0. (See accompanying
-// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-Some code from Bitcoin:
-// Copyright (c) 2009-2010 Satoshi Nakamoto
-// Copyright (c) 2011 The Bitcoin developers
-// Distributed under the MIT/X11 software license, see the accompanying
-// file license.txt or http://www.opensource.org/licenses/mit-license.php.
-
-Some code from Tom Wu:
-This software is covered under the following copyright:
-
-/*
- * Copyright (c) 2003-2005  Tom Wu
- * All Rights Reserved.
- *
- * Permission is hereby granted, free of charge, to any person obtaining
- * a copy of this software and associated documentation files (the
- * "Software"), to deal in the Software without restriction, including
- * without limitation the rights to use, copy, modify, merge, publish,
- * distribute, sublicense, and/or sell copies of the Software, and to
- * permit persons to whom the Software is furnished to do so, subject to
- * the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, 
- * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY 
- * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.  
- *
- * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
- * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
- * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
- * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
- * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
- *
- * In addition, the following condition applies:
- *
- * All redistributions must retain an intact copy of this copyright notice
- * and disclaimer.
- */
-
-Address all questions regarding this license to:
-
-  Tom Wu
-  tjw@cs.Stanford.EDU
diff --git a/Builds/containers/packaging/dpkg/debian/dirs b/Builds/containers/packaging/dpkg/debian/dirs
deleted file mode 100644
index aed307ee1..000000000
--- a/Builds/containers/packaging/dpkg/debian/dirs
+++ /dev/null
@@ -1,3 +0,0 @@
-/var/log/rippled/
-/var/lib/rippled/
-/etc/systemd/system/rippled.service.d/
diff --git a/Builds/containers/packaging/dpkg/debian/docs b/Builds/containers/packaging/dpkg/debian/docs
deleted file mode 100644
index 17aa96662..000000000
--- a/Builds/containers/packaging/dpkg/debian/docs
+++ /dev/null
@@ -1,3 +0,0 @@
-README.md
-LICENSE.md
-RELEASENOTES.md
diff --git a/Builds/containers/packaging/dpkg/debian/rippled-dev.install b/Builds/containers/packaging/dpkg/debian/rippled-dev.install
deleted file mode 100644
index a222857c0..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled-dev.install
+++ /dev/null
@@ -1,3 +0,0 @@
-opt/ripple/include
-opt/ripple/lib/*.a
-opt/ripple/lib/cmake/ripple
diff --git a/Builds/containers/packaging/dpkg/debian/rippled-reporting.dirs b/Builds/containers/packaging/dpkg/debian/rippled-reporting.dirs
deleted file mode 100644
index 0f5cdbd40..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled-reporting.dirs
+++ /dev/null
@@ -1,3 +0,0 @@
-/var/log/rippled-reporting/
-/var/lib/rippled-reporting/
-/etc/systemd/system/rippled-reporting.service.d/
\ No newline at end of file
diff --git a/Builds/containers/packaging/dpkg/debian/rippled-reporting.install b/Builds/containers/packaging/dpkg/debian/rippled-reporting.install
deleted file mode 100644
index 255c7b0b5..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled-reporting.install
+++ /dev/null
@@ -1,8 +0,0 @@
-bld/rippled-reporting/rippled-reporting opt/rippled-reporting/bin
-cfg/rippled-reporting.cfg opt/rippled-reporting/etc
-debian/tmp/opt/rippled-reporting/etc/validators.txt opt/rippled-reporting/etc
-
-opt/rippled-reporting/bin/update-rippled-reporting.sh
-opt/rippled-reporting/bin/getRippledReportingInfo
-opt/rippled-reporting/etc/update-rippled-reporting-cron
-etc/logrotate.d/rippled-reporting
\ No newline at end of file
diff --git a/Builds/containers/packaging/dpkg/debian/rippled-reporting.links b/Builds/containers/packaging/dpkg/debian/rippled-reporting.links
deleted file mode 100644
index ab83b0c81..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled-reporting.links
+++ /dev/null
@@ -1,3 +0,0 @@
-opt/rippled-reporting/etc/rippled-reporting.cfg etc/opt/rippled-reporting/rippled-reporting.cfg
-opt/rippled-reporting/etc/validators.txt etc/opt/rippled-reporting/validators.txt
-opt/rippled-reporting/bin/rippled-reporting usr/local/bin/rippled-reporting
diff --git a/Builds/containers/packaging/dpkg/debian/rippled-reporting.postinst b/Builds/containers/packaging/dpkg/debian/rippled-reporting.postinst
deleted file mode 100644
index 640441973..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled-reporting.postinst
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/sh
-set -e
-
-USER_NAME=rippled-reporting
-GROUP_NAME=rippled-reporting
-case "$1" in
-    configure)
-        id -u $USER_NAME >/dev/null 2>&1 || \
-        adduser --system --quiet \
-            --home /nonexistent --no-create-home \
-            --disabled-password \
-            --group "$GROUP_NAME"
-        chown -R $USER_NAME:$GROUP_NAME /var/log/rippled-reporting/
-        chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled-reporting/
-        chmod 755 /var/log/rippled-reporting/
-        chmod 755 /var/lib/rippled-reporting/
-        chown -R $USER_NAME:$GROUP_NAME /opt/rippled-reporting
-
-    ;;
-
-    abort-upgrade|abort-remove|abort-deconfigure)
-    ;;
-
-    *)
-        echo "postinst called with unknown argument \`$1'" >&2
-        exit 1
-    ;;
-esac
-
-
-#DEBHELPER#
-
-exit 0
diff --git a/Builds/containers/packaging/dpkg/debian/rippled.conffiles b/Builds/containers/packaging/dpkg/debian/rippled.conffiles
deleted file mode 100644
index 0c6d1c36d..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled.conffiles
+++ /dev/null
@@ -1,2 +0,0 @@
-/opt/ripple/etc/rippled.cfg
-/opt/ripple/etc/validators.txt
diff --git a/Builds/containers/packaging/dpkg/debian/rippled.install b/Builds/containers/packaging/dpkg/debian/rippled.install
deleted file mode 100644
index 3ce9f60fb..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled.install
+++ /dev/null
@@ -1,8 +0,0 @@
-opt/ripple/bin/rippled
-opt/ripple/bin/validator-keys
-opt/ripple/bin/update-rippled.sh
-opt/ripple/bin/getRippledInfo
-opt/ripple/etc/rippled.cfg
-opt/ripple/etc/validators.txt
-opt/ripple/etc/update-rippled-cron
-etc/logrotate.d/rippled
diff --git a/Builds/containers/packaging/dpkg/debian/rippled.links b/Builds/containers/packaging/dpkg/debian/rippled.links
deleted file mode 100644
index ff2abd82b..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled.links
+++ /dev/null
@@ -1,3 +0,0 @@
-opt/ripple/etc/rippled.cfg etc/opt/ripple/rippled.cfg
-opt/ripple/etc/validators.txt etc/opt/ripple/validators.txt
-opt/ripple/bin/rippled usr/local/bin/rippled
diff --git a/Builds/containers/packaging/dpkg/debian/rippled.postinst b/Builds/containers/packaging/dpkg/debian/rippled.postinst
deleted file mode 100644
index 9838fa593..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled.postinst
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/sh
-set -e
-
-USER_NAME=rippled
-GROUP_NAME=rippled
-case "$1" in
-    configure)
-        id -u $USER_NAME >/dev/null 2>&1 || \
-        adduser --system --quiet \
-            --home /nonexistent --no-create-home \
-            --disabled-password \
-            --group "$GROUP_NAME"
-        chown -R $USER_NAME:$GROUP_NAME /var/log/rippled/
-        chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled/
-        chown -R $USER_NAME:$GROUP_NAME /opt/ripple
-        chmod 755 /var/log/rippled/
-        chmod 755 /var/lib/rippled/
-        chmod 644 /opt/ripple/etc/update-rippled-cron
-        chmod 644 /etc/logrotate.d/rippled
-        chown -R root:$GROUP_NAME /opt/ripple/etc/update-rippled-cron
-    ;;
-
-    abort-upgrade|abort-remove|abort-deconfigure)
-    ;;
-
-    *)
-        echo "postinst called with unknown argument \`$1'" >&2
-        exit 1
-    ;;
-esac
-
-
-#DEBHELPER#
-
-exit 0
diff --git a/Builds/containers/packaging/dpkg/debian/rippled.postrm b/Builds/containers/packaging/dpkg/debian/rippled.postrm
deleted file mode 100644
index 9086993a1..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled.postrm
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh
-set -e
-
-case "$1" in
-    purge|remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
-    ;;
-
-    *)
-        echo "postrm called with unknown argument \`$1'" >&2
-        exit 1
-    ;;
-esac
-
-
-#DEBHELPER#
-
-exit 0
diff --git a/Builds/containers/packaging/dpkg/debian/rippled.preinst b/Builds/containers/packaging/dpkg/debian/rippled.preinst
deleted file mode 100644
index 10575345a..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled.preinst
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/sh
-set -e
-
-case "$1" in
-    install|upgrade)
-    ;;
-
-    abort-upgrade)
-    ;;
-
-    *)
-        echo "preinst called with unknown argument \`$1'" >&2
-        exit 1
-    ;;
-esac
-
-
-#DEBHELPER#
-
-exit 0
diff --git a/Builds/containers/packaging/dpkg/debian/rippled.prerm b/Builds/containers/packaging/dpkg/debian/rippled.prerm
deleted file mode 100644
index adabdbfb7..000000000
--- a/Builds/containers/packaging/dpkg/debian/rippled.prerm
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/sh
-set -e
-
-case "$1" in
-    remove|upgrade|deconfigure)
-    ;;
-
-    failed-upgrade)
-    ;;
-
-    *)
-        echo "prerm called with unknown argument \`$1'" >&2
-        exit 1
-    ;;
-esac
-
-
-#DEBHELPER#
-
-exit 0
diff --git a/Builds/containers/packaging/dpkg/debian/rules b/Builds/containers/packaging/dpkg/debian/rules
deleted file mode 100755
index d62517d34..000000000
--- a/Builds/containers/packaging/dpkg/debian/rules
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/make -f
-export DH_VERBOSE = 1
-export DH_OPTIONS = -v
-# debuild sets some warnings that don't work well
-# for our curent build..so try to remove those flags here:
-export CFLAGS:=$(subst -Wformat,,$(CFLAGS))
-export CFLAGS:=$(subst -Werror=format-security,,$(CFLAGS))
-export CXXFLAGS:=$(subst -Wformat,,$(CXXFLAGS))
-export CXXFLAGS:=$(subst -Werror=format-security,,$(CXXFLAGS))
-
-%:
-	dh $@ --with systemd
-
-override_dh_systemd_start:
-	dh_systemd_start --no-restart-on-upgrade
-
-override_dh_auto_configure:
-	env
-	rm -rf bld
-
-	conan export external/snappy snappy/1.1.9@
-
-	conan install . \
-		--install-folder bld/rippled  \
-		--build missing \
-		--build boost \
-		--build sqlite3 \
-		--settings build_type=Release
-
-	cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-		-G Ninja \
-		-DCMAKE_BUILD_TYPE=Release \
-		-DCMAKE_INSTALL_PREFIX=/opt/ripple \
-		-Dstatic=ON \
-		-Dunity=OFF \
-		-DCMAKE_VERBOSE_MAKEFILE=ON \
-		-Dvalidator_keys=ON \
-		-B bld/rippled
-
-	conan install . \
-		--install-folder bld/rippled-reporting \
-		--build missing \
-		--build boost \
-		--build sqlite3 \
-		--build libuv \
-		--settings build_type=Release \
-		--options reporting=True
-
-	cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-		-G Ninja \
-		-DCMAKE_BUILD_TYPE=Release \
-		-DCMAKE_INSTALL_PREFIX=/opt/rippled-reporting \
-		-Dstatic=ON \
-		-Dunity=OFF \
-		-DCMAKE_VERBOSE_MAKEFILE=ON \
-		-Dreporting=ON \
-		-B bld/rippled-reporting
-
-override_dh_auto_build:
-	cmake --build bld/rippled --target rippled --target validator-keys -j${nproc}
-
-	cmake --build bld/rippled-reporting --target rippled -j${nproc}
-
-override_dh_auto_install:
-	cmake --install bld/rippled --prefix debian/tmp/opt/ripple
-	install -D bld/rippled/validator-keys/validator-keys debian/tmp/opt/ripple/bin/validator-keys
-	install -D Builds/containers/shared/update-rippled.sh debian/tmp/opt/ripple/bin/update-rippled.sh
-	install -D bin/getRippledInfo debian/tmp/opt/ripple/bin/getRippledInfo
-	install -D Builds/containers/shared/update-rippled-cron debian/tmp/opt/ripple/etc/update-rippled-cron
-	install -D Builds/containers/shared/rippled-logrotate debian/tmp/etc/logrotate.d/rippled
-	rm -rf debian/tmp/opt/ripple/lib64/cmake/date
-
-	mkdir -p debian/tmp/opt/rippled-reporting/etc
-	mkdir -p debian/tmp/opt/rippled-reporting/bin
-	cp cfg/validators-example.txt debian/tmp/opt/rippled-reporting/etc/validators.txt
-
-	sed -E 's/rippled?/rippled-reporting/g' Builds/containers/shared/update-rippled.sh > debian/tmp/opt/rippled-reporting/bin/update-rippled-reporting.sh
-	sed -E 's/rippled?/rippled-reporting/g' bin/getRippledInfo > debian/tmp/opt/rippled-reporting/bin/getRippledReportingInfo
-	sed -E 's/rippled?/rippled-reporting/g' Builds/containers/shared/update-rippled-cron > debian/tmp/opt/rippled-reporting/etc/update-rippled-reporting-cron
-	sed -E 's/rippled?/rippled-reporting/g' Builds/containers/shared/rippled-logrotate > debian/tmp/etc/logrotate.d/rippled-reporting
diff --git a/Builds/containers/packaging/dpkg/debian/source/format b/Builds/containers/packaging/dpkg/debian/source/format
deleted file mode 100644
index 163aaf8d8..000000000
--- a/Builds/containers/packaging/dpkg/debian/source/format
+++ /dev/null
@@ -1 +0,0 @@
-3.0 (quilt)
diff --git a/Builds/containers/packaging/dpkg/debian/source/local-options b/Builds/containers/packaging/dpkg/debian/source/local-options
deleted file mode 100644
index 00131ee8c..000000000
--- a/Builds/containers/packaging/dpkg/debian/source/local-options
+++ /dev/null
@@ -1,2 +0,0 @@
-#abort-on-upstream-changes
-#unapply-patches
diff --git a/Builds/containers/packaging/rpm/50-rippled-reporting.preset b/Builds/containers/packaging/rpm/50-rippled-reporting.preset
deleted file mode 100644
index 50d16dd7a..000000000
--- a/Builds/containers/packaging/rpm/50-rippled-reporting.preset
+++ /dev/null
@@ -1 +0,0 @@
-enable rippled-reporting.service
\ No newline at end of file
diff --git a/Builds/containers/packaging/rpm/50-rippled.preset b/Builds/containers/packaging/rpm/50-rippled.preset
deleted file mode 100644
index 854e20a08..000000000
--- a/Builds/containers/packaging/rpm/50-rippled.preset
+++ /dev/null
@@ -1 +0,0 @@
-enable rippled.service
\ No newline at end of file
diff --git a/Builds/containers/packaging/rpm/build_rpm.sh b/Builds/containers/packaging/rpm/build_rpm.sh
deleted file mode 100755
index 57e576b51..000000000
--- a/Builds/containers/packaging/rpm/build_rpm.sh
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env bash
-set -ex
-
-cd /opt/rippled_bld/pkg
-cp -fpu rippled/Builds/containers/packaging/rpm/rippled.spec .
-cp -fpu rippled/Builds/containers/shared/update_sources.sh .
-source update_sources.sh
-
-# Build the rpm
-
-IFS='-' read -r RIPPLED_RPM_VERSION RELEASE <<< "$RIPPLED_VERSION"
-export RIPPLED_RPM_VERSION
-
-RPM_RELEASE=${RPM_RELEASE-1}
-
-# post-release version
-if [ "hf" = "$(echo "$RELEASE" | cut -c -2)" ]; then
-    RPM_RELEASE="${RPM_RELEASE}.${RELEASE}"
-# pre-release version (-b or -rc)
-elif [[ $RELEASE ]]; then
-    RPM_RELEASE="0.${RPM_RELEASE}.${RELEASE}"
-fi
-
-export RPM_RELEASE
-
-if [[ $RPM_PATCH ]]; then
-    RPM_PATCH=".${RPM_PATCH}"
-    export RPM_PATCH
-fi
-
-cd /opt/rippled_bld/pkg/rippled
-
-if [[ -n $(git status --porcelain) ]]; then
-   git status
-   error "Unstaged changes in this repo - please commit first"
-fi
-
-git archive --format tar.gz --prefix rippled/ -o ../rpmbuild/SOURCES/rippled.tar.gz HEAD
-
-cd ..
-
-source /opt/rh/devtoolset-11/enable
-
-rpmbuild --define "_topdir ${PWD}/rpmbuild" -ba rippled.spec
-
-rc=$?; if [[ $rc != 0 ]]; then
-    error "error building rpm"
-fi
-
-# Make a tar of the rpm and source rpm
-RPM_VERSION_RELEASE=$(rpm -qp --qf='%{NAME}-%{VERSION}-%{RELEASE}' ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm)
-tar_file=$RPM_VERSION_RELEASE.tar.gz
-
-cp ./rpmbuild/RPMS/x86_64/* ${PKG_OUTDIR}
-cp ./rpmbuild/SRPMS/* ${PKG_OUTDIR}
-
-RPM_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm 2>/dev/null)
-DBG_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-debuginfo*.rpm 2>/dev/null)
-DEV_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-devel*.rpm 2>/dev/null)
-REP_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/RPMS/x86_64/rippled-reporting*.rpm 2>/dev/null)
-SRC_MD5SUM=$(rpm -q --queryformat '%{SIGMD5}\n' -p ./rpmbuild/SRPMS/*.rpm 2>/dev/null)
-
-RPM_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-[0-9]*.rpm | awk '{ print $1}')"
-DBG_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-debuginfo*.rpm | awk '{ print $1}')"
-REP_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-reporting*.rpm | awk '{ print $1}')"
-DEV_SHA256="$(sha256sum ./rpmbuild/RPMS/x86_64/rippled-devel*.rpm | awk '{ print $1}')"
-SRC_SHA256="$(sha256sum ./rpmbuild/SRPMS/*.rpm | awk '{ print $1}')"
-
-echo "rpm_md5sum=$RPM_MD5SUM" >  ${PKG_OUTDIR}/build_vars
-echo "rep_md5sum=$REP_MD5SUM" >> ${PKG_OUTDIR}/build_vars
-echo "dbg_md5sum=$DBG_MD5SUM" >> ${PKG_OUTDIR}/build_vars
-echo "dev_md5sum=$DEV_MD5SUM" >> ${PKG_OUTDIR}/build_vars
-echo "src_md5sum=$SRC_MD5SUM" >> ${PKG_OUTDIR}/build_vars
-echo "rpm_sha256=$RPM_SHA256" >> ${PKG_OUTDIR}/build_vars
-echo "rep_sha256=$REP_SHA256" >> ${PKG_OUTDIR}/build_vars
-echo "dbg_sha256=$DBG_SHA256" >> ${PKG_OUTDIR}/build_vars
-echo "dev_sha256=$DEV_SHA256" >> ${PKG_OUTDIR}/build_vars
-echo "src_sha256=$SRC_SHA256" >> ${PKG_OUTDIR}/build_vars
-echo "rippled_version=$RIPPLED_VERSION" >> ${PKG_OUTDIR}/build_vars
-echo "rpm_version=$RIPPLED_RPM_VERSION" >> ${PKG_OUTDIR}/build_vars
-echo "rpm_file_name=$tar_file" >> ${PKG_OUTDIR}/build_vars
-echo "rpm_version_release=$RPM_VERSION_RELEASE" >> ${PKG_OUTDIR}/build_vars
diff --git a/Builds/containers/packaging/rpm/rippled.spec b/Builds/containers/packaging/rpm/rippled.spec
deleted file mode 100644
index fe451d645..000000000
--- a/Builds/containers/packaging/rpm/rippled.spec
+++ /dev/null
@@ -1,236 +0,0 @@
-%define rippled_version %(echo $RIPPLED_RPM_VERSION)
-%define rpm_release %(echo $RPM_RELEASE)
-%define rpm_patch %(echo $RPM_PATCH)
-%define _prefix /opt/ripple
-
-Name:           rippled
-# Dashes in Version extensions must be converted to underscores
-Version:        %{rippled_version}
-Release:        %{rpm_release}%{?dist}%{rpm_patch}
-Summary:        rippled daemon
-
-License:        MIT
-URL:            http://ripple.com/
-Source0:        rippled.tar.gz
-
-BuildRequires:  cmake zlib-static ninja-build
-
-%description
-rippled
-
-%package devel
-Summary: Files for development of applications using xrpl core library
-Group: Development/Libraries
-Requires: zlib-static
-
-%description devel
-core library for development of standalone applications that sign transactions.
-
-%package reporting
-Summary: Reporting Server for rippled
-
-%description reporting
-History server for XRP Ledger
-
-%prep
-%setup -c -n rippled
-
-%build
-rm -rf ~/.conan/profiles/default
-
-cp /opt/libcstd/libstdc++.so.6.0.22 /usr/lib64
-cp /opt/libcstd/libstdc++.so.6.0.22 /lib64
-ln -sf /usr/lib64/libstdc++.so.6.0.22 /usr/lib64/libstdc++.so.6
-ln -sf /lib64/libstdc++.so.6.0.22 /usr/lib64/libstdc++.so.6
-
-source /opt/rh/rh-python38/enable
-pip install "conan<2"
-conan profile new default --detect
-conan profile update settings.compiler.libcxx=libstdc++11 default
-conan profile update settings.compiler.cppstd=20 default
-
-cd rippled
-
-mkdir -p bld.rippled
-conan export external/snappy snappy/1.1.9@
-
-pushd bld.rippled
-conan install .. \
-     --settings build_type=Release \
-     --output-folder . \
-     --build missing
-
-cmake -G Ninja \
-     -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-     -DCMAKE_INSTALL_PREFIX=%{_prefix} \
-     -DCMAKE_BUILD_TYPE=Release \
-     -Dunity=OFF \
-     -Dstatic=ON \
-     -Dvalidator_keys=ON \
-     -DCMAKE_VERBOSE_MAKEFILE=ON \
-     ..
-
-cmake --build . --parallel $(nproc) --target rippled --target validator-keys
-popd
-
-mkdir -p bld.rippled-reporting
-pushd bld.rippled-reporting
-
-conan install .. \
-     --settings build_type=Release \
-     --output-folder . \
-     --build missing \
-     --settings compiler.cppstd=17 \
-     --options reporting=True
-
-cmake -G Ninja \
-     -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \
-     -DCMAKE_INSTALL_PREFIX=%{_prefix} \
-     -DCMAKE_BUILD_TYPE=Release \
-     -Dunity=OFF \
-     -Dstatic=ON \
-     -Dvalidator_keys=ON \
-     -Dreporting=ON \
-     -DCMAKE_VERBOSE_MAKEFILE=ON \
-     ..
-
-cmake --build . --parallel $(nproc) --target rippled
-
-%pre
-test -e /etc/pki/tls || { mkdir -p /etc/pki; ln -s /usr/lib/ssl /etc/pki/tls; }
-
-%install
-rm -rf $RPM_BUILD_ROOT
-DESTDIR=$RPM_BUILD_ROOT cmake --build rippled/bld.rippled --target install #-- -v
-mkdir -p $RPM_BUILD_ROOT
-rm -rf ${RPM_BUILD_ROOT}/%{_prefix}/lib64/
-install -d ${RPM_BUILD_ROOT}/etc/opt/ripple
-install -d ${RPM_BUILD_ROOT}/usr/local/bin
-
-install -D ./rippled/cfg/rippled-example.cfg ${RPM_BUILD_ROOT}/%{_prefix}/etc/rippled.cfg
-install -D ./rippled/cfg/validators-example.txt ${RPM_BUILD_ROOT}/%{_prefix}/etc/validators.txt
-
-ln -sf %{_prefix}/etc/rippled.cfg ${RPM_BUILD_ROOT}/etc/opt/ripple/rippled.cfg
-ln -sf %{_prefix}/etc/validators.txt ${RPM_BUILD_ROOT}/etc/opt/ripple/validators.txt
-ln -sf %{_prefix}/bin/rippled ${RPM_BUILD_ROOT}/usr/local/bin/rippled
-install -D rippled/bld.rippled/validator-keys/validator-keys ${RPM_BUILD_ROOT}%{_bindir}/validator-keys
-install -D ./rippled/Builds/containers/shared/rippled.service ${RPM_BUILD_ROOT}/usr/lib/systemd/system/rippled.service
-install -D ./rippled/Builds/containers/packaging/rpm/50-rippled.preset ${RPM_BUILD_ROOT}/usr/lib/systemd/system-preset/50-rippled.preset
-install -D ./rippled/Builds/containers/shared/update-rippled.sh ${RPM_BUILD_ROOT}%{_bindir}/update-rippled.sh
-install -D ./rippled/bin/getRippledInfo ${RPM_BUILD_ROOT}%{_bindir}/getRippledInfo
-install -D ./rippled/Builds/containers/shared/update-rippled-cron ${RPM_BUILD_ROOT}%{_prefix}/etc/update-rippled-cron
-install -D ./rippled/Builds/containers/shared/rippled-logrotate ${RPM_BUILD_ROOT}/etc/logrotate.d/rippled
-install -d $RPM_BUILD_ROOT/var/log/rippled
-install -d $RPM_BUILD_ROOT/var/lib/rippled
-
-# reporting mode
-%define _prefix /opt/rippled-reporting
-mkdir -p ${RPM_BUILD_ROOT}/etc/opt/rippled-reporting/
-install -D rippled/bld.rippled-reporting/rippled-reporting ${RPM_BUILD_ROOT}%{_bindir}/rippled-reporting
-install -D ./rippled/cfg/rippled-reporting.cfg ${RPM_BUILD_ROOT}%{_prefix}/etc/rippled-reporting.cfg
-install -D ./rippled/cfg/validators-example.txt ${RPM_BUILD_ROOT}%{_prefix}/etc/validators.txt
-install -D ./rippled/Builds/containers/packaging/rpm/50-rippled-reporting.preset ${RPM_BUILD_ROOT}/usr/lib/systemd/system-preset/50-rippled-reporting.preset
-ln -s %{_prefix}/bin/rippled-reporting ${RPM_BUILD_ROOT}/usr/local/bin/rippled-reporting
-ln -s %{_prefix}/etc/rippled-reporting.cfg ${RPM_BUILD_ROOT}/etc/opt/rippled-reporting/rippled-reporting.cfg
-ln -s %{_prefix}/etc/validators.txt ${RPM_BUILD_ROOT}/etc/opt/rippled-reporting/validators.txt
-install -d $RPM_BUILD_ROOT/var/log/rippled-reporting
-install -d $RPM_BUILD_ROOT/var/lib/rippled-reporting
-install -D ./rippled/Builds/containers/shared/rippled-reporting.service ${RPM_BUILD_ROOT}/usr/lib/systemd/system/rippled-reporting.service
-sed -E 's/rippled?/rippled-reporting/g' ./rippled/Builds/containers/shared/update-rippled.sh > ${RPM_BUILD_ROOT}%{_bindir}/update-rippled-reporting.sh
-sed -E 's/rippled?/rippled-reporting/g' ./rippled/bin/getRippledInfo > ${RPM_BUILD_ROOT}%{_bindir}/getRippledReportingInfo
-sed -E 's/rippled?/rippled-reporting/g' ./rippled/Builds/containers/shared/update-rippled-cron > ${RPM_BUILD_ROOT}%{_prefix}/etc/update-rippled-reporting-cron
-sed -E 's/rippled?/rippled-reporting/g' ./rippled/Builds/containers/shared/rippled-logrotate > ${RPM_BUILD_ROOT}/etc/logrotate.d/rippled-reporting
-
-
-%post
-%define _prefix /opt/ripple
-USER_NAME=rippled
-GROUP_NAME=rippled
-
-getent passwd $USER_NAME &>/dev/null || useradd $USER_NAME
-getent group $GROUP_NAME &>/dev/null || groupadd $GROUP_NAME
-
-chown -R $USER_NAME:$GROUP_NAME /var/log/rippled/
-chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled/
-chown -R $USER_NAME:$GROUP_NAME %{_prefix}/
-
-chmod 755 /var/log/rippled/
-chmod 755 /var/lib/rippled/
-
-chmod 644 %{_prefix}/etc/update-rippled-cron
-chmod 644 /etc/logrotate.d/rippled
-chown -R root:$GROUP_NAME %{_prefix}/etc/update-rippled-cron
-
-%post reporting
-%define _prefix /opt/rippled-reporting
-USER_NAME=rippled-reporting
-GROUP_NAME=rippled-reporting
-
-getent passwd $USER_NAME &>/dev/null || useradd -r $USER_NAME
-getent group $GROUP_NAME &>/dev/null || groupadd $GROUP_NAME
-
-chown -R $USER_NAME:$GROUP_NAME /var/log/rippled-reporting/
-chown -R $USER_NAME:$GROUP_NAME /var/lib/rippled-reporting/
-chown -R $USER_NAME:$GROUP_NAME %{_prefix}/
-
-chmod 755 /var/log/rippled-reporting/
-chmod 755 /var/lib/rippled-reporting/
-chmod -x /usr/lib/systemd/system/rippled-reporting.service
-
-
-%files
-%define _prefix /opt/ripple
-%doc rippled/README.md rippled/LICENSE.md
-%{_bindir}/rippled
-/usr/local/bin/rippled
-%{_bindir}/update-rippled.sh
-%{_bindir}/getRippledInfo
-%{_prefix}/etc/update-rippled-cron
-%{_bindir}/validator-keys
-%config(noreplace) %{_prefix}/etc/rippled.cfg
-%config(noreplace) /etc/opt/ripple/rippled.cfg
-%config(noreplace) %{_prefix}/etc/validators.txt
-%config(noreplace) /etc/opt/ripple/validators.txt
-%config(noreplace) /etc/logrotate.d/rippled
-%config(noreplace) /usr/lib/systemd/system/rippled.service
-%config(noreplace) /usr/lib/systemd/system-preset/50-rippled.preset
-
-%dir /var/log/rippled/
-%dir /var/lib/rippled/
-
-%files devel
-%{_prefix}/include
-%{_prefix}/lib/*.a
-%{_prefix}/lib/cmake/ripple
-
-%files reporting
-%define _prefix /opt/rippled-reporting
-%doc rippled/README.md rippled/LICENSE.md
-
-%{_bindir}/rippled-reporting
-/usr/local/bin/rippled-reporting
-%config(noreplace) /etc/opt/rippled-reporting/rippled-reporting.cfg
-%config(noreplace) %{_prefix}/etc/rippled-reporting.cfg
-%config(noreplace) %{_prefix}/etc/validators.txt
-%config(noreplace) /etc/opt/rippled-reporting/validators.txt
-%config(noreplace) /usr/lib/systemd/system/rippled-reporting.service
-%config(noreplace) /usr/lib/systemd/system-preset/50-rippled-reporting.preset
-%dir /var/log/rippled-reporting/
-%dir /var/lib/rippled-reporting/
-%{_bindir}/update-rippled-reporting.sh
-%{_bindir}/getRippledReportingInfo
-%{_prefix}/etc/update-rippled-reporting-cron
-%config(noreplace) /etc/logrotate.d/rippled-reporting
-
-%changelog
-* Wed Aug 28 2019 Mike Ellery 
-- Switch to subproject build for validator-keys
-
-* Wed May 15 2019 Mike Ellery 
-- Make validator-keys use local rippled build for core lib
-
-* Wed Aug 01 2018 Mike Ellery 
-- add devel package for signing library
-
-* Thu Jun 02 2016 Brandon Wilson 
-- Install validators.txt
diff --git a/Builds/containers/shared/install_cmake.sh b/Builds/containers/shared/install_cmake.sh
deleted file mode 100755
index e6f84dd6c..000000000
--- a/Builds/containers/shared/install_cmake.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env bash
-set -e
-
-IFS=. read cm_maj cm_min cm_rel <<<"$1"
-: ${cm_rel:-0}
-CMAKE_ROOT=${2:-"${HOME}/cmake"}
-
-function cmake_version ()
-{
-    if [[ -d ${CMAKE_ROOT} ]] ; then
-        local perms=$(test $(uname) = "Linux" && echo "/111" || echo "+111")
-        local installed=$(find ${CMAKE_ROOT} -perm ${perms} -type f -name cmake)
-        if [[ "${installed}" != "" ]] ; then
-            echo "$(${installed} --version | head -1)"
-        fi
-    fi
-}
-
-installed=$(cmake_version)
-if [[ "${installed}" != "" && ${installed} =~ ${cm_maj}.${cm_min}.${cm_rel} ]] ; then
-    echo "cmake already installed: ${installed}"
-    exit
-fi
-# From CMake 20+ "Linux" is lowercase so using `uname` won't create be the correct path
-if [ ${cm_min} -gt 19 ]; then
-    linux="linux"
-else
-    linux=$(uname)
-fi
-pkgname="cmake-${cm_maj}.${cm_min}.${cm_rel}-${linux}-x86_64.tar.gz"
-tmppkg="/tmp/cmake.tar.gz"
-wget --quiet https://cmake.org/files/v${cm_maj}.${cm_min}/${pkgname} -O ${tmppkg}
-mkdir -p ${CMAKE_ROOT}
-cd ${CMAKE_ROOT}
-tar --strip-components 1 -xf ${tmppkg}
-rm -f ${tmppkg}
-echo "installed: $(cmake_version)"
diff --git a/Builds/containers/shared/rippled-logrotate b/Builds/containers/shared/rippled-logrotate
deleted file mode 100644
index 120aa91d3..000000000
--- a/Builds/containers/shared/rippled-logrotate
+++ /dev/null
@@ -1,15 +0,0 @@
-/var/log/rippled/*.log {
-  daily
-  minsize 200M
-  rotate 7
-  nocreate
-  missingok
-  notifempty
-  compress
-  compresscmd /usr/bin/nice
-  compressoptions -n19 ionice -c3 gzip
-  compressext .gz
-  postrotate
-    /opt/ripple/bin/rippled --conf /opt/ripple/etc/rippled.cfg logrotate
-  endscript
-}
diff --git a/Builds/containers/shared/rippled-reporting.service b/Builds/containers/shared/rippled-reporting.service
deleted file mode 100644
index 69edf4794..000000000
--- a/Builds/containers/shared/rippled-reporting.service
+++ /dev/null
@@ -1,15 +0,0 @@
-[Unit]
-Description=Ripple Daemon
-After=network-online.target
-Wants=network-online.target
-
-[Service]
-Type=simple
-ExecStart=/opt/rippled-reporting/bin/rippled-reporting --silent --conf /etc/opt/rippled-reporting/rippled-reporting.cfg
-Restart=on-failure
-User=rippled-reporting
-Group=rippled-reporting
-LimitNOFILE=65536
-
-[Install]
-WantedBy=multi-user.target
diff --git a/Builds/containers/shared/rippled.service b/Builds/containers/shared/rippled.service
deleted file mode 100644
index 24d9dd975..000000000
--- a/Builds/containers/shared/rippled.service
+++ /dev/null
@@ -1,15 +0,0 @@
-[Unit]
-Description=Ripple Daemon
-After=network-online.target
-Wants=network-online.target
-
-[Service]
-Type=simple
-ExecStart=/opt/ripple/bin/rippled --net --silent --conf /etc/opt/ripple/rippled.cfg
-Restart=on-failure
-User=rippled
-Group=rippled
-LimitNOFILE=65536
-
-[Install]
-WantedBy=multi-user.target
diff --git a/Builds/containers/shared/update-rippled-cron b/Builds/containers/shared/update-rippled-cron
deleted file mode 100644
index c7744219f..000000000
--- a/Builds/containers/shared/update-rippled-cron
+++ /dev/null
@@ -1,10 +0,0 @@
-# For automatic updates, symlink this file to /etc/cron.d/
-# Do not remove the newline at the end of this cron script
-
-# bash required for use of RANDOM below.
-SHELL=/bin/bash
-PATH=/sbin;/bin;/usr/sbin;/usr/bin
-
-# invoke check/update script with random delay up to 59 mins
-0 * * * * root sleep $((RANDOM*3540/32768)) && /opt/ripple/bin/update-rippled.sh
-
diff --git a/Builds/containers/shared/update-rippled.sh b/Builds/containers/shared/update-rippled.sh
deleted file mode 100755
index 19409ece0..000000000
--- a/Builds/containers/shared/update-rippled.sh
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env bash
-
-# auto-update script for rippled daemon
-
-# Check for sudo/root permissions
-if [[ $(id -u) -ne 0 ]] ; then
-   echo "This update script must be run as root or sudo"
-   exit 1
-fi
-
-LOCKDIR=/tmp/rippleupdate.lock
-UPDATELOG=/var/log/rippled/update.log
-
-function cleanup {
-  # If this directory isn't removed, future updates will fail.
-  rmdir $LOCKDIR
-}
-
-# Use mkdir to check if process is already running. mkdir is atomic, as against file create.
-if ! mkdir $LOCKDIR 2>/dev/null; then
-  echo $(date -u) "lockdir exists - won't proceed." >> $UPDATELOG
-  exit 1
-fi
-trap cleanup EXIT
-
-source /etc/os-release
-can_update=false
-
-if [[ "$ID" == "ubuntu" || "$ID" == "debian" ]] ; then
-  # Silent update
-  apt-get update -qq
-
-  # The next line is an "awk"ward way to check if the package needs to be updated.
-  RIPPLE=$(apt-get install -s --only-upgrade rippled | awk '/^Inst/ { print $2 }')
-  test "$RIPPLE" == "rippled" && can_update=true
-
-  function apply_update {
-    apt-get install rippled -qq
-  }
-elif [[ "$ID" == "fedora" || "$ID" == "centos" || "$ID" == "rhel" || "$ID" == "scientific" ]] ; then
-  RIPPLE_REPO=${RIPPLE_REPO-stable}
-  yum --disablerepo=* --enablerepo=ripple-$RIPPLE_REPO clean expire-cache
-
-  yum check-update -q --enablerepo=ripple-$RIPPLE_REPO rippled || can_update=true
-
-  function apply_update {
-    yum update -y --enablerepo=ripple-$RIPPLE_REPO rippled
-  }
-else
-  echo "unrecognized distro!"
-  exit 1
-fi
-
-# Do the actual update and restart the service after reloading systemctl daemon.
-if [ "$can_update" = true ] ; then
-  exec 3>&1 1>>${UPDATELOG} 2>&1
-  set -e
-  apply_update
-  systemctl daemon-reload
-  systemctl restart rippled.service
-  echo $(date -u) "rippled daemon updated."
-else
-  echo $(date -u) "no updates available" >> $UPDATELOG
-fi
-
diff --git a/Builds/containers/shared/update_sources.sh b/Builds/containers/shared/update_sources.sh
deleted file mode 100755
index 56ca958b2..000000000
--- a/Builds/containers/shared/update_sources.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env bash
-
-function error {
-    echo $1
-    exit 1
-}
-
-cd /opt/rippled_bld/pkg/rippled
-export RIPPLED_VERSION=$(egrep -i -o "\b(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-[0-9a-z\-]+(\.[0-9a-z\-]+)*)?(\+[0-9a-z\-]+(\.[0-9a-z\-]+)*)?\b" src/ripple/protocol/impl/BuildInfo.cpp)
-
-: ${PKG_OUTDIR:=/opt/rippled_bld/pkg/out}
-export PKG_OUTDIR
-if [ ! -d ${PKG_OUTDIR} ]; then
-    error "${PKG_OUTDIR} is not mounted"
-fi
-
-if [ -x ${OPENSSL_ROOT}/bin/openssl ]; then
-    LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${OPENSSL_ROOT}/lib ${OPENSSL_ROOT}/bin/openssl version -a
-fi
-
diff --git a/Builds/containers/ubuntu-builder/Dockerfile b/Builds/containers/ubuntu-builder/Dockerfile
deleted file mode 100644
index 23723967f..000000000
--- a/Builds/containers/ubuntu-builder/Dockerfile
+++ /dev/null
@@ -1,15 +0,0 @@
-ARG DIST_TAG=18.04
-FROM ubuntu:$DIST_TAG
-ARG GIT_COMMIT=unknown
-ARG CI_USE=false
-LABEL git-commit=$GIT_COMMIT
-
-WORKDIR /root
-COPY ubuntu-builder/ubuntu_setup.sh .
-RUN ./ubuntu_setup.sh && rm ubuntu_setup.sh
-
-RUN mkdir -m 777 -p /opt/rippled_bld/pkg/
-WORKDIR /opt/rippled_bld/pkg
-
-COPY packaging/dpkg/build_dpkg.sh ./
-CMD ./build_dpkg.sh
diff --git a/Builds/containers/ubuntu-builder/ubuntu_setup.sh b/Builds/containers/ubuntu-builder/ubuntu_setup.sh
deleted file mode 100755
index cd8db7515..000000000
--- a/Builds/containers/ubuntu-builder/ubuntu_setup.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit
-set -o nounset
-set -o xtrace
-
-# Parameters
-
-gcc_version=${GCC_VERSION:-10}
-cmake_version=${CMAKE_VERSION:-3.25.1}
-conan_version=${CONAN_VERSION:-1.59}
-
-apt update
-# Iteratively build the list of packages to install so that we can interleave
-# the lines with comments explaining their inclusion.
-dependencies=''
-# - to identify the Ubuntu version
-dependencies+=' lsb-release'
-# - for add-apt-repository
-dependencies+=' software-properties-common'
-# - to download CMake
-dependencies+=' curl'
-# - to build CMake
-dependencies+=' libssl-dev'
-# - Python headers for Boost.Python
-dependencies+=' python3-dev'
-# - to install Conan
-dependencies+=' python3-pip'
-# - to download rippled
-dependencies+=' git'
-# - CMake generators (but not CMake itself)
-dependencies+=' make ninja-build'
-apt install --yes ${dependencies}
-
-add-apt-repository --yes ppa:ubuntu-toolchain-r/test
-apt install --yes gcc-${gcc_version} g++-${gcc_version} \
-  debhelper debmake debsums gnupg dh-buildinfo dh-make dh-systemd cmake \
-  ninja-build zlib1g-dev make cmake ninja-build autoconf automake \
-  pkg-config apt-transport-https
-
-# Give us nice unversioned aliases for gcc and company.
-update-alternatives --install \
-  /usr/bin/gcc gcc /usr/bin/gcc-${gcc_version} 100 \
-  --slave /usr/bin/g++ g++ /usr/bin/g++-${gcc_version} \
-  --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-${gcc_version} \
-  --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-${gcc_version} \
-  --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-${gcc_version} \
-  --slave /usr/bin/gcov gcov /usr/bin/gcov-${gcc_version} \
-  --slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-dump-${gcc_version} \
-  --slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-tool-${gcc_version}
-update-alternatives --auto gcc
-
-# Download and unpack CMake.
-cmake_slug="cmake-${cmake_version}"
-curl --location --remote-name \
-  "https://github.com/Kitware/CMake/releases/download/v${cmake_version}/${cmake_slug}.tar.gz"
-tar xzf ${cmake_slug}.tar.gz
-rm ${cmake_slug}.tar.gz
-
-# Build and install CMake.
-cd ${cmake_slug}
-./bootstrap --parallel=$(nproc)
-make --jobs $(nproc)
-make install
-cd ..
-rm --recursive --force ${cmake_slug}
-
-# Install Conan.
-pip3 install conan==${conan_version}
-
-conan profile new --detect gcc
-conan profile update settings.compiler=gcc gcc
-conan profile update settings.compiler.version=${gcc_version} gcc
-conan profile update settings.compiler.libcxx=libstdc++11 gcc
-conan profile update env.CC=/usr/bin/gcc gcc
-conan profile update env.CXX=/usr/bin/g++ gcc
diff --git a/Builds/linux/README.md b/Builds/linux/README.md
deleted file mode 100644
index fce8e824a..000000000
--- a/Builds/linux/README.md
+++ /dev/null
@@ -1 +0,0 @@
-[Build instructions are currently located in `BUILD.md`](../../BUILD.md)
diff --git a/Builds/macos/README.md b/Builds/macos/README.md
deleted file mode 100644
index fce8e824a..000000000
--- a/Builds/macos/README.md
+++ /dev/null
@@ -1 +0,0 @@
-[Build instructions are currently located in `BUILD.md`](../../BUILD.md)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 06b9d622c..1d916dcc6 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,67 +1,186 @@
-# Contributing
-The XRP Ledger has many and diverse stakeholders, and everyone deserves a chance to contribute meaningful changes to the code that runs the XRPL.
-To contribute, please:
-1. Fork the repository under your own user.
-2. Create a new branch on which to write your changes. Please note that changes which alter transaction processing must be composed via and guarded using [Amendments](https://xrpl.org/amendments.html). Changes which are _read only_ i.e. RPC, or changes which are only refactors and maintain the existing behaviour do not need to be made through an Amendment.
-3. Write and test your code.
-4. Ensure that your code compiles with the provided build engine and update the provided build engine as part of your PR where needed and where appropriate.
-5. Write test cases for your code and include those in `src/test` such that they are runnable from the command line using `./rippled -u`. (Some changes will not be able to be tested this way.)
-6. Ensure your code passes automated checks (e.g. clang-format and levelization.)
-7. Squash your commits (i.e. rebase) into as few commits as is reasonable to describe your changes at a high level (typically a single commit for a small change.)
-8. Open a PR to the main repository onto the _develop_ branch, and follow the provided template.
+The XRP Ledger has many and diverse stakeholders, and everyone deserves
+a chance to contribute meaningful changes to the code that runs the
+XRPL.
+
+# Contributing
+
+We assume you are familiar with the general practice of [making
+contributions on GitHub][1]. This file includes only special
+instructions specific to this project.
+
+
+## Before you start
+
+In general, contributions should be developed in your personal
+[fork](https://github.com/XRPLF/rippled/fork).
+
+The following branches exist in the main project repository:
+
+- `dev`: The latest set of unreleased features, and the most common
+    starting point for contributions.
+- `candidate`: The latest beta release or release candidate.
+- `release`: The latest stable release.
+
+The tip of each branch must be signed. In order for GitHub to sign a
+squashed commit that it builds from your pull request, GitHub must know
+your verifying key. Please set up [signature verification][signing].
+
+[rippled]: https://github.com/XRPLF/rippled
+[signing]:
+    https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification
+
+
+## Major contributions
+
+If your contribution is a major feature or breaking change, then you
+must first write an XRP Ledger Standard (XLS) describing it. Go to
+[XRPL-Standards](https://github.com/XRPLF/XRPL-Standards/discussions),
+choose the next available standard number, and open a discussion with an
+appropriate title to propose your draft standard.
+
+When you submit a pull request, please link the corresponding XLS in the
+description. An XLS still in draft status is considered a
+work-in-progress and open for discussion. Please allow time for
+questions, suggestions, and changes to the XLS draft. It is the
+responsibility of the XLS author to update the draft to match the final
+implementation when its corresponding pull request is merged, unless the
+author delegates that responsibility to others.
+
+
+## Before making a pull request
+
+Changes that alter transaction processing must be guarded by an
+[Amendment](https://xrpl.org/amendments.html).
+All other changes that maintain the existing behavior do not need an
+Amendment.
+
+Ensure that your code compiles according to the build instructions in
+[`BUILD.md`](./BUILD.md).
+If you create new source files, they must go under `src/ripple`.
+You will need to add them to one of the
+[source lists](./Builds/CMake/RippledCore.cmake) in CMake.
+
+Please write tests for your code.
+If you create new test source files, they must go under `src/test`.
+You will need to add them to one of the
+[source lists](./Builds/CMake/RippledCore.cmake) in CMake.
+If your test can be run offline, in under 60 seconds, then it can be an
+automatic test run by `rippled --unittest`.
+Otherwise, it must be a manual test.
+
+The source must be formatted according to the style guide below.
+
+Header includes must be [levelized](./Builds/levelization).
+
+
+## Pull requests
+
+In general, pull requests use `dev` as the base branch.
+
+(Hotfixes are an exception.)
+
+Changes to pull requests must be added as new commits.
+Once code reviewers have started looking at your code, please avoid
+force-pushing a branch in a pull request.
+This preserves the ability for reviewers to filter changes since their last
+review.
+
+A pull request must obtain **approvals from at least two reviewers** before it
+can be considered for merge by a Maintainer.
+Maintainers retain discretion to require more approvals if they feel the
+credibility of the existing approvals is insufficient.
+
+Pull requests must be merged by [squash-and-merge][2]
+to preserve a linear history for the `dev` branch.
 
-# Major Changes
-If your code change is a major feature, a breaking change or in some other way makes a significant alteration to the way the XRPL will operate, then you must first write an XLS document (XRP Ledger Standard) describing your change.
-To do this:
-1. Go to [XLS Standards](https://github.com/XRPLF/XRPL-Standards/discussions).
-2. Choose the next available standard number.
-3. Open a discussion with the appropriate title to propose your draft standard.
-4. Link your XLS in your PR.
 
 # Style guide
-This is a non-exhaustive list of recommended style guidelines. These are not always strictly enforced and serve as a way to keep the codebase coherent rather than a set of _thou shalt not_ commandments.
+
+This is a non-exhaustive list of recommended style guidelines. These are
+not always strictly enforced and serve as a way to keep the codebase
+coherent rather than a set of _thou shalt not_ commandments.
+
 
 ## Formatting
-All code must conform to `clang-format` version 10, unless the result would be unreasonably difficult to read or maintain.
-To change your code to conform use `clang-format -i `.
+
+All code must conform to `clang-format` version 10,
+according to the settings in [`.clang-format`](./.clang-format),
+unless the result would be unreasonably difficult to read or maintain.
+To demarcate lines that should be left as-is, surround them with comments like
+this:
+
+```
+// clang-format off
+...
+// clang-format on
+```
+
+You can format individual files in place by running `clang-format -i ...`
+from any directory within this project.
+
+You can install a pre-commit hook to automatically run `clang-format` before every commit:
+```
+pip3 install pre-commit
+pre-commit install
+```
 
 ## Avoid
+
 1. Proliferation of nearly identical code.
 2. Proliferation of new files and classes.
 3. Complex inheritance and complex OOP patterns.
 4. Unmanaged memory allocation and raw pointers.
-5. Macros and non-trivial templates (unless they add significant value.)
-6. Lambda patterns (unless these add significant value.)
-7. CPU or architecture-specific code unless there is a good reason to include it, and where it is used guard it with macros and provide explanatory comments.
+5. Macros and non-trivial templates (unless they add significant value).
+6. Lambda patterns (unless these add significant value).
+7. CPU or architecture-specific code unless there is a good reason to
+   include it, and where it is used, guard it with macros and provide
+   explanatory comments.
 8. Importing new libraries unless there is a very good reason to do so.
 
+
 ## Seek to
+
 9. Extend functionality of existing code rather than creating new code.
-10. Prefer readability over terseness where important logic is concerned.
-11. Inline functions that are not used or are not likely to be used elsewhere in the codebase.
-12. Use clear and self-explanatory names for functions, variables, structs and classes.
-13. Use TitleCase for classes, structs and filenames, camelCase for function and variable names, lower case for namespaces and folders.
-14. Provide as many comments as you feel that a competent programmer would need to understand what your code does.
+10. Prefer readability over terseness where important logic is
+    concerned.
+11. Inline functions that are not used or are not likely to be used
+    elsewhere in the codebase.
+12. Use clear and self-explanatory names for functions, variables,
+    structs and classes.
+13. Use TitleCase for classes, structs and filenames, camelCase for
+    function and variable names, lower case for namespaces and folders.
+14. Provide as many comments as you feel that a competent programmer
+    would need to understand what your code does.
+
 
 # Maintainers
-Maintainers are ecosystem participants with elevated access to the repository. They are able to push new code, make decisions on when a release should be made, etc.
 
-## Code Review
-New contributors' PRs must be reviewed by at least two of the maintainers. Well established prior contributors can be reviewed by a single maintainer.
+Maintainers are ecosystem participants with elevated access to the repository.
+They are able to push new code, make decisions on when a release should be
+made, etc.
 
-## Adding and Removing
-New maintainers can be proposed by two existing maintainers, subject to a vote by a quorum of the existing maintainers. A minimum of 50% support and a 50% participation is required. In the event of a tie vote, the addition of the new maintainer will be rejected.
 
-Existing maintainers can resign, or be subject to a vote for removal at the behest of two existing maintainers. A minimum of 60% agreement and 50% participation are required. The XRP Ledger Foundation will have the ability, for cause, to remove an existing maintainer without a vote.
+## Adding and removing
 
-## Existing Maintainers
-* [JoelKatz](https://github.com/JoelKatz) (Ripple)
-* [Manojsdoshi](https://github.com/manojsdoshi) (Ripple)
-* [N3tc4t](https://github.com/n3tc4t) (XRPL Labs)
-* [Nikolaos D Bougalis](https://github.com/nbougalis)
-* [Nixer89](https://github.com/nixer89) (XRP Ledger Foundation)
-* [RichardAH](https://github.com/RichardAH) (XRPL Labs + XRP Ledger Foundation)
-* [Seelabs](https://github.com/seelabs) (Ripple)
-* [Silkjaer](https://github.com/Silkjaer) (XRP Ledger Foundation)
-* [WietseWind](https://github.com/WietseWind) (XRPL Labs + XRP Ledger Foundation)
-* [Ximinez](https://github.com/ximinez) (Ripple)
+New maintainers can be proposed by two existing maintainers, subject to a vote
+by a quorum of the existing maintainers.
+A minimum of 50% support and 50% participation is required.
+In the event of a tie vote, the addition of the new maintainer will be
+rejected.
+
+Existing maintainers can resign, or be subject to a vote for removal at the
+behest of two existing maintainers.
+A minimum of 60% agreement and 50% participation are required.
+The XRP Ledger Foundation will have the ability, for cause, to remove an
+existing maintainer without a vote.
+
+
+## Current Maintainers
+
+* [Richard Holland](https://github.com/RichardAH) (XRPL Labs + XRP Ledger Foundation)
+* [Denis Angell](https://github.com/dangell7) (XRPL Labs + XRP Ledger Foundation)
+* [Wietse Wind](https://github.com/WietseWind) (XRPL Labs + XRP Ledger Foundation)
+
+
+[1]: https://docs.github.com/en/get-started/quickstart/contributing-to-projects
+[2]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/about-pull-request-merges#squash-and-merge-your-commits
\ No newline at end of file
diff --git a/LICENSE.md b/LICENSE.md
index 9282ed78b..e276f4ccd 100644
--- a/LICENSE.md
+++ b/LICENSE.md
@@ -2,6 +2,7 @@ ISC License
 
 Copyright (c) 2011, Arthur Britto, David Schwartz, Jed McCaleb, Vinnie Falco, Bob Way, Eric Lombrozo, Nikolaos D. Bougalis, Howard Hinnant.
 Copyright (c) 2012-2020, the XRP Ledger developers.
+Copyright (c) 2021-2024, XRPL Labs.
 
 Permission to use, copy, modify, and distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
diff --git a/bin/browser.js b/bin/browser.js
deleted file mode 100755
index 81618bd00..000000000
--- a/bin/browser.js
+++ /dev/null
@@ -1,470 +0,0 @@
-#!/usr/bin/node
-//
-// ledger?l=L
-// transaction?h=H
-// ledger_entry?l=L&h=H
-// account?l=L&a=A
-// directory?l=L&dir_root=H&i=I
-// directory?l=L&o=A&i=I     // owner directory
-// offer?l=L&offer=H
-// offer?l=L&account=A&i=I
-// ripple_state=l=L&a=A&b=A&c=C
-// account_lines?l=L&a=A
-//
-// A=address
-// C=currency 3 letter code
-// H=hash
-// I=index
-// L=current | closed | validated | index | hash
-//
-
-var async     = require("async");
-var extend    = require("extend");
-var http      = require("http");
-var url       = require("url");
-
-var Remote    = require("ripple-lib").Remote;
-
-var program   = process.argv[1];
-
-var httpd_response = function (res, opts) {
-  var self=this;
-
-  res.statusCode = opts.statusCode;
-  res.end(
-    ""
-      + "Title"
-      + ""
-      + "State:" + self.state
-      + ""
-      + (opts.body || '')
-      + '
'
-      + (opts.url || '')
-      + '
'
-      + ""
-      + ""
-    );
-};
-
-var html_link = function (generic) {
-  return '' + generic + '';
-};
-
-// Build a link to a type.
-var build_uri = function (params, opts) {
-  var c;
-
-  if (params.type === 'account') {
-    c = {
-        pathname: 'account',
-        query: {
-          l: params.ledger,
-          a: params.account,
-        },
-      };
-
-  } else if (params.type === 'ledger') {
-    c = {
-        pathname: 'ledger',
-        query: {
-          l: params.ledger,
-        },
-      };
-
-  } else if (params.type === 'transaction') {
-    c = {
-        pathname: 'transaction',
-        query: {
-          h: params.hash,
-        },
-      };
-  } else {
-    c = {};
-  }
-
-  opts  = opts || {};
-
-  c.protocol  = "http";
-  c.hostname  = opts.hostname || self.base.hostname;
-  c.port      = opts.port || self.base.port;
-
-  return url.format(c);
-};
-
-var build_link = function (item, link) {
-console.log(link);
-  return "" + item + "";
-};
-
-var rewrite_field = function (type, obj, field, opts) {
-  if (field in obj) {
-    obj[field]  = rewrite_type(type, obj[field], opts);
-  }
-};
-
-var rewrite_type = function (type, obj, opts) {
-  if ('amount' === type) {
-    if ('string' === typeof obj) {
-      // XRP.
-      return '' + obj + '';
-
-    } else {
-      rewrite_field('address', obj, 'issuer', opts);
-
-      return obj; 
-    }
-    return build_link(
-      obj,
-      build_uri({
-          type: 'account',
-          account: obj
-        }, opts)
-    );
-  }
-  if ('address' === type) {
-    return build_link(
-      obj,
-      build_uri({
-          type: 'account',
-          account: obj
-        }, opts)
-    );
-  }
-  else if ('ledger' === type) {
-    return build_link(
-      obj,
-      build_uri({
-          type: 'ledger',
-          ledger: obj,
-        }, opts)
-      );
-  }
-  else if ('node' === type) {
-    // A node
-    if ('PreviousTxnID' in obj)
-      obj.PreviousTxnID      = rewrite_type('transaction', obj.PreviousTxnID, opts);
-
-    if ('Offer' === obj.LedgerEntryType) {
-      if ('NewFields' in obj) {
-        if ('TakerGets' in obj.NewFields)
-          obj.NewFields.TakerGets = rewrite_type('amount', obj.NewFields.TakerGets, opts);
-
-        if ('TakerPays' in obj.NewFields)
-          obj.NewFields.TakerPays = rewrite_type('amount', obj.NewFields.TakerPays, opts);
-      }
-    }
-
-    obj.LedgerEntryType  = '' + obj.LedgerEntryType + '';
-
-    return obj;
-  }
-  else if ('transaction' === type) {
-    // Reference to a transaction.
-    return build_link(
-      obj,
-      build_uri({
-          type: 'transaction',
-          hash: obj
-        }, opts)
-      );
-  }
-
-  return 'ERROR: ' + type;
-};
-
-var rewrite_object = function (obj, opts) {
-  var out = extend({}, obj);
-
-  rewrite_field('address', out, 'Account', opts);
-
-  rewrite_field('ledger', out, 'parent_hash', opts);
-  rewrite_field('ledger', out, 'ledger_index', opts);
-  rewrite_field('ledger', out, 'ledger_current_index', opts);
-  rewrite_field('ledger', out, 'ledger_hash', opts);
-
-  if ('ledger' in obj) {
-    // It's a ledger header.
-    out.ledger  = rewrite_object(out.ledger, opts);
-
-    if ('ledger_hash' in out.ledger)
-      out.ledger.ledger_hash = '' + out.ledger.ledger_hash + '';
-
-    delete out.ledger.hash;
-    delete out.ledger.totalCoins;
-  }
-
-  if ('TransactionType' in obj) {
-    // It's a transaction.
-    out.TransactionType = '' + obj.TransactionType + '';
-
-    rewrite_field('amount', out, 'TakerGets', opts);
-    rewrite_field('amount', out, 'TakerPays', opts);
-    rewrite_field('ledger', out, 'inLedger', opts);
-
-    out.meta.AffectedNodes = out.meta.AffectedNodes.map(function (node) {
-        var kind  = 'CreatedNode' in node
-          ? 'CreatedNode'
-          : 'ModifiedNode' in node
-            ? 'ModifiedNode'
-            : 'DeletedNode' in node
-              ? 'DeletedNode'
-              : undefined;
-        
-        if (kind) {
-          node[kind]  = rewrite_type('node', node[kind], opts);
-        }
-        return node;
-      });
-  }
-  else if ('node' in obj && 'LedgerEntryType' in obj.node) {
-    // Its a ledger entry.
-
-    if (obj.node.LedgerEntryType === 'AccountRoot') {
-      rewrite_field('address', out.node, 'Account', opts);
-      rewrite_field('transaction', out.node, 'PreviousTxnID', opts);
-      rewrite_field('ledger', out.node, 'PreviousTxnLgrSeq', opts);
-    }
-
-    out.node.LedgerEntryType = '' + out.node.LedgerEntryType + '';
-  }
-
-  return out;
-};
-
-var augment_object = function (obj, opts, done) {
-  if (obj.node.LedgerEntryType == 'AccountRoot') {
-    var   tx_hash   = obj.node.PreviousTxnID;
-    var   tx_ledger = obj.node.PreviousTxnLgrSeq;
-
-    obj.history                 = [];
-
-    async.whilst(
-      function () { return tx_hash; },
-      function (callback) {
-// console.log("augment_object: request: %s %s", tx_hash, tx_ledger);
-        opts.remote.request_tx(tx_hash)
-          .on('success', function (m) {
-              tx_hash   = undefined;
-              tx_ledger = undefined;
-
-//console.log("augment_object: ", JSON.stringify(m));
-              m.meta.AffectedNodes.filter(function(n) {
-// console.log("augment_object: ", JSON.stringify(n));
-// if (n.ModifiedNode)
-// console.log("augment_object: %s %s %s %s %s %s/%s", 'ModifiedNode' in n, n.ModifiedNode && (n.ModifiedNode.LedgerEntryType === 'AccountRoot'), n.ModifiedNode && n.ModifiedNode.FinalFields && (n.ModifiedNode.FinalFields.Account === obj.node.Account), Object.keys(n)[0], n.ModifiedNode && (n.ModifiedNode.LedgerEntryType), obj.node.Account, n.ModifiedNode && n.ModifiedNode.FinalFields && n.ModifiedNode.FinalFields.Account);
-// if ('ModifiedNode' in n && n.ModifiedNode.LedgerEntryType === 'AccountRoot')
-// {
-//   console.log("***: ", JSON.stringify(m));
-//   console.log("***: ", JSON.stringify(n));
-// }
-                  return 'ModifiedNode' in n
-                    && n.ModifiedNode.LedgerEntryType === 'AccountRoot'
-                    && n.ModifiedNode.FinalFields
-                    && n.ModifiedNode.FinalFields.Account === obj.node.Account;
-                })
-              .forEach(function (n) {
-                  tx_hash   = n.ModifiedNode.PreviousTxnID;
-                  tx_ledger = n.ModifiedNode.PreviousTxnLgrSeq;
-
-                  obj.history.push({
-                      tx_hash:    tx_hash,
-                      tx_ledger:  tx_ledger
-                    });
-console.log("augment_object: next: %s %s", tx_hash, tx_ledger);
-                });
-
-              callback();
-            })
-          .on('error', function (m) {
-              callback(m);
-            })
-          .request();
-      },
-      function (err) {
-        if (err) {
-          done();
-        }
-        else {
-          async.forEach(obj.history, function (o, callback) {
-              opts.remote.request_account_info(obj.node.Account)
-                .ledger_index(o.tx_ledger)
-                .on('success', function (m) {
-//console.log("augment_object: ", JSON.stringify(m));
-                    o.Balance       = m.account_data.Balance;
-//                    o.account_data  = m.account_data;
-                    callback();
-                  })
-                .on('error', function (m) {
-                    o.error = m;
-                    callback();
-                  })
-                .request();
-            },
-            function (err) {
-              done(err);
-            });
-        }
-      });
-  }
-  else {
-    done();
-  }
-};
-
-if (process.argv.length < 4 || process.argv.length > 7) {
-  console.log("Usage: %s ws_ip ws_port [ [ []]]", program);
-}
-else {
-  var ws_ip   = process.argv[2];
-  var ws_port = process.argv[3];
-  var ip      = process.argv.length > 4 ? process.argv[4] : "127.0.0.1";
-  var port    = process.argv.length > 5 ? process.argv[5] : "8080";
-
-// console.log("START");
-  var self  = this;
-  
-  var remote  = (new Remote({
-                    websocket_ip: ws_ip,
-                    websocket_port: ws_port,
-                    trace: false
-                  }))
-                  .on('state', function (m) {
-                      console.log("STATE: %s", m);
-
-                      self.state   = m;
-                    })
-//                  .once('ledger_closed', callback)
-                  .connect()
-                  ;
-
-  self.base = {
-      hostname: ip,
-      port:     port,
-      remote:   remote,
-    };
-
-// console.log("SERVE");
-  var server  = http.createServer(function (req, res) {
-      var input = "";
-
-      req.setEncoding();
-
-      req.on('data', function (buffer) {
-          // console.log("DATA: %s", buffer);
-          input = input + buffer;
-        });
-
-      req.on('end', function () {
-          // console.log("URL: %s", req.url);
-          // console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));
-
-          var _parsed = url.parse(req.url, true);
-          var _url    = JSON.stringify(_parsed, undefined, 2);
-
-          // console.log("HEADERS: %s", JSON.stringify(_parsed, undefined, 2));
-          if (_parsed.pathname === "/account") {
-              var request = remote
-                .request_ledger_entry('account_root')
-                .ledger_index(-1)
-                .account_root(_parsed.query.a)
-                .on('success', function (m) {
-                    // console.log("account_root: %s", JSON.stringify(m, undefined, 2));
-
-                    augment_object(m, self.base, function() {
-                      httpd_response(res,
-                          {
-                            statusCode: 200,
-                            url: _url,
-                            body: ""
-                              + JSON.stringify(rewrite_object(m, self.base), undefined, 2)
-                              + "
"
-                          });
-                    });
-                  })
-                .request();
-
-          } else if (_parsed.pathname === "/ledger") {
-            var request = remote
-              .request_ledger(undefined, { expand: true, transactions: true })
-              .on('success', function (m) {
-                  // console.log("Ledger: %s", JSON.stringify(m, undefined, 2));
-
-                  httpd_response(res,
-                      {
-                        statusCode: 200,
-                        url: _url,
-                        body: ""
-                          + JSON.stringify(rewrite_object(m, self.base), undefined, 2)
-                          +"
"
-                      });
-                })
-
-            if (_parsed.query.l && _parsed.query.l.length === 64) {
-              request.ledger_hash(_parsed.query.l);
-            }
-            else if (_parsed.query.l) {
-              request.ledger_index(Number(_parsed.query.l));
-            }
-            else {
-              request.ledger_index(-1);
-            }
-
-            request.request();
-
-          } else if (_parsed.pathname === "/transaction") {
-              var request = remote
-                .request_tx(_parsed.query.h)
-//                .request_transaction_entry(_parsed.query.h)
-//              .ledger_select(_parsed.query.l)
-                .on('success', function (m) {
-                    // console.log("transaction: %s", JSON.stringify(m, undefined, 2));
-
-                    httpd_response(res,
-                        {
-                          statusCode: 200,
-                          url: _url,
-                          body: ""
-                            + JSON.stringify(rewrite_object(m, self.base), undefined, 2)
-                            +"
"
-                        });
-                  })
-                .on('error', function (m) {
-                    httpd_response(res,
-                        {
-                          statusCode: 200,
-                          url: _url,
-                          body: ""
-                            + 'ERROR: ' + JSON.stringify(m, undefined, 2)
-                            +"
"
-                        });
-                  })
-                .request();
-
-          } else {
-            var test  = build_uri({
-                type: 'account',
-                ledger: 'closed',
-                account: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
-              }, self.base);
-
-            httpd_response(res,
-                {
-                  statusCode: req.url === "/" ? 200 : 404,
-                  url: _url,
-                });
-          }
-        });
-    });
-
-  server.listen(port, ip, undefined,
-    function () {
-      console.log("Listening at: http://%s:%s", ip, port);
-    });
-}
-
-// vim:sw=2:sts=2:ts=8:et
diff --git a/bin/ci/README.md b/bin/ci/README.md
deleted file mode 100644
index 36d4fc1d3..000000000
--- a/bin/ci/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
-In this directory are two scripts, `build.sh` and `test.sh` used for building
-and testing rippled.
-
-(For now, they assume Bash and Linux. Once I get Windows containers for
-testing, I'll try them there, but if Bash is not available, then they will
-soon be joined by PowerShell scripts `build.ps` and `test.ps`.)
-
-We don't want these scripts to require arcane invocations that can only be
-pieced together from within a CI configuration. We want something that humans
-can easily invoke, read, and understand, for when we eventually have to test
-and debug them interactively. That means:
-
-(1) They should work with no arguments.
-(2) They should document their arguments.
-(3) They should expand short arguments into long arguments.
-
-While we want to provide options for common use cases, we don't need to offer
-the kitchen sink. We can rightfully expect users with esoteric, complicated
-needs to write their own scripts.
-
-To make argument-handling easy for us, the implementers, we can just take all
-arguments from environment variables. They have the nice advantage that every
-command-line uses named arguments. For the benefit of us and our users, we
-document those variables at the top of each script.
diff --git a/bin/ci/build.sh b/bin/ci/build.sh
deleted file mode 100755
index fa7a0c968..000000000
--- a/bin/ci/build.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env bash
-
-set -o xtrace
-set -o errexit
-
-# The build system. Either 'Unix Makefiles' or 'Ninja'.
-GENERATOR=${GENERATOR:-Unix Makefiles}
-# The compiler. Either 'gcc' or 'clang'.
-COMPILER=${COMPILER:-gcc}
-# The build type. Either 'Debug' or 'Release'.
-BUILD_TYPE=${BUILD_TYPE:-Debug}
-# Additional arguments to CMake.
-# We use the `-` substitution here instead of `:-` so that callers can erase
-# the default by setting `$CMAKE_ARGS` to the empty string.
-CMAKE_ARGS=${CMAKE_ARGS-'-Dwerr=ON'}
-
-# https://gitlab.kitware.com/cmake/cmake/issues/18865
-CMAKE_ARGS="-DBoost_NO_BOOST_CMAKE=ON ${CMAKE_ARGS}"
-
-if [[ ${COMPILER} == 'gcc' ]]; then
-  export CC='gcc'
-  export CXX='g++'
-elif [[ ${COMPILER} == 'clang' ]]; then
-  export CC='clang'
-  export CXX='clang++'
-fi
-
-mkdir build
-cd build
-cmake -G "${GENERATOR}" -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_ARGS} ..
-cmake --build . -- -j $(nproc)
diff --git a/bin/ci/test.sh b/bin/ci/test.sh
deleted file mode 100755
index 11615d732..000000000
--- a/bin/ci/test.sh
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env bash
-
-set -o xtrace
-set -o errexit
-
-# Set to 'true' to run the known "manual" tests in rippled.
-MANUAL_TESTS=${MANUAL_TESTS:-false}
-# The maximum number of concurrent tests.
-CONCURRENT_TESTS=${CONCURRENT_TESTS:-$(nproc)}
-# The path to rippled.
-RIPPLED=${RIPPLED:-build/rippled}
-# Additional arguments to rippled.
-RIPPLED_ARGS=${RIPPLED_ARGS:-}
-
-function join_by { local IFS="$1"; shift; echo "$*"; }
-
-declare -a manual_tests=(
-  'beast.chrono.abstract_clock'
-  'beast.unit_test.print'
-  'ripple.NodeStore.Timing'
-  'ripple.app.Flow_manual'
-  'ripple.app.NoRippleCheckLimits'
-  'ripple.app.PayStrandAllPairs'
-  'ripple.consensus.ByzantineFailureSim'
-  'ripple.consensus.DistributedValidators'
-  'ripple.consensus.ScaleFreeSim'
-  'ripple.tx.CrossingLimits'
-  'ripple.tx.FindOversizeCross'
-  'ripple.tx.Offer_manual'
-  'ripple.tx.OversizeMeta'
-  'ripple.tx.PlumpBook'
-)
-
-if [[ ${MANUAL_TESTS} == 'true' ]]; then
-  RIPPLED_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
-else
-  RIPPLED_ARGS+=" --unittest --quiet --unittest-log"
-fi
-RIPPLED_ARGS+=" --unittest-jobs ${CONCURRENT_TESTS}"
-
-${RIPPLED} ${RIPPLED_ARGS}
diff --git a/bin/ci/ubuntu/build-and-test.sh b/bin/ci/ubuntu/build-and-test.sh
deleted file mode 100755
index 7ae75f2b1..000000000
--- a/bin/ci/ubuntu/build-and-test.sh
+++ /dev/null
@@ -1,274 +0,0 @@
-#!/usr/bin/env bash
-set -ex
-
-function version_ge() { test "$(echo "$@" | tr " " "\n" | sort -rV | head -n 1)" == "$1"; }
-
-__dirname=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
-echo "using CC: ${CC}"
-"${CC}" --version
-export CC
-
-COMPNAME=$(basename $CC)
-echo "using CXX: ${CXX:-notset}"
-if [[ $CXX ]]; then
-   "${CXX}" --version
-   export CXX
-fi
-: ${BUILD_TYPE:=Debug}
-echo "BUILD TYPE: ${BUILD_TYPE}"
-
-: ${TARGET:=install}
-echo "BUILD TARGET: ${TARGET}"
-
-JOBS=${NUM_PROCESSORS:-2}
-if [[ ${TRAVIS:-false} != "true" ]]; then
-    JOBS=$((JOBS+1))
-fi
-
-if [[ ! -z "${CMAKE_EXE:-}" ]] ; then
-    export PATH="$(dirname ${CMAKE_EXE}):$PATH"
-fi
-
-if [ -x /usr/bin/time ] ; then
-    : ${TIME:="Duration: %E"}
-    export TIME
-    time=/usr/bin/time
-else
-    time=
-fi
-
-echo "Building rippled"
-: ${CMAKE_EXTRA_ARGS:=""}
-if [[ ${NINJA_BUILD:-} == true ]]; then
-    CMAKE_EXTRA_ARGS+=" -G Ninja"
-fi
-
-coverage=false
-if [[ "${TARGET}" == "coverage_report" ]] ; then
-    echo "coverage option detected."
-    coverage=true
-fi
-
-cmake --version
-CMAKE_VER=$(cmake --version | cut -d " " -f 3 | head -1)
-
-#
-# allow explicit setting of the name of the build
-# dir, otherwise default to the compiler.build_type
-#
-: "${BUILD_DIR:=${COMPNAME}.${BUILD_TYPE}}"
-BUILDARGS="--target ${TARGET}"
-BUILDTOOLARGS=""
-if version_ge $CMAKE_VER "3.12.0" ; then
-    BUILDARGS+=" --parallel"
-fi
-
-if [[ ${NINJA_BUILD:-} == false ]]; then
-    if version_ge $CMAKE_VER "3.12.0" ; then
-        BUILDARGS+=" ${JOBS}"
-    else
-        BUILDTOOLARGS+=" -j ${JOBS}"
-    fi
-fi
-
-if [[ ${VERBOSE_BUILD:-} == true ]]; then
-    CMAKE_EXTRA_ARGS+=" -DCMAKE_VERBOSE_MAKEFILE=ON"
-    if version_ge $CMAKE_VER "3.14.0" ; then
-        BUILDARGS+=" --verbose"
-    else
-        if [[ ${NINJA_BUILD:-} == false ]]; then
-            BUILDTOOLARGS+=" verbose=1"
-        else
-            BUILDTOOLARGS+=" -v"
-        fi
-    fi
-fi
-
-if [[ ${USE_CCACHE:-} == true ]]; then
-    echo "using ccache with basedir [${CCACHE_BASEDIR:-}]"
-    CMAKE_EXTRA_ARGS+=" -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
-fi
-if [ -d "build/${BUILD_DIR}" ]; then
-    rm -rf "build/${BUILD_DIR}"
-fi
-
-mkdir -p "build/${BUILD_DIR}"
-pushd "build/${BUILD_DIR}"
-
-# cleanup possible artifacts
-rm -fv CMakeFiles/CMakeOutput.log CMakeFiles/CMakeError.log
-# Clean up NIH directories which should be git repos, but aren't
-for nih_path in ${NIH_CACHE_ROOT}/*/*/*/src ${NIH_CACHE_ROOT}/*/*/src
-do
-  for dir in lz4 snappy rocksdb
-  do
-    if [ -e ${nih_path}/${dir} -a \! -e ${nih_path}/${dir}/.git ]
-    then
-      ls -la ${nih_path}/${dir}*
-      rm -rfv ${nih_path}/${dir}*
-    fi
-  done
-done
-
-# generate
-${time} cmake ../.. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_EXTRA_ARGS}
-# Display the cmake output, to help with debugging if something fails
-for file in CMakeOutput.log CMakeError.log
-do
-  if [ -f CMakeFiles/${file} ]
-  then
-    ls -l CMakeFiles/${file}
-    cat CMakeFiles/${file}
-  fi
-done
-# build
-export DESTDIR=$(pwd)/_INSTALLED_
-
-${time} eval cmake --build . ${BUILDARGS} -- ${BUILDTOOLARGS}
-
-if [[ ${TARGET} == "docs" ]]; then
-    ## mimic the standard test output for docs build
-    ## to make controlling processes like jenkins happy
-    if [ -f docs/html/index.html ]; then
-        echo "1 case, 1 test total, 0 failures"
-    else
-        echo "1 case, 1 test total, 1 failures"
-    fi
-    exit
-fi
-popd
-
-if [[ "${TARGET}" == "validator-keys" ]] ; then
-    export APP_PATH="$PWD/build/${BUILD_DIR}/validator-keys/validator-keys"
-else
-    export APP_PATH="$PWD/build/${BUILD_DIR}/rippled"
-fi
-echo "using APP_PATH: ${APP_PATH}"
-
-# See what we've actually built
-ldd ${APP_PATH}
-
-: ${APP_ARGS:=}
-
-if [[ "${TARGET}" == "validator-keys" ]] ; then
-    APP_ARGS="--unittest"
-else
-    function join_by { local IFS="$1"; shift; echo "$*"; }
-
-    # This is a list of manual tests
-    # in rippled that we want to run
-    # ORDER matters here...sorted in approximately
-    # descending execution time (longest running tests at top)
-    declare -a manual_tests=(
-        'ripple.ripple_data.reduce_relay_simulate'
-        'ripple.tx.Offer_manual'
-        'ripple.tx.CrossingLimits'
-        'ripple.tx.PlumpBook'
-        'ripple.app.Flow_manual'
-        'ripple.tx.OversizeMeta'
-        'ripple.consensus.DistributedValidators'
-        'ripple.app.NoRippleCheckLimits'
-        'ripple.ripple_data.compression'
-        'ripple.NodeStore.Timing'
-        'ripple.consensus.ByzantineFailureSim'
-        'beast.chrono.abstract_clock'
-        'beast.unit_test.print'
-    )
-    if [[ ${TRAVIS:-false} != "true" ]]; then
-        # these two tests cause travis CI to run out of memory.
-        # TODO: investigate possible workarounds.
-        manual_tests=(
-            'ripple.consensus.ScaleFreeSim'
-            'ripple.tx.FindOversizeCross'
-            "${manual_tests[@]}"
-        )
-    fi
-
-    if [[ ${MANUAL_TESTS:-} == true ]]; then
-        APP_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
-    else
-        APP_ARGS+=" --unittest --quiet --unittest-log"
-    fi
-    if [[ ${coverage} == false && ${PARALLEL_TESTS:-} == true ]]; then
-        APP_ARGS+=" --unittest-jobs ${JOBS}"
-    fi
-
-    if [[ ${IPV6_TESTS:-} == true ]]; then
-        APP_ARGS+=" --unittest-ipv6"
-    fi
-fi
-
-if [[ ${coverage} == true && $CC =~ ^gcc ]]; then
-    # Push the results (lcov.info) to codecov
-    codecov -X gcov # don't even try and look for .gcov files ;)
-    find . -name "*.gcda" | xargs rm -f
-fi
-
-if [[ ${SKIP_TESTS:-} == true ]]; then
-    echo "skipping tests."
-    exit
-fi
-
-ulimit -a
-corepat=$(cat /proc/sys/kernel/core_pattern)
-if [[ ${corepat} =~ ^[:space:]*\| ]] ; then
-    echo "WARNING: core pattern is piping - can't search for core files"
-    look_core=false
-else
-    look_core=true
-    coredir=$(dirname ${corepat})
-fi
-if [[ ${look_core} == true ]]; then
-    before=$(ls -A1 ${coredir})
-fi
-
-set +e
-echo "Running tests for ${APP_PATH}"
-if [[ ${MANUAL_TESTS:-} == true && ${PARALLEL_TESTS:-} != true ]]; then
-    for t in "${manual_tests[@]}" ; do
-        ${APP_PATH} --unittest=${t}
-        TEST_STAT=$?
-        if [[ $TEST_STAT -ne 0 ]] ; then
-            break
-        fi
-    done
-else
-    ${APP_PATH} ${APP_ARGS}
-    TEST_STAT=$?
-fi
-set -e
-
-if [[ ${look_core} == true ]]; then
-    after=$(ls -A1 ${coredir})
-    oIFS="${IFS}"
-    IFS=$'\n\r'
-    found_core=false
-    for l in $(diff -w --suppress-common-lines <(echo "$before") <(echo "$after")) ; do
-        if [[ "$l" =~ ^[[:space:]]*\>[[:space:]]*(.+)$ ]] ; then
-            corefile="${BASH_REMATCH[1]}"
-            echo "FOUND core dump file at '${coredir}/${corefile}'"
-            gdb_output=$(/bin/mktemp /tmp/gdb_output_XXXXXXXXXX.txt)
-            found_core=true
-            gdb \
-                -ex "set height 0" \
-                -ex "set logging file ${gdb_output}" \
-                -ex "set logging on" \
-                -ex "print 'ripple::BuildInfo::versionString'" \
-                -ex "thread apply all backtrace full" \
-                -ex "info inferiors" \
-                -ex quit \
-                "$APP_PATH" \
-                "${coredir}/${corefile}" &> /dev/null
-
-            echo -e "CORE INFO: \n\n $(cat ${gdb_output}) \n\n)"
-        fi
-    done
-    IFS="${oIFS}"
-fi
-
-if [[ ${found_core} == true ]]; then
-    exit -1
-else
-    exit $TEST_STAT
-fi
-
diff --git a/bin/ci/ubuntu/build-in-docker.sh b/bin/ci/ubuntu/build-in-docker.sh
deleted file mode 100755
index feeabb118..000000000
--- a/bin/ci/ubuntu/build-in-docker.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env bash
-# run our build script in a docker container
-# using travis-ci hosts
-set -eux
-
-function join_by { local IFS="$1"; shift; echo "$*"; }
-
-set +x
-echo "VERBOSE_BUILD=true" > /tmp/co.env
-matchers=(
-   'TRAVIS.*' 'CI' 'CC' 'CXX'
-   'BUILD_TYPE' 'TARGET' 'MAX_TIME'
-   'CODECOV.+' 'CMAKE.*' '.+_TESTS'
-   '.+_OPTIONS' 'NINJA.*' 'NUM_.+'
-   'NIH_.+' 'BOOST.*' '.*CCACHE.*')
-
-matchstring=$(join_by '|' "${matchers[@]}")
-echo "MATCHSTRING IS:: $matchstring"
-env | grep -E "^(${matchstring})=" >> /tmp/co.env
-set -x
-# need to eliminate TRAVIS_CMD...don't want to pass it to the container
-cat /tmp/co.env | grep -v TRAVIS_CMD > /tmp/co.env.2
-mv /tmp/co.env.2 /tmp/co.env
-cat /tmp/co.env
-mkdir -p -m 0777 ${TRAVIS_BUILD_DIR}/cores
-echo "${TRAVIS_BUILD_DIR}/cores/%e.%p" | sudo tee /proc/sys/kernel/core_pattern
-docker run \
-    -t --env-file /tmp/co.env \
-    -v ${TRAVIS_HOME}:${TRAVIS_HOME} \
-    -w ${TRAVIS_BUILD_DIR} \
-    --cap-add SYS_PTRACE \
-    --ulimit "core=-1" \
-    $DOCKER_IMAGE \
-    /bin/bash -c 'if [[ $CC =~ ([[:alpha:]]+)-([[:digit:].]+) ]] ; then sudo update-alternatives --set ${BASH_REMATCH[1]} /usr/bin/$CC; fi; bin/ci/ubuntu/build-and-test.sh'
-
-
diff --git a/bin/ci/ubuntu/travis-cache-start.sh b/bin/ci/ubuntu/travis-cache-start.sh
deleted file mode 100755
index 6811acb90..000000000
--- a/bin/ci/ubuntu/travis-cache-start.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env bash
-# some cached files create churn, so save them here for
-# later restoration before packing the cache
-set -eux
-clean_cache="travis_clean_cache"
-if [[ ! ( "${TRAVIS_JOB_NAME}" =~ "windows" || \
-    "${TRAVIS_JOB_NAME}" =~ "prereq-keep" ) ]] && \
-    ( [[ "${TRAVIS_COMMIT_MESSAGE}" =~ "${clean_cache}" ]] || \
-        ( [[ -v TRAVIS_PULL_REQUEST_SHA && \
-            "${TRAVIS_PULL_REQUEST_SHA}" != "" ]] && \
-          git log -1 "${TRAVIS_PULL_REQUEST_SHA}" | grep -cq "${clean_cache}" -
-        )
-    )
-then
-    find ${TRAVIS_HOME}/_cache -maxdepth 2 -type d
-    rm -rf ${TRAVIS_HOME}/_cache
-    mkdir -p ${TRAVIS_HOME}/_cache
-fi
-
-pushd ${TRAVIS_HOME}
-if [ -f cache_ignore.tar ] ; then
-    rm -f cache_ignore.tar
-fi
-
-if [ -d _cache/nih_c ] ; then
-    find _cache/nih_c -name "build.ninja" | tar rf cache_ignore.tar --files-from -
-    find _cache/nih_c -name ".ninja_deps" | tar rf cache_ignore.tar --files-from -
-    find _cache/nih_c -name ".ninja_log" | tar rf cache_ignore.tar --files-from -
-    find _cache/nih_c -name "*.log" | tar rf cache_ignore.tar --files-from -
-    find _cache/nih_c -name "*.tlog" | tar rf cache_ignore.tar --files-from -
-    # show .a files in the cache, for sanity checking
-    find _cache/nih_c -name "*.a" -ls
-fi
-
-if [ -d _cache/ccache ] ; then
-    find _cache/ccache -name "stats" | tar rf cache_ignore.tar --files-from -
-fi
-
-if [ -f cache_ignore.tar ] ; then
-    tar -tf cache_ignore.tar
-fi
-popd
-
-
diff --git a/bin/debug_local_sign.js b/bin/debug_local_sign.js
deleted file mode 100644
index 24f9aab48..000000000
--- a/bin/debug_local_sign.js
+++ /dev/null
@@ -1,64 +0,0 @@
-var ripple = require('ripple-lib');
-
-var v = {
-  seed: "snoPBrXtMeMyMHUVTgbuqAfg1SUTb",
-  addr: "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh"
-};
-
-var remote = ripple.Remote.from_config({
-  "trusted" : true,
-  "websocket_ip" : "127.0.0.1",
-  "websocket_port" : 5006,
-  "websocket_ssl" : false,
-  "local_signing" : true
-});
-
-var tx_json = {
-	"Account" : v.addr,
-	"Amount" : "10000000",
-	"Destination" : "rEu2ULPiEQm1BAL8pYzmXnNX1aFX9sCks",
-	"Fee" : "10",
-	"Flags" : 0,
-	"Sequence" : 3,
-	"TransactionType" : "Payment"
-
-  //"SigningPubKey": '0396941B22791A448E5877A44CE98434DB217D6FB97D63F0DAD23BE49ED45173C9'
-};
-
-remote.on('connected', function () {
-  var req = remote.request_sign(v.seed, tx_json);
-  req.message.debug_signing = true;
-  req.on('success', function (result) {
-    console.log("SERVER RESULT");
-    console.log(result);
-
-    var sim = {};
-    var tx = remote.transaction();
-    tx.tx_json = tx_json;
-    tx._secret = v.seed;
-    tx.complete();
-    var unsigned = tx.serialize().to_hex();
-    tx.sign();
-
-    sim.tx_blob = tx.serialize().to_hex();
-    sim.tx_json = tx.tx_json;
-    sim.tx_signing_hash = tx.signing_hash().to_hex();
-    sim.tx_unsigned = unsigned;
-
-    console.log("\nLOCAL RESULT");
-    console.log(sim);
-
-    remote.connect(false);
-  });
-  req.on('error', function (err) {
-    if (err.error === "remoteError" && err.remote.error === "srcActNotFound") {
-      console.log("Please fund account "+v.addr+" to run this test.");
-    } else {
-      console.log('error', err);
-    }
-    remote.connect(false);
-  });
-  req.request();
-
-});
-remote.connect();
diff --git a/bin/email_hash.js b/bin/email_hash.js
deleted file mode 100755
index ab4f97c47..000000000
--- a/bin/email_hash.js
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/node
-//
-// Returns a Gravatar style hash as per: http://en.gravatar.com/site/implement/hash/
-//
-
-if (3 != process.argv.length) {
-  process.stderr.write("Usage: " + process.argv[1] + " email_address\n\nReturns gravatar style hash.\n");
-  process.exit(1);
-
-} else {
-  var md5 = require('crypto').createHash('md5');
-
-  md5.update(process.argv[2].trim().toLowerCase());
-
-  process.stdout.write(md5.digest('hex') + "\n");
-}
-
-// vim:sw=2:sts=2:ts=8:et
diff --git a/bin/flash_policy.js b/bin/flash_policy.js
deleted file mode 100755
index e1361d46d..000000000
--- a/bin/flash_policy.js
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/node
-//
-// This program allows IE 9 ripple-clients to make websocket connections to
-// rippled using flash.  As IE 9 does not have websocket support, this required
-// if you wish to support IE 9 ripple-clients.
-//
-// http://www.lightsphere.com/dev/articles/flash_socket_policy.html
-//
-// For better security, be sure to set the Port below to the port of your
-// [websocket_public_port].
-//
-
-var net	    = require("net"),
-    port    = "*",
-    domains = ["*:"+port]; // Domain:Port
-
-net.createServer(
-  function(socket) {
-    socket.write("\n");
-    socket.write("\n");
-    socket.write("\n");
-    domains.forEach(
-      function(domain) {
-        var parts = domain.split(':');
-        socket.write("\t\n");
-      }
-    );
-    socket.write("\n");
-    socket.end();
-  }
-).listen(843);
diff --git a/bin/getRippledInfo b/bin/getRippledInfo
deleted file mode 100755
index abfa449ba..000000000
--- a/bin/getRippledInfo
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/env bash
-
-# This script generates information about your rippled installation
-# and system. It can be used to help debug issues that you may face
-# in your installation. While this script endeavors to not display any 
-# sensitive information, it is recommended that you read the output
-# before sharing with any third parties.
-
-
-rippled_exe=/opt/ripple/bin/rippled
-conf_file=/etc/opt/ripple/rippled.cfg
-
-while getopts ":e:c:" opt; do
-    case $opt in
-        e)
-            rippled_exe=${OPTARG}
-            ;;
-        c)
-            conf_file=${OPTARG}
-            ;;
-        \?)
-            echo "Invalid option: -$OPTARG"
-            exit -1
-    esac
-done
-
-tmp_loc=$(mktemp -d --tmpdir ripple_info.XXXXX)
-chmod 751 ${tmp_loc}
-awk_prog=${tmp_loc}/cfg.awk
-summary_out=${tmp_loc}/rippled_info.md
-printf "# rippled report info\n\n> generated at %s\n" "$(date -R)" > ${summary_out}
-
-function log_section {
-    printf "\n## %s\n" "$*" >> ${summary_out}
-
-    while read -r l; do
-        echo "    $l" >> ${summary_out}
-    done > ${awk_prog}
-    BEGIN {FS="[[:space:]]*=[[:space:]]*"; skip=0; db_path=""; print > OUT_FILE; split(exl,exa,"|")}
-    /^#/ {next}
-    save==2 && /^[[:space:]]*$/ {next}
-    /^\[.+\]$/ {
-      section=tolower(gensub(/^\[[[:space:]]*([a-zA-Z_]+)[[:space:]]*\]$/, "\\1", "g"))
-      skip = 0
-      for (i in exa) {
-        if (section == exa[i])
-          skip = 1
-      }
-      if (section == "database_path")
-        save = 1
-    }
-    skip==1 {next}
-    save==2 {save=0; db_path=$0}
-    save==1 {save=2}
-    $1 ~ /password/ {$0=$1"="}
-    {print >> OUT_FILE}
-    END {print db_path}
-EOP
-
-    db=$(\
-        sed -r -e 's/\//g;s/^[[:space:]]*//;s/[[:space:]]*$//' ${conf_file} |\
-        awk -v OUT_FILE=${cleaned_conf} -v exl="$(join_by '|' "${exclude[@]}")" -f ${awk_prog})
-    rm ${awk_prog}
-    cat ${cleaned_conf} | log_section "cleaned config file"
-    rm ${cleaned_conf}
-    echo "${db}"  | log_section "database path"
-    df ${db}      | log_section "df: database"
-fi
-
-# Send output from this script to a log file
-## this captures any messages
-## or errors from the script itself
-
-log_file=${tmp_loc}/get_info.log
-exec 3>&1 1>>${log_file} 2>&1
-
-## Send all stdout files to /tmp
-
-if [[ -x ${rippled_exe} ]] ; then
-    pgrep rippled && \
-    ${rippled_exe} --conf ${conf_file} \
-    -- server_info                  | log_section "server info"
-fi
-
-cat /proc/meminfo                   | log_section "meminfo"
-cat /proc/swaps                     | log_section "swap space"
-ulimit -a                           | log_section "ulimit"
-
-if command -v lshw >/dev/null 2>&1 ; then
-    lshw    2>/dev/null             | log_section "hardware info"
-else
-    lscpu                           >  ${tmp_loc}/hw_info.txt
-    hwinfo                          >> ${tmp_loc}/hw_info.txt
-    lspci                           >> ${tmp_loc}/hw_info.txt
-    lsblk                           >> ${tmp_loc}/hw_info.txt
-    cat ${tmp_loc}/hw_info.txt | log_section "hardware info"
-    rm ${tmp_loc}/hw_info.txt
-fi
-
-if command -v iostat >/dev/null 2>&1 ; then
-    iostat -t -d -x 2 6             | log_section "iostat"
-fi
-
-df -h                               | log_section "free disk space"
-drives=($(df | awk '$1 ~ /^\/dev\// {print $1}' | xargs -n 1 basename))
-block_devs=($(ls /sys/block/))
-for d in "${drives[@]}"; do
-    for dev in "${block_devs[@]}"; do
-        #echo "D: [$d], DEV: [$dev]"
-        if [[ $d =~ $dev ]]; then
-            # this file (if exists) has 0 for SSD and 1 for HDD
-            if [[ "$(cat /sys/block/${dev}/queue/rotational 2>/dev/null)" == 0 ]] ; then
-                echo "${d} : SSD" >> ${tmp_loc}/is_ssd.txt
-            else
-                echo "${d} : NO SSD" >> ${tmp_loc}/is_ssd.txt
-            fi
-        fi
-    done
-done
-
-if [[ -f ${tmp_loc}/is_ssd.txt ]] ; then
-    cat ${tmp_loc}/is_ssd.txt | log_section "SSD"
-    rm ${tmp_loc}/is_ssd.txt
-fi
-
-cat ${log_file} | log_section "script log"
-
-cat << MSG | tee /dev/fd/3
-####################################################
-  rippled info has been gathered. Please copy the
-  contents of ${summary_out}
-  to a github gist at https://gist.github.com/
-
-  PLEASE REVIEW THIS FILE FOR ANY SENSITIVE DATA
-  BEFORE POSTING! We have tried our best to omit
-  any sensitive information from this file, but you
-  should verify before posting.
-####################################################
-MSG
-
diff --git a/bin/hexify.js b/bin/hexify.js
deleted file mode 100755
index 1e2fb7000..000000000
--- a/bin/hexify.js
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/node
-//
-// Returns hex of lowercasing a string.
-//
-
-var stringToHex = function (s) {
-  return Array.prototype.map.call(s, function (c) {
-      var b = c.charCodeAt(0);
-
-      return b < 16 ? "0" + b.toString(16) : b.toString(16);
-    }).join("");
-};
-
-if (3 != process.argv.length) {
-  process.stderr.write("Usage: " + process.argv[1] + " string\n\nReturns hex of lowercasing string.\n");
-  process.exit(1);
-
-} else {
-
-  process.stdout.write(stringToHex(process.argv[2].toLowerCase()) + "\n");
-}
-
-// vim:sw=2:sts=2:ts=8:et
diff --git a/bin/jsonrpc_request.js b/bin/jsonrpc_request.js
deleted file mode 100755
index 0b9c08666..000000000
--- a/bin/jsonrpc_request.js
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/node
-//
-// This is a tool to issue JSON-RPC requests from the command line.
-//
-// This can be used to test a JSON-RPC server.
-//
-// Requires: npm simple-jsonrpc
-//
-
-var jsonrpc   = require('simple-jsonrpc');
-
-var program   = process.argv[1];
-
-if (5 !== process.argv.length) {
-  console.log("Usage: %s   ", program);
-}
-else {
-  var url       = process.argv[2];
-  var method    = process.argv[3];
-  var json_raw  = process.argv[4];
-  var json;
-
-  try {
-    json      = JSON.parse(json_raw);
-  }
-  catch (e) {
-      console.log("JSON parse error: %s", e.message);
-      throw e;
-  }
-
-  var client  = jsonrpc.client(url);
-
-  client.call(method, json,
-    function (result) {
-      console.log(JSON.stringify(result, undefined, 2));
-    },
-    function (error) {
-      console.log(JSON.stringify(error, undefined, 2));
-    });
-}
-
-// vim:sw=2:sts=2:ts=8:et
diff --git a/bin/jsonrpc_server.js b/bin/jsonrpc_server.js
deleted file mode 100755
index 4cd3ffb95..000000000
--- a/bin/jsonrpc_server.js
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/node
-//
-// This is a tool to listen for JSON-RPC requests at an IP and port.
-//
-// This will report the request to console and echo back the request as the response.
-//
-
-var http      = require("http");
-
-var program   = process.argv[1];
-
-if (4 !== process.argv.length) {
-  console.log("Usage: %s  ", program);
-}
-else {
-  var ip      = process.argv[2];
-  var port    = process.argv[3];
-
-  var server  = http.createServer(function (req, res) {
-      console.log("CONNECT");
-      var input = "";
-
-      req.setEncoding();
-
-      req.on('data', function (buffer) {
-          // console.log("DATA: %s", buffer);
-          input = input + buffer;
-        });
-
-      req.on('end', function () {
-          // console.log("END");
-
-          var json_req;
-
-          console.log("URL: %s", req.url);
-          console.log("HEADERS: %s", JSON.stringify(req.headers, undefined, 2));
-
-          try {
-            json_req = JSON.parse(input);
-
-            console.log("REQ: %s", JSON.stringify(json_req, undefined, 2));
-          }
-          catch (e) {
-            console.log("BAD JSON: %s", e.message);
-
-            json_req = { error : e.message }
-          }
-
-          res.statusCode = 200;
-          res.end(JSON.stringify({
-              jsonrpc: "2.0",
-              result: { request : json_req },
-              id: req.id
-            }));
-        });
-
-      req.on('close', function () {
-          console.log("CLOSE");
-        });
-    });
-
-  server.listen(port, ip, undefined,
-    function () {
-      console.log("Listening at: %s:%s", ip, port);
-    });
-}
-
-// vim:sw=2:sts=2:ts=8:et
diff --git a/bin/rlint.js b/bin/rlint.js
deleted file mode 100755
index ce12e9560..000000000
--- a/bin/rlint.js
+++ /dev/null
@@ -1,252 +0,0 @@
-#!/usr/bin/node
-
-var async       = require('async');
-var Remote      = require('ripple-lib').Remote;
-var Transaction = require('ripple-lib').Transaction;
-var UInt160     = require('ripple-lib').UInt160;
-var Amount      = require('ripple-lib').Amount;
-
-var book_key = function (book) {
-  return book.taker_pays.currency
-    + ":" + book.taker_pays.issuer
-    + ":" + book.taker_gets.currency
-    + ":" + book.taker_gets.issuer;
-};
-
-var book_key_cross = function (book) {
-  return book.taker_gets.currency
-    + ":" + book.taker_gets.issuer
-    + ":" + book.taker_pays.currency
-    + ":" + book.taker_pays.issuer;
-};
-
-var ledger_verify = function (ledger) {
-  var dir_nodes = ledger.accountState.filter(function (entry) {
-      return entry.LedgerEntryType === 'DirectoryNode'    // Only directories
-        && entry.index === entry.RootIndex                // Only root nodes
-        && 'TakerGetsCurrency' in entry;                  // Only offer directories
-    });
-
-  var books = {};
-
-  dir_nodes.forEach(function (node) {
-      var book = {
-        taker_gets: {
-            currency: UInt160.from_generic(node.TakerGetsCurrency).to_json(),
-            issuer: UInt160.from_generic(node.TakerGetsIssuer).to_json()
-          },
-        taker_pays: {
-          currency: UInt160.from_generic(node.TakerPaysCurrency).to_json(),
-          issuer: UInt160.from_generic(node.TakerPaysIssuer).to_json()
-        },
-        quality: Amount.from_quality(node.RootIndex),
-        index: node.RootIndex
-      };
-
-      books[book_key(book)] = book;
-
-//      console.log(JSON.stringify(node, undefined, 2));
-    });
-
-//  console.log(JSON.stringify(dir_entry, undefined, 2));
-  console.log("#%s books: %s", ledger.ledger_index, Object.keys(books).length);
-
-  Object.keys(books).forEach(function (key) {
-      var book        = books[key];
-      var key_cross   = book_key_cross(book);
-      var book_cross  = books[key_cross];
-
-      if (book && book_cross && !book_cross.done)
-      {
-        var book_cross_quality_inverted = Amount.from_json("1.0/1/1").divide(book_cross.quality);
-
-        if (book_cross_quality_inverted.compareTo(book.quality) >= 0)
-        {
-          // Crossing books
-          console.log("crossing: #%s :: %s :: %s :: %s :: %s :: %s :: %s", ledger.ledger_index, key, book.quality.to_text(), book_cross.quality.to_text(), book_cross_quality_inverted.to_text(),
-            book.index, book_cross.index);
-        }
-
-        book_cross.done = true;
-      }
-    });
-
-  var ripple_selfs  = {};
-
-  var accounts  = {};
-  var counts    = {};
-
-  ledger.accountState.forEach(function (entry) {
-      if (entry.LedgerEntryType === 'Offer')
-      {
-        counts[entry.Account] = (counts[entry.Account] || 0) + 1;
-      }
-      else if (entry.LedgerEntryType === 'RippleState')
-      {
-        if (entry.Flags & (0x10000 | 0x40000))
-        {
-          counts[entry.LowLimit.issuer]   = (counts[entry.LowLimit.issuer] || 0) + 1;
-        }
-
-        if (entry.Flags & (0x20000 | 0x80000))
-        {
-          counts[entry.HighLimit.issuer]  = (counts[entry.HighLimit.issuer] || 0) + 1;
-        }
-
-        if (entry.HighLimit.issuer === entry.LowLimit.issuer)
-          ripple_selfs[entry.Account] = entry;
-      }
-      else if (entry.LedgerEntryType == 'AccountRoot')
-      {
-        accounts[entry.Account] = entry;
-      }
-    });
-
-  var low               = 0;  // Accounts with too low a count.
-  var high              = 0;
-  var missing_accounts  = 0;  // Objects with no referencing account.
-  var missing_objects   = 0;  // Accounts specifying an object but having none.
-
-  Object.keys(counts).forEach(function (account) {
-      if (account in accounts)
-      {
-        if (counts[account] !== accounts[account].OwnerCount)
-        {
-          if (counts[account] < accounts[account].OwnerCount)
-          {
-            high  += 1;
-            console.log("%s: high count %s/%s", account, counts[account], accounts[account].OwnerCount);
-          }
-          else
-          {
-            low   += 1;
-            console.log("%s: low count %s/%s", account, counts[account], accounts[account].OwnerCount);
-          }
-        }
-      }
-      else
-      {
-        missing_accounts  += 1;
-
-        console.log("%s: missing : count %s", account, counts[account]);
-      }
-    });
-
-  Object.keys(accounts).forEach(function (account) {
-      if (!('OwnerCount' in accounts[account]))
-      {
-          console.log("%s: bad entry : %s", account, JSON.stringify(accounts[account], undefined, 2));
-      }
-      else if (!(account in counts) && accounts[account].OwnerCount)
-      {
-          missing_objects += 1;
-
-          console.log("%s: no objects : %s/%s", account, 0, accounts[account].OwnerCount);
-      }
-    });
-
-  if (low)
-    console.log("counts too low = %s", low);
-
-  if (high)
-    console.log("counts too high = %s", high);
-
-  if (missing_objects)
-    console.log("missing_objects = %s", missing_objects);
-
-  if (missing_accounts)
-    console.log("missing_accounts = %s", missing_accounts);
-
-  if (Object.keys(ripple_selfs).length)
-    console.log("RippleState selfs = %s", Object.keys(ripple_selfs).length);
-
-};
-
-var ledger_request = function (remote, ledger_index, done) {
- remote.request_ledger(undefined, {
-      accounts: true,
-      expand: true,
-    })
-  .ledger_index(ledger_index)
-  .on('success', function (m) {
-      // console.log("ledger: ", ledger_index);
-      // console.log("ledger: ", JSON.stringify(m, undefined, 2));
-      done(m.ledger);
-    })
-  .on('error', function (m) {
-      console.log("error");
-      done();
-    })
-  .request();
-};
-
-var usage = function () {
-  console.log("rlint.js _websocket_ip_ _websocket_port_ ");
-};
-
-var finish = function (remote) {
-  remote.disconnect();
-
-  // XXX Because remote.disconnect() doesn't work:
-  process.exit();
-};
-
-console.log("args: ", process.argv.length);
-console.log("args: ", process.argv);
-
-if (process.argv.length < 4) {
-  usage();
-}
-else {
-  var remote  = Remote.from_config({
-        websocket_ip:   process.argv[2],
-        websocket_port: process.argv[3],
-      })
-    .once('ledger_closed', function (m) {
-        console.log("ledger_closed: ", JSON.stringify(m, undefined, 2));
-
-        if (process.argv.length === 5) {
-          var ledger_index  = process.argv[4];
-
-          ledger_request(remote, ledger_index, function (l) {
-              if (l) {
-                ledger_verify(l);
-              }
-
-              finish(remote);
-            });
-
-        } else if (process.argv.length === 6) {
-          var ledger_start  = Number(process.argv[4]);
-          var ledger_end    = Number(process.argv[5]);
-          var ledger_cursor = ledger_end;
-
-          async.whilst(
-            function () {
-              return ledger_start <= ledger_cursor && ledger_cursor <=ledger_end;
-            },
-            function (callback) {
-              // console.log(ledger_cursor);
-
-              ledger_request(remote, ledger_cursor, function (l) {
-                  if (l) {
-                    ledger_verify(l);
-                  }
-
-                  --ledger_cursor;
-
-                  callback();
-                });
-            },
-            function (error) {
-              finish(remote);
-            });
-
-        } else {
-          finish(remote);
-        }
-      })
-    .connect();
-}
-
-// vim:sw=2:sts=2:ts=8:et
diff --git a/bin/sh/install-vcpkg.sh b/bin/sh/install-vcpkg.sh
deleted file mode 100755
index 8cf8f2d08..000000000
--- a/bin/sh/install-vcpkg.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env bash
-set -exu
-
-: ${TRAVIS_BUILD_DIR:=""}
-: ${VCPKG_DIR:=".vcpkg"}
-export VCPKG_ROOT=${VCPKG_DIR}
-: ${VCPKG_DEFAULT_TRIPLET:="x64-windows-static"}
-
-export VCPKG_DEFAULT_TRIPLET
-
-EXE="vcpkg"
-if [[ -z ${COMSPEC:-} ]]; then
-    EXE="${EXE}.exe"
-fi
-
-if [[ -d "${VCPKG_DIR}" && -x "${VCPKG_DIR}/${EXE}" && -d "${VCPKG_DIR}/installed" ]] ; then
-    echo "Using cached vcpkg at ${VCPKG_DIR}"
-    ${VCPKG_DIR}/${EXE} list
-else
-    if [[ -d "${VCPKG_DIR}" ]] ; then
-        rm -rf "${VCPKG_DIR}"
-    fi
-    git clone --branch 2021.04.30 https://github.com/Microsoft/vcpkg.git ${VCPKG_DIR}
-    pushd ${VCPKG_DIR}
-    BSARGS=()
-    if [[ "$(uname)" == "Darwin" ]] ; then
-        BSARGS+=(--allowAppleClang)
-    fi
-    if [[ -z ${COMSPEC:-} ]]; then
-        chmod +x ./bootstrap-vcpkg.sh
-        time ./bootstrap-vcpkg.sh "${BSARGS[@]}"
-    else
-        time ./bootstrap-vcpkg.bat
-    fi
-    popd
-fi
-
-# TODO: bring boost in this way as well ?
-# NOTE: can pin specific ports to a commit/version like this:
-#    git checkout  ports/boost
-if [ $# -eq 0 ]; then
-    echo "No extra packages specified..."
-    PKGS=()
-else
-    PKGS=( "$@" )
-fi
-for LIB in "${PKGS[@]}"; do
-    time ${VCPKG_DIR}/${EXE} --clean-after-build install ${LIB}
-done
-
-
diff --git a/bin/sh/setup-msvc.sh b/bin/sh/setup-msvc.sh
deleted file mode 100755
index 8d61c9757..000000000
--- a/bin/sh/setup-msvc.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-
-# NOTE: must be sourced from a shell so it can export vars
-
-cat << BATCH > ./getenv.bat
-CALL %*
-ENV
-BATCH
-
-while read line ; do
-  IFS='"' read x path arg <<<"${line}"
-  if [ -f "${path}" ] ; then
-    echo "FOUND: $path"
-    export VCINSTALLDIR=$(./getenv.bat "${path}" ${arg} | grep "^VCINSTALLDIR=" | sed -E "s/^VCINSTALLDIR=//g")
-    if [ "${VCINSTALLDIR}" != "" ] ; then
-      echo "USING ${VCINSTALLDIR}"
-      export LIB=$(./getenv.bat "${path}" ${arg} | grep "^LIB=" | sed -E "s/^LIB=//g")
-      export LIBPATH=$(./getenv.bat "${path}" ${arg} | grep "^LIBPATH=" | sed -E "s/^LIBPATH=//g")
-      export INCLUDE=$(./getenv.bat "${path}" ${arg} | grep "^INCLUDE=" | sed -E "s/^INCLUDE=//g")
-      ADDPATH=$(./getenv.bat "${path}" ${arg} | grep "^PATH=" | sed -E "s/^PATH=//g")
-      export PATH="${ADDPATH}:${PATH}"
-      break
-    fi
-  fi
-done <= 7
-
-import argparse
-import asyncio
-import configparser
-import contextlib
-import json
-import logging
-import os
-from pathlib import Path
-import platform
-import subprocess
-import time
-import urllib.error
-import urllib.request
-
-# Enable asynchronous subprocesses on Windows. The default changed in 3.8.
-# https://docs.python.org/3.7/library/asyncio-platforms.html#subprocess-support-on-windows
-if (platform.system() == 'Windows' and sys.version_info.major == 3
-        and sys.version_info.minor < 8):
-    asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
-
-DEFAULT_EXE = 'rippled'
-DEFAULT_CONFIGURATION_FILE = 'rippled.cfg'
-# Number of seconds to wait before forcefully terminating.
-PATIENCE = 120
-# Number of contiguous seconds in a sync state to be considered synced.
-DEFAULT_SYNC_DURATION = 60
-# Number of seconds between polls of state.
-DEFAULT_POLL_INTERVAL = 5
-SYNC_STATES = ('full', 'validating', 'proposing')
-
-
-def read_config(config_file):
-    # strict = False: Allow duplicate keys, e.g. [rpc_startup].
-    # allow_no_value = True: Allow keys with no values. Generally, these
-    # instances use the "key" as the value, and the section name is the key,
-    # e.g. [debug_logfile].
-    # delimiters = ('='): Allow ':' as a character in Windows paths. Some of
-    # our "keys" are actually values, and we don't want to split them on ':'.
-    config = configparser.ConfigParser(
-        strict=False,
-        allow_no_value=True,
-        delimiters=('='),
-    )
-    config.read(config_file)
-    return config
-
-
-def to_list(value, separator=','):
-    """Parse a list from a delimited string value."""
-    return [s.strip() for s in value.split(separator) if s]
-
-
-def find_log_file(config_file):
-    """Try to figure out what log file the user has chosen. Raises all kinds
-    of exceptions if there is any possibility of ambiguity."""
-    config = read_config(config_file)
-    values = list(config['debug_logfile'].keys())
-    if len(values) < 1:
-        raise ValueError(
-            f'no [debug_logfile] in configuration file: {config_file}')
-    if len(values) > 1:
-        raise ValueError(
-            f'too many [debug_logfile] in configuration file: {config_file}')
-    return values[0]
-
-
-def find_http_port(config_file):
-    config = read_config(config_file)
-    names = list(config['server'].keys())
-    for name in names:
-        server = config[name]
-        if 'http' in to_list(server.get('protocol', '')):
-            return int(server['port'])
-    raise ValueError(f'no server in [server] for "http" protocol')
-
-
-@contextlib.asynccontextmanager
-async def rippled(exe=DEFAULT_EXE, config_file=DEFAULT_CONFIGURATION_FILE):
-    """A context manager for a rippled process."""
-    # Start the server.
-    process = await asyncio.create_subprocess_exec(
-        str(exe),
-        '--conf',
-        str(config_file),
-        stdout=subprocess.DEVNULL,
-        stderr=subprocess.DEVNULL,
-    )
-    logging.info(f'rippled started with pid {process.pid}')
-    try:
-        yield process
-    finally:
-        # Ask it to stop.
-        logging.info(f'asking rippled (pid: {process.pid}) to stop')
-        start = time.time()
-        process.terminate()
-
-        # Wait nicely.
-        try:
-            await asyncio.wait_for(process.wait(), PATIENCE)
-        except asyncio.TimeoutError:
-            # Ask the operating system to kill it.
-            logging.warning(f'killing rippled ({process.pid})')
-            try:
-                process.kill()
-            except ProcessLookupError:
-                pass
-
-        code = await process.wait()
-        end = time.time()
-        logging.info(
-            f'rippled stopped after {end - start:.1f} seconds with code {code}'
-        )
-
-
-async def sync(
-        port,
-        *,
-        duration=DEFAULT_SYNC_DURATION,
-        interval=DEFAULT_POLL_INTERVAL,
-):
-    """Poll rippled on an interval until it has been synced for a duration."""
-    start = time.perf_counter()
-    while (time.perf_counter() - start) < duration:
-        await asyncio.sleep(interval)
-
-        request = urllib.request.Request(
-            f'http://127.0.0.1:{port}',
-            data=json.dumps({
-                'method': 'server_state'
-            }).encode(),
-            headers={'Content-Type': 'application/json'},
-        )
-        with urllib.request.urlopen(request) as response:
-            try:
-                body = json.loads(response.read())
-            except urllib.error.HTTPError as cause:
-                logging.warning(f'server_state returned not JSON: {cause}')
-                start = time.perf_counter()
-                continue
-
-        try:
-            state = body['result']['state']['server_state']
-        except KeyError as cause:
-            logging.warning(f'server_state response missing key: {cause.key}')
-            start = time.perf_counter()
-            continue
-        logging.info(f'server_state: {state}')
-        if state not in SYNC_STATES:
-            # Require a contiguous sync state.
-            start = time.perf_counter()
-
-
-async def loop(test,
-               *,
-               exe=DEFAULT_EXE,
-               config_file=DEFAULT_CONFIGURATION_FILE):
-    """
-    Start-test-stop rippled in an infinite loop.
-
-    Moves log to a different file after each iteration.
-    """
-    log_file = find_log_file(config_file)
-    id = 0
-    while True:
-        logging.info(f'iteration: {id}')
-        async with rippled(exe, config_file) as process:
-            start = time.perf_counter()
-            exited = asyncio.create_task(process.wait())
-            tested = asyncio.create_task(test())
-            # Try to sync as long as the process is running.
-            done, pending = await asyncio.wait(
-                {exited, tested},
-                return_when=asyncio.FIRST_COMPLETED,
-            )
-            if done == {exited}:
-                code = exited.result()
-                logging.warning(
-                    f'server halted for unknown reason with code {code}')
-            else:
-                assert done == {tested}
-                assert tested.exception() is None
-            end = time.perf_counter()
-            logging.info(f'synced after {end - start:.0f} seconds')
-        os.replace(log_file, f'debug.{id}.log')
-        id += 1
-
-
-logging.basicConfig(
-    format='%(asctime)s %(levelname)-8s %(message)s',
-    level=logging.INFO,
-    datefmt='%Y-%m-%d %H:%M:%S',
-)
-
-parser = argparse.ArgumentParser(
-    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-parser.add_argument(
-    'rippled',
-    type=Path,
-    nargs='?',
-    default=DEFAULT_EXE,
-    help='Path to rippled.',
-)
-parser.add_argument(
-    '--conf',
-    type=Path,
-    default=DEFAULT_CONFIGURATION_FILE,
-    help='Path to configuration file.',
-)
-parser.add_argument(
-    '--duration',
-    type=int,
-    default=DEFAULT_SYNC_DURATION,
-    help='Number of contiguous seconds required in a synchronized state.',
-)
-parser.add_argument(
-    '--interval',
-    type=int,
-    default=DEFAULT_POLL_INTERVAL,
-    help='Number of seconds to wait between polls of state.',
-)
-args = parser.parse_args()
-
-port = find_http_port(args.conf)
-
-
-def test():
-    return sync(port, duration=args.duration, interval=args.interval)
-
-
-try:
-    asyncio.run(loop(test, exe=args.rippled, config_file=args.conf))
-except KeyboardInterrupt:
-    # Squelch the message. This is a normal mode of exit.
-    pass
diff --git a/bin/stop-test.js b/bin/stop-test.js
deleted file mode 100644
index 45aa15e27..000000000
--- a/bin/stop-test.js
+++ /dev/null
@@ -1,133 +0,0 @@
-/* -------------------------------- REQUIRES -------------------------------- */
-
-var child = require("child_process");
-var assert = require("assert");
-
-/* --------------------------------- CONFIG --------------------------------- */
-
-if (process.argv[2] == null) {
-  [
-   'Usage: ',
-   '',
-   '  `node bin/stop-test.js i,j [rippled_path] [rippled_conf]`',
-   '',
-   '  Launch rippled and stop it after n seconds for all n in [i, j}',
-   '  For all even values of n launch rippled with `--fg`',
-   '  For values of n where n % 3 == 0 launch rippled with `--fg`\n',
-   'Examples: ',
-   '',
-   '  $ node bin/stop-test.js 5,10',
-   ('  $ node bin/stop-test.js 1,4 ' +
-      'build/clang.debug/rippled $HOME/.confs/rippled.cfg')
-   ]
-      .forEach(function(l){console.log(l)});
-
-  process.exit();
-} else {
-  var testRange = process.argv[2].split(',').map(Number);
-  var rippledPath = process.argv[3] || 'build/rippled'
-  var rippledConf = process.argv[4] || 'rippled.cfg'
-}
-
-var options = {
-  env: process.env,
-  stdio: 'ignore' // we could dump the child io when it fails abnormally
-};
-
-// default args
-var conf_args = ['--conf='+rippledConf];
-var start_args  = conf_args.concat([/*'--net'*/])
-var stop_args = conf_args.concat(['stop']);
-
-/* --------------------------------- HELPERS -------------------------------- */
-
-function start(args) {
-    return child.spawn(rippledPath, args, options);
-}
-function stop(rippled) { child.execFile(rippledPath, stop_args, options)}
-function secs_l8r(ms, f) {setTimeout(f, ms * 1000); }
-
-function show_results_and_exit(results) {
-  console.log(JSON.stringify(results, undefined, 2));
-  process.exit();
-}
-
-var timeTakes = function (range) {
-  function sumRange(n) {return (n+1) * n /2}
-  var ret = sumRange(range[1]);
-  if (range[0] > 1) {
-    ret = ret - sumRange(range[0] - 1)
-  }
-  var stopping = (range[1] - range[0]) * 0.5;
-  return ret + stopping;
-}
-
-/* ---------------------------------- TEST ---------------------------------- */
-
-console.log("Test will take ~%s seconds", timeTakes(testRange));
-
-(function oneTest(n /* seconds */, results) {
-  if (n >= testRange[1]) {
-    // show_results_and_exit(results);
-    console.log(JSON.stringify(results, undefined, 2));
-    oneTest(testRange[0], []);
-    return;
-  }
-
-  var args = start_args;
-  if (n % 2 == 0) {args = args.concat(['--fg'])}
-  if (n % 3 == 0) {args = args.concat(['--net'])}
-
-  var result = {args: args, alive_for: n};
-  results.push(result);
-
-  console.log("\nLaunching `%s` with `%s` for %d seconds",
-                rippledPath, JSON.stringify(args), n);
-
-  rippled = start(args);
-  console.log("Rippled pid: %d", rippled.pid);
-
-  // defaults
-  var b4StopSent = false;
-  var stopSent = false;
-  var stop_took = null;
-
-  rippled.once('exit', function(){
-    if (!stopSent && !b4StopSent) {
-      console.warn('\nRippled exited itself b4 stop issued');
-      process.exit();
-    };
-
-    // The io handles close AFTER exit, may have implications for
-    // `stdio:'inherit'` option to `child.spawn`.
-    rippled.once('close', function() {
-      result.stop_took = (+new Date() - stop_took) / 1000; // seconds
-      console.log("Stopping after %d seconds took %s seconds",
-                   n, result.stop_took);
-      oneTest(n+1, results);
-    });
-  });
-
-  secs_l8r(n, function(){
-    console.log("Stopping rippled after %d seconds", n);
-
-    // possible race here ?
-    // seems highly unlikely, but I was having issues at one point
-    b4StopSent=true;
-    stop_took = (+new Date());
-    // when does `exit` actually get sent?
-    stop();
-    stopSent=true;
-
-    // Sometimes we want to attach with a debugger.
-    if (process.env.ABORT_TESTS_ON_STALL != null) {
-      // We wait 30 seconds, and if it hasn't stopped, we abort the process
-      secs_l8r(30, function() {
-        if (result.stop_took == null) {
-          console.log("rippled has stalled");
-          process.exit();
-        };
-      });
-    }
-  })
-}(testRange[0], []));
\ No newline at end of file
diff --git a/bin/update_binformat.js b/bin/update_binformat.js
deleted file mode 100644
index 7987f72c8..000000000
--- a/bin/update_binformat.js
+++ /dev/null
@@ -1,119 +0,0 @@
-/**
- * bin/update_bintypes.js
- *
- * This unholy abomination of a script generates the JavaScript file
- * src/js/bintypes.js from various parts of the C++ source code.
- *
- * This should *NOT* be part of any automatic build process unless the C++
- * source data are brought into a more easily parseable format. Until then,
- * simply run this script manually and fix as needed.
- */
-
-// XXX: Process LedgerFormats.(h|cpp) as well.
-
-var filenameProto = __dirname + '/../src/cpp/ripple/SerializeProto.h',
-    filenameTxFormatsH = __dirname + '/../src/cpp/ripple/TransactionFormats.h',
-    filenameTxFormats = __dirname + '/../src/cpp/ripple/TransactionFormats.cpp';
-
-var fs = require('fs');
-
-var output = [];
-
-// Stage 1: Get the field types and codes from SerializeProto.h
-var types = {},
-    fields = {};
-String(fs.readFileSync(filenameProto)).split('\n').forEach(function (line) {
-  line = line.replace(/^\s+|\s+$/g, '').replace(/\s+/g, '');
-  if (!line.length || line.slice(0, 2) === '//' || line.slice(-1) !== ')') return;
-
-  var tmp = line.slice(0, -1).split('('),
-      type = tmp[0],
-      opts = tmp[1].split(',');
-
-  if (type === 'TYPE') types[opts[1]] = [opts[0], +opts[2]];
-  else if (type === 'FIELD') fields[opts[0]] = [types[opts[1]][0], +opts[2]];
-});
-
-output.push('var ST = require("./serializedtypes");');
-output.push('');
-output.push('var REQUIRED = exports.REQUIRED = 0,');
-output.push('    OPTIONAL = exports.OPTIONAL = 1,');
-output.push('    DEFAULT  = exports.DEFAULT  = 2;');
-output.push('');
-
-function pad(s, n) { while (s.length < n) s += ' '; return s; }
-function padl(s, n) { while (s.length < n) s = ' '+s; return s; }
-
-Object.keys(types).forEach(function (type) {
-  output.push(pad('ST.'+types[type][0]+'.id', 25) + ' = '+types[type][1]+';');
-});
-output.push('');
-
-// Stage 2: Get the transaction type IDs from TransactionFormats.h
-var ttConsts = {};
-String(fs.readFileSync(filenameTxFormatsH)).split('\n').forEach(function (line) {
-  var regex = /tt([A-Z_]+)\s+=\s+([0-9-]+)/;
-  var match = line.match(regex);
-  if (match) ttConsts[match[1]] = +match[2];
-});
-
-// Stage 3: Get the transaction formats from TransactionFormats.cpp
-var base = [],
-    sections = [],
-    current = base;
-String(fs.readFileSync(filenameTxFormats)).split('\n').forEach(function (line) {
-  line = line.replace(/^\s+|\s+$/g, '').replace(/\s+/g, '');
-
-  var d_regex = /DECLARE_TF\(([A-Za-z]+),tt([A-Z_]+)/;
-  var d_match = line.match(d_regex);
-
-  var s_regex = /SOElement\(sf([a-z]+),SOE_(REQUIRED|OPTIONAL|DEFAULT)/i;
-  var s_match = line.match(s_regex);
-
-  if (d_match) sections.push(current = [d_match[1], ttConsts[d_match[2]]]);
-  else if (s_match) current.push([s_match[1], s_match[2]]);
-});
-
-function removeFinalComma(arr) {
-  arr[arr.length-1] = arr[arr.length-1].slice(0, -1);
-}
-
-output.push('var base = [');
-base.forEach(function (field) {
-  var spec = fields[field[0]];
-  output.push('  [ '+
-              pad("'"+field[0]+"'", 21)+', '+
-              pad(field[1], 8)+', '+
-              padl(""+spec[1], 2)+', '+
-              'ST.'+pad(spec[0], 3)+
-              ' ],');
-});
-removeFinalComma(output);
-output.push('];');
-output.push('');
-
-
-output.push('exports.tx = {');
-sections.forEach(function (section) {
-  var name = section.shift(),
-      ttid = section.shift();
-
-  output.push('  '+name+': ['+ttid+'].concat(base, [');
-  section.forEach(function (field) {
-    var spec = fields[field[0]];
-    output.push('    [ '+
-                pad("'"+field[0]+"'", 21)+', '+
-                pad(field[1], 8)+', '+
-                padl(""+spec[1], 2)+', '+
-                'ST.'+pad(spec[0], 3)+
-                ' ],');
-  });
-  removeFinalComma(output);
-  output.push('  ]),');
-});
-removeFinalComma(output);
-output.push('};');
-output.push('');
-
-console.log(output.join('\n'));
-
diff --git a/cfg/genesis.json b/cfg/genesis.json
index 45eda37ed..fe7bdb445 100644
--- a/cfg/genesis.json
+++ b/cfg/genesis.json
@@ -4,13 +4,13 @@
     "accountState": [
       {
         "Account": "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
-        "Balance": "100000000000000000",
+        "Balance": "100000000000000",
         "Flags": 0,
         "LedgerEntryType": "AccountRoot",
         "OwnerCount": 0,
         "PreviousTxnID": "A92EF82C3C68F771927E3892A2F708F12CBD492EF68A860F042E4053C8EC6C8D",
-        "PreviousTxnLgrSeq": 3,
-        "Sequence": 4,
+        "PreviousTxnLgrSeq": 0,
+        "Sequence": 1,
         "index": "2B6AC232AA4C4BE41BF49D2459FA4A0347E1B543A4C92FCEE0821C0201E2E9A8"
       },
       {
@@ -31,6 +31,7 @@
           "C4483A1896170C66C098DEA5B0E024309C60DC960DE5F01CD7AF986AA3D9AD37",
           "8F81B066ED20DAECA20DF57187767685EEF3980B228E0667A650BAF24426D3B4",
           "621A0B264970359869E3C0363A899909AAB7A887C8B73519E4ECF952D33258A8",
+          "30CD365592B8EE40489BA01AE2F7555CAC9C983145871DC82A42A31CF5BAE7D9",
           "89308AF3B8B10B7192C4E613E1D2E4D9BA64B2EE2D5232402AE82A6A7220D953",
           "00C1FC4A53E60AB02C864641002B3172F38677E29C26C5406685179B37E1EDAC",
           "25BA44241B3BD880770BFA4DA21C7180576831855368CBEC6A3154FDE4A7676E",
@@ -44,7 +45,7 @@
           "98DECF327BF79997AEC178323AD51A830E457BFC6D454DAF3E46E5EC42DC619F",
           "B2A4DB846F0891BF2C76AB2F2ACC8F5B4EC64437135C6E56F3F859DE5FFD5856",
           "32A122F1352A4C7B3A6D790362CC34749C5E57FCE896377BFDC6CCD14F6CD627",
-          "F1ED6B4A411D8B872E65B9DCB4C8B100375B0DD3D62D07192E011D6D7F339013",
+          "DF8B4536989BDACE3F934F29423848B9F1D76D09BE6A1FCFE7E7F06AA26ABEAD",
           "75A7E01C505DD5A179DFE3E000A9B6F1EDDEB55A12F95579A23E15B15DC8BE5A",
           "47C3002ABA31628447E8E9A8B315FAA935CE30183F9A9B86845E469CA2CDC3DF",
           "93E516234E35E08CA689FA33A6D38E103881F8DCB53023F728C307AA89D515A7",
@@ -55,34 +56,180 @@
           "42F8B586B357ABBAAAA1C733C3E7D3B75761395340D0CDF600179E8737E22478",
           "919857E4B902A20216E4819B9BD9FD1FD19A66ECF63151C18A4C48C873DB9578",
           "ECF412BE0964EC2E71DCF807EEEA6EA8470D3DB15173D46F28AB6E234860AC32",
+          "F5751842D26FC057B92CAA435ABF4F1428C2BCC4180D18775ADE92CB2643BBA3",
+          "D686F2538F410C9D0D856788E98E3579595DAF7B38D38887F81ECAC934B06040",
           "86E83A7D2ECE3AD5FA87AB2195AE015C950469ABF0B72EAACED318F74886AE90",
           "3C43D9A973AA4443EF3FC38E42DD306160FBFFDAB901CD8BAA15D09F2597EB87",
           "0285B7E5E08E1A8E4C15636F0591D87F73CB6A7B6452A932AD72BBC8E5D1CBE3",
+          "6E739F4F8B07BED29FC9FF440DA3C301CD14A180DF45819F658FEC2F7DE31427",
           "36799EA497B1369B170805C078AEFE6188345F9B3E324C21E9CA3FF574E3C3D6"
         ],
         "Flags": 0,
         "LedgerEntryType": "Amendments",
         "index": "7DB0788C020F02780A673DC74757F23823FA3014C1866E72CC4CD8B226CD6EF4"
+      },
+      {
+        "BaseFee": "A",
+        "Flags": 0,
+        "LedgerEntryType": "FeeSettings",
+        "ReferenceFeeUnits": 10,
+        "ReserveBase": 1000000,
+        "ReserveIncrement": 200000,
+        "AccountCount": 0,
+        "XahauActivationLgrSeq": 0,
+        "index": "4BC50C9B0D8515D3EAAE1E74B29A95804346C491EE1A95BF25E4AAB854A6A651"
+      },
+      {
+        "CreateCode": "0061736D0100000001610E60017F017E60027F7F017F6000017E60037F7F7E017E60037F7F7F017E60027F7F017E60047F7F7F7F017E60037E7F7F017E60017E017E60037E7E7F017E60097F7F7F7F7F7F7F7F7F017E60027F7E017E60027E7E017E60057F7F7F7F7F017E02DE031D03656E760C6574786E5F72657365727665000003656E76025F67000103656E76096F74786E5F74797065000203656E7606616363657074000303656E760A6F74786E5F6669656C64000403656E760C686F6F6B5F6163636F756E74000503656E76057374617465000603656E7608726F6C6C6261636B000303656E7609666C6F61745F696E74000703656E760A666C6F61745F7369676E000803656E7609666C6F61745F6F6E65000203656E760D666C6F61745F636F6D70617265000903656E760B7574696C5F6B65796C6574000A03656E7608736C6F745F736574000403656E760D736C6F745F7375626669656C64000403656E7604736C6F74000403656E76106C65646765725F6C6173745F74696D65000203656E760974726163655F6E756D000303656E760A6C65646765725F736571000203656E7609666C6F61745F736574000B03656E760C666C6F61745F646976696465000C03656E760E666C6F61745F6D756C7469706C79000C03656E76096F74786E5F736C6F74000003656E760A736C6F745F666C6F6174000003656E760A736C6F745F636F756E74000003656E76057472616365000D03656E760C6574786E5F64657461696C73000503656E760D6574786E5F6665655F62617365000503656E7604656D697400060302010005030100020608017F0141A093040B07080104686F6F6B001D0AEB8E0001E78E0002027F067E230041E03E6B22002400410110001A4101410110011A100242E200520440419E10411E42EA0010031A0B200041C03E6A41144181802010041A200041A03E6A411410051A20002903A03E20002903C03E510440024020002802B03E20002802D03E4720002903A83E20002903C83E52720D0041BC10411D42F60010031A0B0B200042D5AA81AAE2F4F5E5D3003703983E2000428080AAE3FA95CF84D6003703903E200041983E6A410841D910410210061A200041903E6A410841DC10410210061A024020002903983E420159044020002903903E22034200550D010B41DF10412C42820110071A20002903903E21030B0240200341004100100822034200590440024020002903983E10094200520D0020002903983E100A4104100B4200520D0020002903903E100A4102100B500D020B0B418B1141CD0042890110071A0B200041E03D6A220241224103200041C03E6A41144100410041004100100C1A200241224101100D
1A410141E4800C4102100E420252044041D811412142940110031A0B410141E380084103100E1A410141E480084104100E1A4101418280184105100E1A410141E280084106100E1A410041004106100F2104101020047D22042003530440418E10418E102D0000200320047D220342C0843D7F420A81A76A3A0000418F10418F102D0000200342A08D067F420A81A76A3A00004190104190102D000020034290CE007F420A81A76A3A00004191104191102D0000200342E8077F420A81A76A3A00004192104192102D0000200342E4007F420A81A76A3A00004193104193102D00002003420A7F2204420A81A76A3A00004194104194102D000020032004420A7E7DA76A3A0000418010411E42AD0110071A0B410041004102100F2103410041004103100F2104410041004104100F2105410041004105100F210641F911410B200310111A4185124105200410111A418B124104200510111A2004420159410020054200551B450440419012411B42BC0110071A0B101220047D22044200570440419012411B42C40110071A0B1012210741AB124103200642C0FFFFFFFFFFFFFF1F83220642C0843D80220810111A41F911410B200310111A41F911410B2003200720057D220520087E420020054200551B4200200642BF843D561B7C220310111A41002003101322034200570440419012411B42D90110071A0B41002004101322044200570440419012411B42DC0110071A0B200320041014210341AF12410A20002903983E20031015220310111A200341064101100822034214802104410A1016420A520440419012411B42EA0110071A0B410A41888018410B100E420B520440419012411B42ED0110071A0B410B101722054201590440200541064101100820037C21030B41AA0F20033C000041A90F20034208883C000041A80F20034210883C000041A70F20034218883C000041A60F20034220883C000041A50F20034228883C000041A40F20034230883C000041A30F2003423888A7413F7141C000723A000041F10941C00F29030037030041D90941A80F29030037030041D10941A00F29030037030041DE0941144181802010041A41AA0F20043C000041A90F20044208883C000041A80F20044210883C000041A70F20044218883C000041A60F20044220883C000041A50F20044228883C000041A40F20044230883C000041A30F2004423888A741C000723A000041F309210141D00F41224101100D42015104400240410141DF803C4101100E4201520D00200041306A41843C4101100F4201530D00200041003A00004101101822034280012003428001541B2104200041D7006A2101420021030340419D8280807841810110011A200320045A45044041BA12410420014114
410110191A200041012001411410064201510440024020002D0000220241144B0D00200041C03C6A20024103746A42013703000B0B2001413C6A2101200342017C21030C010B0B41F309210141002102034041A882808078411510011A200020023A002F200241FF017141134B2001419B0F4F720D01200041C03C6A20002D002F22024103746A29030050450440200141A00F290300370300200141A80F290300370308200141C00F290300370320200141226A20012001410D6A41142000412F6A410110064214511B210120002D002F21020B200241016A21020C000B000B0B200141F1013A000041DB0841F400101A1A419A08418008200141FF076B2202101B22034238862003423888423F8384200342288642808080808080C0FF0083842003421886428080808080E03F838420034208864280808080F01F8384200342088842808080F80F83842003421888428080FC07838420034228884280FE03838442C000843703004198081012220442057C22033C0000419208200442017C22043C000041970820034208883C000041960820034210883C000041950820034218883C000041910820044208883C000041900820044210883C0000418F0820044218883C000041BE1241064180082002410110191A41C412410B200041204180082002101C220310111A2003427F57044041D012411E42D80210071A0B41EE12412942DB0210031A200041E03E6A240020030B0BBB0407004180080B2412006022800000002400000000201A00000000201B0000000068400000000000000073210041C5080B168114B5F762798A53D543A014CAF8B297CFF8F2F937E80041CF090B02F0600041A00F0B0DE06061000000000000000083140041C10F0B31E10000000000004D43000000000000005261E32E7A24A238F1C619D5F9DDCC41A94B33B66C0163F7EFCC8A19C9FD6F28DC004180100B1D596F75206D75737420776169742030303030303030207365636F6E647300419E100BF8025265776172643A2050617373696E67206E6F6E2D636C61696D2074786E005265776172643A2050617373696E67206F7574676F696E672074786E005252005244005265776172643A2052657761726473206172652064697361626C656420627920676F7665726E616E63652E005265776172643A205265776172647320696E636F72726563746C7920636F6E6669677572656420627920676F7665726E616E6365206F7220756E7265636F76657261626C65206572726F722E005265776172643A2050617373696E67207265776172642073657475702074786E00616363756D756C61746F72006669727374006C617374005265776172643A20417373657274696F6E206661696C7572652E0062
616C0078666C5F7265776172640061763A00656D69743A00656D69745F726573756C74005265776172643A20456D6974206C6F6F706261636B206661696C65642E005265776172643A20456D6974746564207265776172642074786E207375636365737366756C6C792E",
+        "Fee": "8620",
+        "Flags": 0,
+        "HookApiVersion": 0,
+        "HookHash": "610F33B8EBF7EC795F822A454FB852156AEFE50BE0CB8326338A81CD74801864",
+        "HookNamespace": "0000000000000000000000000000000000000000000000000000000000000000",
+        "HookOn": "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFBFFFFFFFFFFFFFFFFFFBFFFFF",
+        "HookParameters": [],
+        "HookSetTxnID": "6B4BE82F808A27CF7AFA27394D98BF5A9AC2AF33CF02B3F6274A8A04B1A3762F",
+        "LedgerEntryType": "HookDefinition",
+        "ReferenceCount": "1",
+        "index": "052BCC5B9525C99521AFD9E1A4AAFCE1A517C648560702CD3120EC4BC3A1E78F"
+      },
+      {
+        "CreateCode": "0061736D0100000001420960027F7F017F60017F017E60047F7F7F7F017E60037F7F7E017E6000017E60037F7F7F017E60027F7F017E60057F7F7F7F7F017E60097F7F7F7F7F7F7F7F7F017E02DE021603656E76025F67000003656E760C6574786E5F72657365727665000103656E760A6F74786E5F706172616D000203656E760974726163655F6E756D000303656E76096F74786E5F74797065000403656E7606616363657074000303656E760A6F74786E5F6669656C64000503656E760C686F6F6B5F6163636F756E74000603656E76057472616365000703656E76057374617465000203656E760A686F6F6B5F706172616D000203656E7608726F6C6C6261636B000303656E760973746174655F736574000203656E760A6C65646765725F736571000403656E760C6574786E5F64657461696C73000603656E760D6574786E5F6665655F62617365000603656E7604656D6974000203656E760B7574696C5F6B65796C6574000803656E7608736C6F745F736574000503656E760D736C6F745F7375626669656C64000503656E760D736C6F745F7375626172726179000503656E7604736C6F7400050302010105030100020608017F0141C09B040B07080104686F6F6B00160AE9AE0001E5AE0003097F057E027C230041B00A6B220024004101410110001A410110011A20004180016A410241940841011002420251044041960841062000310081012000310080014208868410031A0B100442E300520440419C0841CC0042810110051A0B200041900A6A410C72220241144181802010061A200041F0096A410C722208411410071A024020002903FC09220B200029039C0A510440200028028C0A20002802AC0A4720002903840A20002903A40A52720D0120004180016A41144183802010064214510440024020002903FC0920002903800151044020002903840A200029038801510440200028028C0A200028029001460D020B0B41E8084121428F0110051A0B0B20002903FC09210B0B0B027F418008290300200B510440200028028C0A4190082802004720002903840A4188082903005272450440410121034189090C020B0B41BC090B413341004100410010081A4100410041EF0941021009220B427B51044020004190096A410141F2094103100A427F57044041F609413A42A301100B1A0B41B00A410320003100900910031A20004190096A410141EF094102100C420057044041B40A411A42A701100B1A0B41CE0A410C20002D0090092206AD42FF0183220B10031A20002D009009220145044041DB0A412E42AE01100B1A20002D00900921010B200141FF017141154F044041890B413D42B101100B1A0B200304400240200041B0
096A410841C60B4103100A427F57044041CA0B413942B601100B1A0B200041206A410841830C4103100A427F57044041870C413742B901100B1A0B200029032050044041BE0C412E42BC01100B1A0B200041B0096A410841EC0C4102100C420057044041B40A411A42BF01100B1A0B200041206A410841EF0C4102100C4200550D0041B40A411A42C201100B1A0B0B41002101034041C581808078411510001A0240200020013A000020002D0000220120064F0D00200020013A0052200041C9A6013B005020004180016A4114200041D0006A4103100A421452044041F20C413F42CA01100B1A0B41B10D410820004180016A22014114410110081A2001411420004101100C421452044041B40A411A42D001100B1A0B2000410120004180016A4114100C420152044041B40A411A42D401100B1A0B20002D000041016A21010C010B0B41B90D412A42D70110051A0B41CE0A410C200B10031A41004100200241141009427F57044041E30D41C50042E301100B1A0B200041EE096A410241A80E41011002210A20002D00EE092102200020002D00EF0922013A00ED090240200A42025104400240200241C8006B2206410B4B0D004101200674418118710D020B0B41AA0E413D42F201100B1A20002D00ED0921010B0240200241D300460440200141144F044041E70E413042F601100B1A41002106410121040C020B0B200241C8004604402001410B4F044041970F412F42F901100B1A41012104410121060C020B0B200241C80046210641012104200241D200470D0041002104200141C40046200141D20046720D0041C60F413C42FC01100B1A0B200041013A00EC0920034504400240200041EC096A410141821041011002420152044041841041C100428402100B1A0B41C510410120003100EC0910031A20002D00EC09220141016B41FF017141024F044041C7104130428902100B1A20002D00EC0921010B20014102472004720D0041F71041C500428D02100B1A0B0B4120412041144108200241D300461B20061B22016B220541FC01712207200041B0096A6A2204200141BC11410110022001AD220A52044041BE1141C300429B02100B1A0B41002106024020002903B00950044020002903B80942005220002903C009420052720D0120002903C8095021060B0B4181124110200041B0096A22094138410110081A419112410F2005AD42FF018310031A41A012410C200A10031A41AC12410C20042001410110081A200020023A00910A200041D6003A00900A200020002D00ED093A00920A200020002D00EC093A00930A20004190096A220520076A2001200041900A6A41201009210C41B812411420054120410110081A41CC12410B20094120410110081A41D7124114200C
10031A41EB12410B200A10031A200A200C510440024020002903900920002903B0095220002903980920002903B809527220002903A00920002903C0095220002903A80920002903C8095272720D0041F612413F42BB0210051A0B0B20042001200041900A6A4120100C200A52044041B40A411A42C202100B1A0B41002105200C42015904400240200020023A009109200041C3003A009009200041003A008001200020002D00ED093A009209200020002D00EC093A00930920004180016A410120004190096A41201009420152044041B40A411A42CF02100B1A0B20002D008001220545044041B40A411A42D002100B1A20002D00800121050B2000200541016B22073A00800120004180016A4100200741FF017122071B200741004720004190096A4120100C427F550D0041B40A411A42D402100B1A0B0B200041003A008F0920002903B009210A200041C3003A00B009200020002D00EC093A00B309200020002D00ED093A00B209200020023A00B1092000418F096A22074101200041B0096A2209412010091A200020002D008F0941016A3A008F092007410120094120100C420057044041B40A411A42E402100B1A0B2000200A3703B00941B513410F2006AD220E10031A41C5134105200031008F0910031A41CE0A410C200B10031A41CB134106200041EE096A4102410110081A200BB9220F4452B81E85EB51E03FA222109944000000000000E04363047E2010B005428080808080808080807F0B210A2003417F7320002D00EC09410247712107200F449A9999999999E93FA2220F9944000000000000E04363047E200FB005428080808080808080807F0B210C024020070440200031008F09210C200A4202200A4202551B220D2005AD42FF01835704400240200C200D590D00418A14412E41004100410010081A4201210A0C030B0B4200210A200C200D590D0141B8144135428F0310051A054200210A200031008F09200C4202200C4202551B200B200241D300461B590D0141D113413942870310051A0B0B41ED14411141FF144110410010081A20002D00EC09410147200372450440100D210C2000419E016A4280808080B08EC0C014370000200041E880013B009901200041A0363B009301200041A0343B008D012000410036008901200041243A00880120004292808C938210370380012000410036009B012000200CA7220541056A22033A009801200020034108763A009701200020034110763A009601200020034118763A0095012000200541016A22033A009201200020034108763A009101200020034110763A009001200020034118763A008F01200020002903FC093703A60120004183293B01BA01200020002903840A3703AE012000200028028C0A3602
B60120004180082903003703BC0120004188082903003703C40120004190082802003602CC01200041D0016A2203418004100EA720036A220341F0B208360208200342F0A780BF818EC680D4003703002003410B6A20023A0000200341166A20013A0000200341156A41193A00002003410D6A42E1C1DF8087A380ABF0003703002003410C6A20002D00ED093A0000200341176A2103200A5004402003200429030037030020032004290308370308200341106A2004290310370300200341186A20042903183703000B200120036A220341E1E3033B0100200320004180016A22036B2105200020032005100F220A3C00A101200041E8003A0099012000200A4208883C00A0012000200A4210883C009F012000200A4218883C009E012000200A4220883C009D012000200A4228883C009C012000200A4230883C009B012000200A423888A7413F7141C000723A009A01418F15412220032005410110081A41B1154118200041206A4120200320051010220A10031A200A422051044041C915412A42E40310051A0B41F315412542E603100B1A0B02400240200241D2006B220341014D044020034101460D01419816410620042001200041EE096A4102100C220A10031A200A420057044041B40A411A42F003100B1A0B20002D00ED0941D200460440419F16412942F20310051A0B41C816412A42F40310051A05200241C800470D020B200041D0006A22014122410120084114410041004100410010111A20014122410510121A4105418B803C410610131A410620002D00ED0941071014420751044002404107419F8014410810134208520D0020004180016A412041081015422052044041B40A411A428B04100B1A0B20002903800120002903B0095220002903880120002903B809527220002903900120002903C0095220002903980120002903C8095272720D0041F216413D428F0410051A0B0B20064504400240200041D0006A220141224118200041B0096A4120410041004100410010111A20014122410910124209510D0041AF1741C300429A04100B1A0B0B200041206A20002D00ED094102746A417F200041B0096A20061B360200100D210A2000411410071A2000419E016A4280808080B08EC0C014370000200041E880013B009901200041A0363B009301200041A0343B008D012000410036008901200041243A0088012000429280D8908210370380012000410036009B012000200AA7220241056A22013A009801200020014108763A009701200020014110763A009601200020014118763A0095012000200241016A22013A009201200020014108763A009101200020014110763A009001200020014118763A008F01200020002903003703A6012000200029030837
03AE01200020002802103602B601200041BA016A220141C607100EA720016A220141FBDD033B0000200141026A210120002802202202044020014122360000200141046A41013A0000200141066A2104200141056A21012002417F47047F200141D03E3B0000200120022903003703022001200229030837030A200120022903103703122001200229031837031A200441216A05200141FB003B0000200441016A0B21010B200141E1013A000020002802242102200141EE013A00012002047F200141013A0006200141223600022002417F47047F200141D03E3B000720012002290300370309200120022903083703112001200229031037031920012002290318370321200141296A05200141FB003B0007200141096A0B05200141026A0B220141E1013A000020002802282102200141EE013A00012002047F200141013A0006200141223600022002417F47047F200141D03E3B000720012002290300370309200120022903083703112001200229031037031920012002290318370321200141296A05200141FB003B0007200141096A0B05200141026A0B220141E1013A0000200028022C2102200141EE013A00012002047F200141013A0006200141223600022002417F47047F200141D03E3B000720012002290300370309200120022903083703112001200229031037031920012002290318370321200141296A05200141FB003B0007200141096A0B05200141026A0B220141E1013A000020002802302102200141EE013A00012002047F200141013A0006200141223600022002417F47047F200141D03E3B000720012002290300370309200120022903083703112001200229031037031920012002290318370321200141296A05200141FB003B0007200141096A0B05200141026A0B220141E1013A000020002802342102200141EE013A00012002047F200141013A0006200141223600022002417F47047F200141D03E3B000720012002290300370309200120022903083703112001200229031037031920012002290318370321200141296A05200141FB003B0007200141096A0B05200141026A0B220141E1013A000020002802382102200141EE013A00012002047F200141013A0006200141223600022002417F47047F200141D03E3B000720012002290300370309200120022903083703112001200229031037031920012002290318370321200141296A05200141FB003B0007200141096A0B05200141026A0B220141E1013A0000200028023C2102200141EE013A00012002047F200141013A0006200141223600022002417F47047F200141D03E3B00072001200229030037030920012002290308370311200120022903103703192001200229031837032120
0141296A05200141FB003B0007200141096A0B05200141026A0B220141E1013A000020002802402102200141EE013A00012002047F200141013A0006200141223600022002417F47047F200141D03E3B000720012002290300370309200120022903083703112001200229031037031920012002290318370321200141296A05200141FB003B0007200141096A0B05200141026A0B220141E1013A000020002802442102200141EE013A00012002047F200141013A0006200141223600022002417F47047F200141D03E3B000720012002290300370309200120022903083703112001200229031037031920012002290318370321200141296A05200141FB003B0007200141096A0B05200141026A0B220141E1E3033B0000200120004180016A22016B41026A2102200020012002100F220A3C00A101200041E8003A0099012000200A4208883C00A0012000200A4210883C009F012000200A4218883C009E012000200A4220883C009D012000200A4228883C009C012000200A4230883C009B012000200A423888A7413F7141C000723A009A0141F217410B20012002410110081A41FD17410B20004120200120021010220A10031A200A4220520440418918412F42B504100B1A0B41B818410F20004120410110081A41C718411B42B80410051A0B20004180016A410C7222034114200041ED096A41011009220A421451044041E218411420004180016A4120410110081A0B200041B0096A410C722102200029038C0120002903BC095104400240200028029C0120002802CC094720002903940120002903C40952720D0041F61841CD0042C70410051A0B0B41004100200241141009220C423F88A74101732101200C420059044041C319413041004100410010081A0B2001200A4214522205410274200641017472722204AD210D200441036B220841044D044002400240200841016B0E03010101000B41B40A411A42D304100B1A0B0B41F3194102200D10031A41F61941022005AD10031A41F8194102200E10031A41FA1941022001AD10031A0240200441016B220141034D044002400240200141026B0E020300010B200B42017C210B0C020B200B42017D210B0B0B41FC194110200A421451AD10031A41B513410F200E10031A41CE0A410C200B10031A200B420157044041B40A411A42F504100B1A0B2000200B3C0020200041206A410141EF094102100C420152044041B40A411A42F804100B1A0B200C420059044002402000200C3C002041004100200041206A4101100C5045044041B40A411A428205100B1A0B4100410020024114100C500D0041B40A411A428505100B1A0B0B200A42145104400240200041D6003A008001410121010340418E85808078412110001A02
4020014120460D00200041D20041C80041D3002001410C4922041B20014102491B3A008101200041C40041FE0141F40120041B20016A20014101461B3A008201200041206A412020004180016A412010094220510440200041C3003A0020200041003A0050200020002F0081013B0021200041D0006A4101200041206A41201009420151044020002D0050220441014B04402000200441016B3A0050200041D0006A4101200041206A4120100C420152044041B40A411A42AB05100B1A0B41AA1A4118200031005010031A0541004100200041206A4120100C5045044041B40A411A42A505100B1A0B418D1A411D200031005010031A0B0B4100410020004180016A4120100C5045044041B40A411A42B105100B1A0B41C21A4113200041206A4120410110081A0B200141016A21010C010B0B41004100200041ED096A4101100C5045044041B40A411A42B805100B1A0B4100410020034114100C500D0041B40A411A42BB05100B1A0B0B2006450440024020024114200041ED096A4101100C421452044041B40A411A42C205100B1A0B200041ED096A410120024114100C4201510D0041B40A411A42C505100B1A0B0B41D51A412242C80510051A0B41F71A412242CC05100B1A200041B00A6A2400200B0B0BA61302004180080B14B5F762798A53D543A014CAF8B297CFF8F2F937E8004194080B841344004442474C4E00476F7665726E616E63653A2050617373696E67206E6F6E2D496E766F6B652074786E2E20486F6F6B4F6E2073686F756C64206265206368616E67656420746F2061766F696420746869732E00476F766572616E63653A2050617373696E67206F7574676F696E672074786E2E00476F7665726E616E63653A205374617274696E6720676F7665726E616E6365206C6F676963206F6E204C31207461626C652E00476F7665726E616E63653A205374617274696E6720676F7665726E616E6365206C6F676963206F6E204C32207461626C652E004D4300494D4300476F7665726E616E63653A20496E697469616C204D656D62657220436F756E7420506172616D65746572206D697373696E672028494D43292E00696D6300476F7665726E3A20417373657274696F6E206661696C65642E006D656D6265725F636F756E7400476F7665726E616E63653A20496E697469616C204D656D62657220436F756E74206D757374206265203E20302E00476F7665726E616E63653A20496E697469616C204D656D62657220436F756E74206D757374206265203C3D205365617420436F756E7420283230292E0049525200476F7665726E616E63653A20496E697469616C20526577617264205261746520506172616D65746572206D697373696E672028495252292E0049
524400476F7665726E616E63653A20496E697469616C205265776172642044656C617920506172616D65746572206D6973732028495244292E00476F7665726E616E63653A20496E697469616C205265776172642044656C6179206D757374206265203E20302E00525200524400476F7665726E616E63653A204F6E65206F72206D6F726520696E697469616C206D656D626572206163636F756E742049442773206973206D697373696E67004D656D6265723A00476F7665726E616E63653A20536574757020636F6D706C65746564207375636365737366756C6C792E00476F7665726E616E63653A20596F7520617265206E6F742063757272656E746C79206120676F7665726E616E6365206D656D6265722061742074686973207461626C652E005400476F7665726E616E63653A2056616C696420544F504943206D75737420626520737065636966696564206173206F74786E20706172616D657465722E00476F7665726E616E63653A2056616C6964207365617420746F70696373206172652030207468726F7567682031392E00476F7665726E616E63653A2056616C696420686F6F6B20746F70696373206172652030207468726F75676820392E00476F7665726E616E63653A2056616C69642072657761726420746F706963732061726520522028726174652920616E642044202864656C6179292E004C00476F7665726E616E63653A204D697373696E67204C20706172616D657465722E205768696368206C617965722061726520796F7520766F74696E6720666F723F006C00476F7665726E616E63653A204C6179657220706172616D65746572206D75737420626520273127206F72202732272E00476F7665726E616E63653A204C32732063616E6E6F7420766F7465206F6E2052522F5244206174204C322C2064696420796F75206D65616E20746F20736574204C3D313F005600476F7665726E616E63653A204D697373696E67206F7220696E636F72726563742073697A65206F6620564F5445206461746120666F7220544F50494320747970652E00746F7069635F646174615F7261773A00746F7069635F70616464696E673A00746F7069635F73697A653A00746F7069635F646174613A0070726576696F75735F746F7069635F6461746100746F7069635F646174610070726576696F75735F746F7069635F73697A6500746F7069635F73697A6500476F7665726E616E63653A20596F757220766F746520697320616C7265616479206361737420746869732077617920666F72207468697320746F7069632E00746F7069635F646174615F7A65726F00766F74657300746F70696300476F7665726E616E63653A20566F7465207265636F72642E204E6F
742079657420656E6F75676820766F74657320746F20616374696F6E2E00476F7665726E616E63653A204D616A6F72697479206C6F73742C20756E646F696E67204C3120766F74652E2E2E00476F7665726E616E63653A204E6F742079657420656E6F75676820766F74657320746F20616374696F6E204C3120766F74652E2E0022416374696F6E696E6720766F7465732200416374696F6E696E6720766F74657300476F7665726E616E63653A20456D697474696E6720696E766F6B6520746F204C3100476F7665726E616E63653A20456D697420726573756C7400476F7665726E616E63653A205375636365737366756C6C7920656D6974746564204C3120766F74652E00476F7665726E616E63653A204C3120766F746520656D697373696F6E206661696C65642E00726573756C7400476F7665726E616E63653A205265776172642072617465206368616E676520616374696F6E65642100476F7665726E616E63653A205265776172642064656C6179206368616E676520616374696F6E65642100476F766572616E63653A2054617267657420686F6F6B20697320616C7265616479207468652073616D6520617320616374696F6E656420686F6F6B2E00476F766572616E63653A20486F6F6B204861736820646F65736E2774206578697374206F6E206C6564676572207768696C6520616374696F6E696E6720686F6F6B2E00456D697474656454786E00656D69745F726573756C7400476F7665726E616E63653A20456D6974206661696C656420647572696E6720686F6F6B20616374696F6E696E672E00456D697474656454786E4861736800476F7665726E616E63653A20486F6F6B20616374696F6E65642E0050726576696F75732070726573656E743D3D3A00476F7665726E616E63653A20416374696F6E696E672073656174206368616E67652C20627574207365617420616C726561647920636F6E7461696E7320746865206E6577206D656D6265722E00476F7665726E616E63653A204D6F76696E67206578697374696E67206D656D62657220746F206E657720736561742E006F700045005A004D0070726576696F75735F70726573656E740044656372656D656E7420766F746520636F756E742064656C657465640044656372656D656E7420766F746520636F756E7420746F00566F746520656E7472792064656C6574656400476F7665726E616E63653A20416374696F6E206D656D626572206368616E67652E00476F7665726E616E63653A20496E7465726E616C206C6F676963206572726F722E",
+        "Fee": "9603",
+        "Flags": 0,
+        "HookApiVersion": 0,
+        "HookHash": "78CA3F5BD3D4F7B32A6BEBB3844380A9345C9BA496EFEB30314BDDF405D7B4B3",
+        "HookNamespace": "0000000000000000000000000000000000000000000000000000000000000000",
+        "HookOn": "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFFFFFFFFFBFFFFF",
+        "HookParameters": [
+          {
+            "HookParameter": {
+              "HookParameterName": "495252",
+              "HookParameterValue": "80780D25A6D7CB53"
+            }
+          },
+          {
+            "HookParameter": {
+              "HookParameterName": "495244",
+              "HookParameterValue": "0080C6A47E8D0355"
+            }
+          },
+          {
+            "HookParameter": {
+              "HookParameterName": "495300",
+              "HookParameterValue": "AE123A8556F3CF91154711376AFB0F894F832B3D"
+            }
+          },
+          {
+            "HookParameter": {
+              "HookParameterName": "495301",
+              "HookParameterValue": "F51DFC2A09D62CBBA1DFBDD4691DAC96AD98B90F"
+            }
+          },
+          {
+            "HookParameter": {
+              "HookParameterName": "495302",
+              "HookParameterValue": "B389FBCED0AF9DCDFF62900BFAEFA3EB872D8A96"
+            }
+          },
+          {
+            "HookParameter": {
+              "HookParameterName": "495303",
+              "HookParameterValue": "AA266540F7DACC27E264B75ED0A5ED7330BFB614"
+            }
+          },
+          {
+            "HookParameter": {
+              "HookParameterName": "495304",
+              "HookParameterValue": "D91B8EE5C7ABF632469D4C0907C5E40C8B8F79B3"
+            }
+          },
+          {
+            "HookParameter": {
+              "HookParameterName": "494D43",
+              "HookParameterValue": "05"
+            }
+          }
+        ],
+        "HookSetTxnID": "6B4BE82F808A27CF7AFA27394D98BF5A9AC2AF33CF02B3F6274A8A04B1A3762F",
+        "LedgerEntryType": "HookDefinition",
+        "ReferenceCount": "4",
+        "index": "7AF044D5C3658518E1AE02D4597C15B65E5F5C7AC2D1E770EF4DA46058475ED8"
+      },
+      {
+        "Flags": 0,
+        "Indexes": [
+          "469372BEE8814EC52CA2AECB5374AB57A47B53627E3C0E2ACBE3FDC78DBFEC7B"
+        ],
+        "LedgerEntryType": "DirectoryNode",
+        "Owner": "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
+        "RootIndex": "D8120FC732737A2CF2E9968FDF3797A43B457F2A81AA06D2653171A1EA635204",
+        "index": "D8120FC732737A2CF2E9968FDF3797A43B457F2A81AA06D2653171A1EA635204"
+      },
+      {
+        "Account": "rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh",
+        "Flags": 0,
+        "Hooks": [
+          {
+            "Hook": {
+              "HookHash": "78CA3F5BD3D4F7B32A6BEBB3844380A9345C9BA496EFEB30314BDDF405D7B4B3"
+            }
+          },
+          {
+            "Hook": {
+              "HookHash": "610F33B8EBF7EC795F822A454FB852156AEFE50BE0CB8326338A81CD74801864"
+            }
+          },
+          {
+            "Hook": {}
+          },
+          {
+            "Hook": {}
+          },
+          {
+            "Hook": {}
+          },
+          {
+            "Hook": {}
+          },
+          {
+            "Hook": {}
+          },
+          {
+            "Hook": {}
+          },
+          {
+            "Hook": {}
+          },
+          {
+            "Hook": {}
+          }
+        ],
+        "LedgerEntryType": "Hook",
+        "OwnerNode": "0",
+        "PreviousTxnID": "6B4BE82F808A27CF7AFA27394D98BF5A9AC2AF33CF02B3F6274A8A04B1A3762F",
+        "PreviousTxnLgrSeq": 6869678,
+        "index": "469372BEE8814EC52CA2AECB5374AB57A47B53627E3C0E2ACBE3FDC78DBFEC7B"
       }
     ],
     "account_hash": "5DF3A98772FB73E782B8740E87885C6BAD9BA486422E3626DEF968AD2CB2C514",
     "close_flags": 0,
-    "close_time": 733708800,
-    "close_time_human": "2023-Apr-02 00:00:00.000000",
+    "close_time": 0,
+    "close_time_human": "2000-Jan-01 00:00:00.000000",
     "close_time_resolution": 10,
     "closed": true,
     "hash": "56DA0940767AC2F17F0E384F04816002403D0756432B9D503DDA20128A2AAF11",
     "ledger_hash": "56DA0940767AC2F17F0E384F04816002403D0756432B9D503DDA20128A2AAF11",
-    "ledger_index": "5",
-    "parent_close_time": 733708800,
+    "ledger_index": "1",
+    "parent_close_time": 0,
     "parent_hash": "56DA0940767AC2F17F0E384F04816002403D0756432B9D503DDA20128A2AAF11",
-    "seqNum": "5",
-    "totalCoins": "100000000000000000",
-    "total_coins": "100000000000000000",
+    "seqNum": "1",
+    "totalCoins": "100000000000000",
+    "total_coins": "100000000000000",
     "transaction_hash": "9A77D1D1A4B36DA77B9C4DC63FDEB8F821741D157802F9C42A6ED86003D8B4A0",
     "transactions": []
   },
-  "ledger_current_index": 5,
+  "ledger_current_index": 1,
   "status": "success",
   "validated": true
 }
\ No newline at end of file
diff --git a/cfg/initdb.sh b/cfg/initdb.sh
deleted file mode 100755
index 9ca02ed56..000000000
--- a/cfg/initdb.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-
-# Execute this script with a running Postgres server on the current host.
-# It should work with the most generic installation of Postgres,
-# and is necessary for rippled to store data in Postgres.
-
-# usage: sudo -u postgres ./initdb.sh
-psql -c "CREATE USER rippled"
-psql -c "CREATE DATABASE rippled WITH OWNER = rippled"
-
diff --git a/cfg/rippled-standalone.cfg b/cfg/rippled-standalone.cfg
index 8963b027a..79c5c9004 100755
--- a/cfg/rippled-standalone.cfg
+++ b/cfg/rippled-standalone.cfg
@@ -127,13 +127,20 @@ B6B3EEDC0267AB50491FDC450A398AF30DBCD977CECED8BEF2499CAB5DAC19E2 fixRmSmallIncre
 98DECF327BF79997AEC178323AD51A830E457BFC6D454DAF3E46E5EC42DC619F CheckCashMakesTrustLine
 B2A4DB846F0891BF2C76AB2F2ACC8F5B4EC64437135C6E56F3F859DE5FFD5856 ExpandedSignerList
 32A122F1352A4C7B3A6D790362CC34749C5E57FCE896377BFDC6CCD14F6CD627 NonFungibleTokensV1_1
+DF8B4536989BDACE3F934F29423848B9F1D76D09BE6A1FCFE7E7F06AA26ABEAD fixRemoveNFTokenAutoTrustLine
+75A7E01C505DD5A179DFE3E000A9B6F1EDDEB55A12F95579A23E15B15DC8BE5A ImmediateOfferKilled
+47C3002ABA31628447E8E9A8B315FAA935CE30183F9A9B86845E469CA2CDC3DF DisallowIncoming
+93E516234E35E08CA689FA33A6D38E103881F8DCB53023F728C307AA89D515A7 XRPFees
 2E2FB9CF8A44EB80F4694D38AADAE9B8B7ADAFD2F092E10068E61C98C4F092B0 fixUniversalNumber
 73761231F7F3D94EC3D8C63D91BDD0D89045C6F71B917D1925C01253515A6669 fixNonFungibleTokensV1_2
 AE35ABDEFBDE520372B31C957020B34A7A4A9DC3115A69803A44016477C84D6E fixNFTokenRemint
-ECF412BE0964EC2E71DCF807EEEA6EA8470D3DB15173D46F28AB6E234860AC32 Hooks
+ECE6819DBA5DB528F1A241695F5A9811EF99467CDE22510954FD357780BBD078 Hooks
 42F8B586B357ABBAAAA1C733C3E7D3B75761395340D0CDF600179E8737E22478 BalanceRewards
 919857E4B902A20216E4819B9BD9FD1FD19A66ECF63151C18A4C48C873DB9578 PaychanAndEscrowForTokens
 ECF412BE0964EC2E71DCF807EEEA6EA8470D3DB15173D46F28AB6E234860AC32 URIToken
+F5751842D26FC057B92CAA435ABF4F1428C2BCC4180D18775ADE92CB2643BBA3 Import
+6E739F4F8B07BED29FC9FF440DA3C301CD14A180DF45819F658FEC2F7DE31427 XahauGenesis
+D686F2538F410C9D0D856788E98E3579595DAF7B38D38887F81ECAC934B06040 HooksUpdate1
 86E83A7D2ECE3AD5FA87AB2195AE015C950469ABF0B72EAACED318F74886AE90 CryptoConditionsSuite
 3C43D9A973AA4443EF3FC38E42DD306160FBFFDAB901CD8BAA15D09F2597EB87 NonFungibleTokensV1
 0285B7E5E08E1A8E4C15636F0591D87F73CB6A7B6452A932AD72BBC8E5D1CBE3 fixNFTokenDirV1
diff --git a/cfg/validators-example.txt b/cfg/validators-example.txt
index 8f7c04729..6c2314ebd 100644
--- a/cfg/validators-example.txt
+++ b/cfg/validators-example.txt
@@ -42,6 +42,15 @@
 #    ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
 #    ED307A760EE34F2D0CAA103377B1969117C38B8AA0AA1E2A24DAC1F32FC97087ED
 #
+# [import_vl_keys]
+#
+#   This section is used to import the public keys of trusted validator list publishers.
+#   The keys are used to authenticate and accept new lists of trusted validators.
+#   In this example, the key for the publisher "vl.xrplf.org" is imported.
+#   Each key is represented as a hexadecimal string.
+#
+#   Examples:
+#    ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
 
 # The default validator list publishers that the rippled instance
 # trusts.
@@ -62,6 +71,10 @@ ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
 # vl.xrplf.org
 ED45D1840EE724BE327ABE9146503D5848EFD5F38B6D5FEDE71E80ACCE5E6E738B
 
+[import_vl_keys]
+# vl.xrplf.org
+ED2677ABFFD1B33AC6FBC3062B71F1E8397C1505E1C42C64D11AD1B28FF73F4734
+
 # To use the test network (see https://xrpl.org/connect-your-rippled-to-the-xrp-test-net.html),
 # use the following configuration instead:
 #
@@ -70,3 +83,6 @@ ED45D1840EE724BE327ABE9146503D5848EFD5F38B6D5FEDE71E80ACCE5E6E738B
 #
 # [validator_list_keys]
 # ED264807102805220DA0F312E71FC2C69E1552C9C5790F6C25E3729DEB573D5860
+#
+# [import_vl_keys]
+# ED264807102805220DA0F312E71FC2C69E1552C9C5790F6C25E3729DEB573D5860
diff --git a/docker/Dockerfile b/docker/Dockerfile
deleted file mode 100644
index dfa99ba6a..000000000
--- a/docker/Dockerfile
+++ /dev/null
@@ -1,43 +0,0 @@
-# Use the official image as a parent image.
-FROM centos
-
-# Set the working directory.
-WORKDIR /opt/xrpld-hooks/
-
-# Copy the file from your host to your current location.
-COPY docker/screenrc /root/.screenrc
-COPY docker/wasm2wat /usr/bin/
-COPY rippled .
-COPY testnet.cfg .
-COPY testnetvalidators.txt .
-COPY docker/libboost/libboost_coroutine.so.1.70.0 /usr/lib/
-COPY docker/libboost/libboost_context.so.1.70.0 /usr/lib
-COPY docker/libboost/libboost_filesystem.so.1.70.0 /usr/lib
-COPY docker/libboost/libboost_program_options.so.1.70.0 /usr/lib
-COPY docker/libboost/libboost_regex.so.1.70.0 /usr/lib
-COPY docker/libboost/libboost_system.so.1.70.0 /usr/lib
-COPY docker/libboost/libboost_thread.so.1.70.0 /usr/lib
-COPY docker/libboost/libboost_chrono.so.1.70.0 /usr/lib
-COPY docker/libboost/libboost_date_time.so.1.70.0 /usr/lib
-COPY docker/libboost/libboost_atomic.so.1.70.0 /usr/lib
-COPY docker/js/ ./
-# Run the command inside your image filesystem.
-RUN dnf install epel-release -y
-RUN yum install -y vim screen python3-setuptools-wheel python3-pip-wheel python3 python3-pip curl make nodejs
-RUN curl https://cmake.org/files/v3.17/cmake-3.17.1-Linux-x86_64.sh --output cmake-3.17.1-Linux-x86_64.sh \
-    &&  mkdir /opt/cmake \
-    &&  printf "y\nn\n" | sh cmake-3.17.1-Linux-x86_64.sh --prefix=/opt/cmake > /dev/null \
-    &&  ln -s /opt/cmake/bin/cmake /usr/local/bin/cmake
-RUN curl https://raw.githubusercontent.com/wasienv/wasienv/master/install.sh | sh
-RUN echo 'PATH=$PATH:/root/.wasienv/bin/' >> /root/.bash_rc
-RUN rm -f cmake-3.17.1-Linux-x86_64.sh
-RUN mkdir /etc/opt/ripple
-RUN ln -s /opt/xrpld-hooks/testnet.cfg /etc/opt/ripple/rippled.cfg
-RUN ln -s /opt/xrpld-hooks/testnetvalidators.txt /etc/opt/ripple/testnetvalidators.txt
-
-# Add metadata to the image to describe which port the container is listening on at runtime.
-EXPOSE 6005
-EXPOSE 5005
-
-# Run the specified command within the container.
-CMD ./rippled --conf testnet.cfg --net >> log 2>> log
diff --git a/docker/build_docker_xrpllabs.sh b/docker/build_docker_xrpllabs.sh
deleted file mode 100755
index 2caa7b342..000000000
--- a/docker/build_docker_xrpllabs.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-cp -r ../hook-api-examples docker/js #docker doesnt like symlinks?
-/usr/bin/cp /root/wabt/bin/wasm2wat docker/ 
-docker build --tag xrpllabsofficial/xrpld-hooks-testnet:latest . && docker create xrpllabsofficial/xrpld-hooks-testnet
-rm -rf docker/js
-docker push xrpllabsofficial/xrpld-hooks-testnet:latest
diff --git a/docs/.gitignore b/docs/.gitignore
deleted file mode 100644
index b3ee85720..000000000
--- a/docs/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-html
-temp
-out.txt
diff --git a/docs/0001-negative-unl/README.md b/docs/0001-negative-unl/README.md
deleted file mode 100644
index 606b30aab..000000000
--- a/docs/0001-negative-unl/README.md
+++ /dev/null
@@ -1,597 +0,0 @@
-# Negative UNL Engineering Spec
-
-## The Problem Statement
-
-The moment-to-moment health of the XRP Ledger network depends on the health and
-connectivity of a small number of computers (nodes). The most important nodes
-are validators, specifically ones listed on the unique node list
-([UNL](#Question-What-are-UNLs)). Ripple publishes a recommended UNL that most
-network nodes use to determine which peers in the network are trusted. Although
-most validators use the same list, they are not required to. The XRP Ledger
-network progresses to the next ledger when enough validators reach agreement
-(above the minimum quorum of 80%) about what transactions to include in the next
-ledger.
-
-As an example, if there are 10 validators on the UNL, at least 8 validators have
-to agree with the latest ledger for it to become validated. But what if enough
-of those validators are offline to drop the network below the 80% quorum? The
-XRP Ledger network favors safety/correctness over advancing the ledger. Which
-means if enough validators are offline, the network will not be able to validate
-ledgers.
-
-Unfortunately validators can go offline at any time for many different reasons.
-Power outages, network connectivity issues, and hardware failures are just a few
-scenarios where a validator would appear "offline". Given that most of these
-events are temporary, it would make sense to temporarily remove that validator
-from the UNL. But the UNL is updated infrequently and not every node uses the
-same UNL. So instead of removing the unreliable validator from the Ripple
-recommended UNL, we can create a second negative UNL which is stored directly on
-the ledger (so the entire network has the same view). This will help the network
-see which validators are **currently** unreliable, and adjust their quorum
-calculation accordingly.
-
-*Improving the liveness of the network is the main motivation for the negative UNL.*
-
-### Targeted Faults
-
-In order to determine which validators are unreliable, we need clearly define
-what kind of faults to measure and analyze. We want to deal with the faults we
-frequently observe in the production network. Hence we will only monitor for
-validators that do not reliably respond to network messages or send out
-validations disagreeing with the locally generated validations. We will not
-target other byzantine faults.
-
-To track whether or not a validator is responding to the network, we could
-monitor them with a “heartbeat” protocol. Instead of creating a new heartbeat
-protocol, we can leverage some existing protocol messages to mimic the
-heartbeat. We picked validation messages because validators should send one and
-only one validation message per ledger. In addition, we only count the
-validation messages that agree with the local node's validations.
-
-With the negative UNL, the network could keep making forward progress safely
-even if the number of remaining validators gets to 60%. Say we have a network
-with 10 validators on the UNL and everything is operating correctly. The quorum
-required for this network would be 8 (80% of 10). When validators fail, the
-quorum required would be as low as 6 (60% of 10), which is the absolute
-***minimum quorum***. We need the absolute minimum quorum to be strictly greater
-than 50% of the original UNL so that there cannot be two partitions of
-well-behaved nodes headed in different directions. We arbitrarily choose 60% as
-the minimum quorum to give a margin of safety.
-
-Consider these events in the absence of negative UNL:
-1. 1:00pm - validator1 fails, votes vs. quorum: 9 >= 8, we have quorum
-1. 3:00pm - validator2 fails, votes vs. quorum: 8 >= 8, we have quorum
-1. 5:00pm - validator3 fails, votes vs. quorum: 7 < 8, we don’t have quorum
-    * **network cannot validate new ledgers with 3 failed validators**
-
-We're below 80% agreement, so new ledgers cannot be validated. This is how the
-XRP Ledger operates today, but if the negative UNL was enabled, the events would
-happen as follows. (Please note that the events below are from a simplified
-version of our protocol.)
-
-1. 1:00pm - validator1 fails, votes vs. quorum: 9 >= 8, we have quorum
-1. 1:40pm - network adds validator1 to negative UNL, quorum changes to ceil(9 * 0.8), or 8
-1. 3:00pm - validator2 fails, votes vs. quorum: 8 >= 8, we have quorum
-1. 3:40pm - network adds validator2 to negative UNL, quorum changes to ceil(8 * 0.8), or 7
-1. 5:00pm - validator3 fails, votes vs. quorum: 7 >= 7, we have quorum
-1. 5:40pm - network adds validator3 to negative UNL, quorum changes to ceil(7 * 0.8), or 6
-1. 7:00pm - validator4 fails, votes vs. quorum: 6 >= 6, we have quorum
-    * **network can still validate new ledgers with 4 failed validators**
-
-## External Interactions
-
-### Message Format Changes
-This proposal will:
-1. add a new pseudo-transaction type
-1. add the negative UNL to the ledger data structure.
-
-Any tools or systems that rely on the format of this data will have to be
-updated.
-
-### Amendment
-This feature **will** need an amendment to activate.
-
-## Design
-
-This section discusses the following topics about the Negative UNL design:
-
-* [Negative UNL protocol overview](#Negative-UNL-Protocol-Overview)
-* [Validator reliability measurement](#Validator-Reliability-Measurement)
-* [Format Changes](#Format-Changes)
-* [Negative UNL maintenance](#Negative-UNL-Maintenance)
-* [Quorum size calculation](#Quorum-Size-Calculation)
-* [Filter validation messages](#Filter-Validation-Messages)
-* [High level sequence diagram of code
-  changes](#High-Level-Sequence-Diagram-of-Code-Changes)
-
-### Negative UNL Protocol Overview
-
-Every ledger stores a list of zero or more unreliable validators. Updates to the
-list must be approved by the validators using the consensus mechanism that
-validators use to agree on the set of transactions. The list is used only when
-checking if a ledger is fully validated. If a validator V is in the list, nodes
-with V in their UNL adjust the quorum and V’s validation message is not counted
-when verifying if a ledger is fully validated. V’s flow of messages and network
-interactions, however, will remain the same.
-
-We define the ***effective UNL** = original UNL - negative UNL*, and the
-***effective quorum*** as the quorum of the *effective UNL*. And we set
-*effective quorum = Ceiling(80% * effective UNL)*.
-
-### Validator Reliability Measurement
-
-A node only measures the reliability of validators on its own UNL, and only
-proposes based on local observations. There are many metrics that a node can
-measure about its validators, but we have chosen ledger validation messages.
-This is because every validator shall send one and only one signed validation
-message per ledger. This keeps the measurement simple and removes
-timing/clock-sync issues. A node will measure the percentage of agreeing
-validation messages (*PAV*) received from each validator on the node's UNL. Note
-that the node will only count the validation messages that agree with its own
-validations.
-
-We define the **PAV** as the **P**ercentage of **A**greed **V**alidation
-messages received for the last N ledgers, where N = 256 by default.
-
-When the PAV drops below the ***low-water mark***, the validator is considered
-unreliable, and is a candidate to be disabled by being added to the negative
-UNL. A validator must have a PAV higher than the ***high-water mark*** to be
-re-enabled. The validator is re-enabled by removing it from the negative UNL. In
-the implementation, we plan to set the low-water mark as 50% and the high-water
-mark as 80%.
-
-### Format Changes
-
-The negative UNL component in a ledger contains three fields.
-* ***NegativeUNL***: The current negative UNL, a list of unreliable validators.
-* ***ToDisable***: The validator to be added to the negative UNL on the next
-  flag ledger.
-* ***ToReEnable***: The validator to be removed from the negative UNL on the
-  next flag ledger.
-
-All three fields are optional. When the *ToReEnable* field exists, the
-*NegativeUNL* field cannot be empty.
-
-A new pseudo-transaction, ***UNLModify***, is added. It has three fields
-* ***Disabling***: A flag indicating whether the modification is to disable or
-  to re-enable a validator.
-* ***Seq***: The ledger sequence number.
-* ***Validator***: The validator to be disabled or re-enabled.
-
-There would be at most one *disable* `UNLModify` and one *re-enable* `UNLModify`
-transaction per flag ledger. The full machinery is described further on.
-
-### Negative UNL Maintenance
-
-The negative UNL can only be modified on the flag ledgers. If a validator's
-reliability status changes, it takes two flag ledgers to modify the negative
-UNL. Let's see an example of the algorithm:
-
-* Ledger seq = 100: A validator V goes offline.
-* Ledger seq = 256: This is a flag ledger, and V's reliability measurement *PAV*
-  is lower than the low-water mark. Other validators add `UNLModify`
-  pseudo-transactions `{true, 256, V}` to the transaction set which goes through
-  the consensus. Then the pseudo-transaction is applied to the negative UNL
-  ledger component by setting `ToDisable = V`.
-* Ledger seq = 257 ~ 511: The negative UNL ledger component is copied from the
-  parent ledger.
-* Ledger seq=512: This is a flag ledger, and the negative UNL is updated
-  `NegativeUNL = NegativeUNL + ToDisable`.
-
-The negative UNL may have up to `MaxNegativeListed = floor(original UNL * 25%)`
-validators. The 25% is because of 75% * 80% = 60%, where 75% = 100% - 25%, 80%
-is the quorum of the effective UNL, and 60% is the absolute minimum quorum of
-the original UNL. Adding more than 25% validators to the negative UNL does not
-improve the liveness of the network, because adding more validators to the
-negative UNL cannot lower the effective quorum.
-
-The following is the detailed algorithm:
-
-* **If** the ledger seq = x is a flag ledger
-
-    1. Compute `NegativeUNL = NegativeUNL + ToDisable - ToReEnable` if they
-    exist in the parent ledger
-
-		1. Try to find a candidate to disable if `sizeof NegativeUNL < MaxNegativeListed`
-
-		1. Find a validator V that has a *PAV* lower than the low-water
-		mark, but is not in `NegativeUNL`.
-
-        1. If two or more are found, their public keys are XORed with the hash
-        of the parent ledger and the one with the lowest XOR result is chosen.
-				
-        1. If V is found, create a `UNLModify` pseudo-transaction
-        `TxDisableValidator = {true, x, V}`
-				
-    1. Try to find a candidate to re-enable if `sizeof NegativeUNL > 0`:
-		
-        1. Find a validator U that is in `NegativeUNL` and has a *PAV* higher
-        than the high-water mark.
-				
-        1. If U is not found, try to find one in `NegativeUNL` but not in the
-        local *UNL*.
-				
-        1. If two or more are found, their public keys are XORed with the hash
-        of the parent ledger and the one with the lowest XOR result is chosen.
-				
-        1. If U is found, create a `UNLModify` pseudo-transaction
-        `TxReEnableValidator = {false, x, U}`
-				
-    1. If any `UNLModify` pseudo-transactions are created, add them to the
-    transaction set. The transaction set goes through the consensus algorithm.
-		
-    1. If have enough support, the `UNLModify` pseudo-transactions remain in the
-    transaction set agreed by the validators. Then the pseudo-transactions are
-    applied to the ledger:
-		
-        1. If have `TxDisableValidator`, set `ToDisable=TxDisableValidator.V`.
-        Else clear `ToDisable`.
-				
-        1. If have `TxReEnableValidator`, set
-        `ToReEnable=TxReEnableValidator.U`. Else clear `ToReEnable`.
-				
-* **Else** (not a flag ledger)
-
-    1. Copy the negative UNL ledger component from the parent ledger
-
-The negative UNL is stored on each ledger because we don't know when a validator
-may reconnect to the network. If the negative UNL was stored only on every flag
-ledger, then a new validator would have to wait until it acquires the latest
-flag ledger to know the negative UNL. So any new ledgers created that are not
-flag ledgers copy the negative UNL from the parent ledger.
-
-Note that when we have a validator to disable and a validator to re-enable at
-the same flag ledger, we create two separate `UNLModify` pseudo-transactions. We
-want either one or the other or both to make it into the ledger on their own
-merits.
-
-Readers may have noticed that we defined several rules of creating the
-`UNLModify` pseudo-transactions but did not describe how to enforce the rules.
-The rules are actually enforced by the existing consensus algorithm. Unless
-enough validators propose the same pseudo-transaction it will not be included in
-the transaction set of the ledger.
-
-### Quorum Size Calculation
-
-The effective quorum is 80% of the effective UNL. Note that because at most 25%
-of the original UNL can be on the negative UNL, the quorum should not be lower
-than the absolute minimum quorum (i.e. 60%) of the original UNL. However,
-considering that different nodes may have different UNLs, to be safe we compute
-`quorum = Ceiling(max(60% * original UNL, 80% * effective UNL))`.
-
-### Filter Validation Messages
-
-If a validator V is in the negative UNL, it still participates in consensus
-sessions in the same way, i.e. V still follows the protocol and publishes
-proposal and validation messages. The messages from V are still stored the same
-way by everyone, used to calculate the new PAV for V, and could be used in
-future consensus sessions if needed. However V's ledger validation message is
-not counted when checking if the ledger is fully validated.
-
-### High Level Sequence Diagram of Code Changes
-
-The diagram below is the sequence of one round of consensus. Classes and
-components with non-trivial changes are colored green.
-
-* The `ValidatorList` class is modified to compute the quorum of the effective
-  UNL.
-
-* The `Validations` class provides an interface for querying the validation
-  messages from trusted validators.
-
-* The `ConsensusAdaptor` component:
-
-    * The `RCLConsensus::Adaptor` class is modified for creating `UNLModify`
-      Pseudo-Transactions.
-		
-    * The `Change` class is modified for applying `UNLModify`
-      Pseudo-Transactions.
-		
-    * The `Ledger` class is modified for creating and adjusting the negative UNL
-      ledger component.
-		
-    * The `LedgerMaster` class is modified for filtering out validation messages
-      from negative UNL validators when verifying if a ledger is fully
-      validated.
-
-
-
-
-## Roads Not Taken
-
-### Use a Mechanism Like Fee Voting to Process UNLModify Pseudo-Transactions
-
-The previous version of the negative UNL specification used the same mechanism
-as the [fee voting](https://xrpl.org/fee-voting.html#voting-process.) for
-creating the negative UNL, and used the negative UNL as soon as the ledger was
-fully validated. However the timing of fully validation can differ among nodes,
-so different negative UNLs could be used, resulting in different effective UNLs
-and different quorums for the same ledger. As a result, the network's safety is
-impacted.
-
-This updated version does not impact safety though operates a bit more slowly.
-The negative UNL modifications in the *UNLModify* pseudo-transaction approved by
-the consensus will take effect at the next flag ledger. The extra time of the
-256 ledgers should be enough for nodes to be in sync of the negative UNL
-modifications.
-
-### Use an Expiration Approach to Re-enable Validators
-
-After a validator disabled by the negative UNL becomes reliable, other
-validators explicitly vote for re-enabling it. An alternative approach to
-re-enable a validator is the expiration approach, which was considered in the
-previous version of the specification. In the expiration approach, every entry
-in the negative UNL has a fixed expiration time. One flag ledger interval was
-chosen as the expiration interval. Once expired, the other validators must
-continue voting to keep the unreliable validator on the negative UNL. The
-advantage of this approach is its simplicity. But it has a requirement. The
-negative UNL protocol must be able to vote multiple unreliable validators to be
-disabled at the same flag ledger. In this version of the specification, however,
-only one unreliable validator can be disabled at a flag ledger. So the
-expiration approach cannot be simply applied.
-
-### Validator Reliability Measurement and Flag Ledger Frequency
-
-If the ledger time is about 4.5 seconds and the low-water mark is 50%, then in
-the worst case, it takes 48 minutes *((0.5 * 256 + 256 + 256) * 4.5 / 60 = 48)*
-to put an offline validator on the negative UNL. We considered lowering the flag
-ledger frequency so that the negative UNL can be more responsive. We also
-considered decoupling the reliability measurement and flag ledger frequency to
-be more flexible. In practice, however, their benefits are not clear.
-
-
-## New Attack Vectors
-
-A group of malicious validators may try to frame a reliable validator and put it
-on the negative UNL. But they cannot succeed. Because:
-
-1. A reliable validator sends a signed validation message every ledger. A
-sufficient peer-to-peer network will propagate the validation messages to other
-validators. The validators will decide if another validator is reliable or not
-only by its local observation of the validation messages received. So an honest
-validator’s vote on another validator’s reliability is accurate.
-
-1. Given the votes are accurate, and one vote per validator, an honest validator
-will not create a UNLModify transaction of a reliable validator.
-
-1. A validator can be added to a negative UNL only through a UNLModify
-transaction.
-
-Assuming the group of malicious validators is less than the quorum, they cannot
-frame a reliable validator.
-
-## Summary
-
-The bullet points below briefly summarize the current proposal:
-
-* The motivation of the negative UNL is to improve the liveness of the network.
-
-* The targeted faults are the ones frequently observed in the production
-  network.
-
-* Validators propose negative UNL candidates based on their local measurements.
-
-* The absolute minimum quorum is 60% of the original UNL.
-
-* The format of the ledger is changed, and a new *UNLModify* pseudo-transaction
-  is added. Any tools or systems that rely on the format of these data will have
-  to be updated.
-
-* The negative UNL can only be modified on the flag ledgers.
-
-* At most one validator can be added to the negative UNL at a flag ledger.
-
-* At most one validator can be removed from the negative UNL at a flag ledger.
-
-* If a validator's reliability status changes, it takes two flag ledgers to
-  modify the negative UNL.
-
-* The quorum is the larger of 80% of the effective UNL and 60% of the original
-  UNL.
-
-* If a validator is on the negative UNL, its validation messages are ignored
-  when the local node verifies if a ledger is fully validated.
-
-## FAQ
-
-### Question: What are UNLs?
-
-Quote from the [Technical FAQ](https://xrpl.org/technical-faq.html): "They are
-the lists of transaction validators a given participant believes will not
-conspire to defraud them."
-
-### Question: How does the negative UNL proposal affect network liveness?
-
-The network can make forward progress when more than a quorum of the trusted
-validators agree with the progress. The lower the quorum size is, the easier for
-the network to progress. If the quorum is too low, however, the network is not
-safe because nodes may have different results. So the quorum size used in the
-consensus protocol is a balance between the safety and the liveness of the
-network. The negative UNL reduces the size of the effective UNL, resulting in a
-lower quorum size while keeping the network safe.
-
- Question: How does a validator get into the negative UNL? How is a
-validator removed from the negative UNL? 
-
-A validator’s reliability is measured by other validators. If a validator
-becomes unreliable, at a flag ledger, other validators propose *UNLModify*
-pseudo-transactions which vote the validator to add to the negative UNL during
-the consensus session. If agreed, the validator is added to the negative UNL at
-the next flag ledger. The mechanism of removing a validator from the negative
-UNL is the same.
-
-### Question: Given a negative UNL, what happens if the UNL changes?
-
-Answer: Let’s consider the cases: 
-
-1. A validator is added to the UNL, and it is already in the negative UNL. This
-case could happen when not all the nodes have the same UNL. Note that the
-negative UNL on the ledger lists unreliable nodes that are not necessarily the
-validators for everyone.
-
-    In this case, the liveness is affected negatively. Because the minimum
-    quorum could be larger but the usable validators are not increased.
-
-1. A validator is removed from the UNL, and it is in the negative UNL.
-
-    In this case, the liveness is affected positively. Because the quorum could
-    be smaller but the usable validators are not reduced.
-
-1. A validator is added to the UNL, and it is not in the negative UNL.
-1. A validator is removed from the UNL, and it is not in the negative UNL.
-	
-    Case 3 and 4 are not affected by the negative UNL protocol.
-
-### Question: Can we simply lower the quorum to 60% without the negative UNL? 
-
-Answer: No, because the negative UNL approach is safer.
-
-First let’s compare the two approaches intuitively, (1) the *negative UNL*
-approach, and (2) *lower quorum*: simply lowering the quorum from 80% to 60%
-without the negative UNL. The negative UNL approach uses consensus to come up
-with a list of unreliable validators, which are then removed from the effective
-UNL temporarily. With this approach, the list of unreliable validators is agreed
-to by a quorum of validators and will be used by every node in the network to
-adjust its UNL. The quorum is always 80% of the effective UNL. The lower quorum
-approach is a tradeoff between safety and liveness and against our principle of
-preferring safety over liveness. Note that different validators don't have to
-agree on which validation sources they are ignoring.
-
-Next we compare the two approaches quantitatively with examples, and apply
-Theorem 8 of [Analysis of the XRP Ledger Consensus
-Protocol](https://arxiv.org/abs/1802.07242) paper:
-
-*XRP LCP guarantees fork safety if **Oi,j > nj / 2 +
-ni − qi + ti,j** for every pair of nodes
-Pi, Pj,*
-
-where *Oi,j* is the overlapping requirement, nj and
-ni are UNL sizes, qi is the quorum size of Pi,
-*ti,j = min(ti, tj, Oi,j)*, and
-ti and tj are the number of faults can be tolerated by
-Pi and Pj.
-
-We denote *UNLi* as *Pi's UNL*, and *|UNLi|* as
-the size of *Pi's UNL*.
-
-Assuming *|UNLi| = |UNLj|*, let's consider the following
-three cases:
-
-1. With 80% quorum and 20% faults, *Oi,j > 100% / 2 + 100% - 80% +
-20% = 90%*. I.e. fork safety requires > 90% UNL overlaps. This is one of the
-results in the analysis paper.
-
-1. If the quorum is 60%, the relationship between the overlapping requirement
-and the faults that can be tolerated is *Oi,j > 90% +
-ti,j*. Under the same overlapping condition (i.e. 90%), to guarantee
-the fork safety, the network cannot tolerate any faults. So under the same
-overlapping condition, if the quorum is simply lowered, the network can tolerate
-fewer faults.
-
-1. With the negative UNL approach, we want to argue that the inequation
-*Oi,j > nj / 2 + ni − qi +
-ti,j* is always true to guarantee fork safety, while the negative UNL
-protocol runs, i.e. the effective quorum is lowered without weakening the
-network's fault tolerance. To make the discussion easier, we rewrite the
-inequation as *Oi,j > nj / 2 + (ni −
-qi) + min(ti, tj)*, where Oi,j is
-dropped from the definition of ti,j because *Oi,j >
-min(ti, tj)* always holds under the parameters we will
-use. Assuming a validator V is added to the negative UNL, now let's consider the
-4 cases:
-
-    1. V is not on UNLi nor UNLj
-
-        The inequation holds because none of the variables change.
-
-    1. V is on UNLi but not on UNLj
-
-        The value of *(ni − qi)* is smaller. The value of
-        *min(ti, tj)* could be smaller too. Other
-        variables do not change. Overall, the left side of the inequation does
-        not change, but the right side is smaller. So the inequation holds.
-    
-    1. V is not on UNLi but on UNLj
-
-        The value of *nj / 2* is smaller. The value of
-        *min(ti, tj)* could be smaller too. Other
-        variables do not change. Overall, the left side of the inequation does
-        not change, but the right side is smaller. So the inequation holds.
-    
-    1. V is on both UNLi and UNLj
-
-        The value of *Oi,j* is reduced by 1. The values of
-        *nj / 2*, *(ni − qi)*, and
-        *min(ti, tj)* are reduced by 0.5, 0.2, and 1
-        respectively. The right side is reduced by 1.7. Overall, the left side
-        of the inequation is reduced by 1, and the right side is reduced by 1.7.
-        So the inequation holds.
-
-    The inequation holds for all the cases. So with the negative UNL approach,
-    the network's fork safety is preserved, while the quorum is lowered that
-    increases the network's liveness.
-
- Question: We have observed that occasionally a validator wanders off on its
-own chain. How is this case handled by the negative UNL algorithm? 
-
-Answer: The case that a validator wanders off on its own chain can be measured
-with the validations agreement. Because the validations by this validator must
-be different from other validators' validations of the same sequence numbers.
-When there are enough disagreed validations, other validators will vote this
-validator onto the negative UNL.
-
-In general by measuring the agreement of validations, we also measured the
-"sanity". If two validators have too many disagreements, one of them could be
-insane. When enough validators think a validator is insane, that validator is
-put on the negative UNL.
-
- Question: Why would there be at most one disable UNLModify and one
-re-enable UNLModify transaction per flag ledger? 
-
-Answer: It is a design choice so that the effective UNL does not change too
-quickly. A typical targeted scenario is several validators go offline slowly
-during a long weekend. The current design can handle this kind of cases well
-without changing the effective UNL too quickly.
-
-## Appendix
-
-### Confidence Test
-
-We will use two test networks, a single machine test network with multiple IP
-addresses and the QE test network with multiple machines. The single machine
-network will be used to test all the test cases and to debug. The QE network
-will be used after that. We want to see the test cases still pass with real
-network delay. A test case specifies:
-
-1. a UNL with different number of validators for different test cases,
-1. a network with zero or more non-validator nodes, 
-1. a sequence of validator reliability change events (by killing/restarting
-   nodes, or by running modified rippled that does not send all validation
-   messages),
-1. the correct outcomes. 
-
-For all the test cases, the correct outcomes are verified by examining logs. We
-will grep the log to see if the correct negative UNLs are generated, and whether
-or not the network is making progress when it should be. The ripdtop tool will
-be helpful for monitoring validators' states and ledger progress. Some of the
-timing parameters of rippled will be changed to have faster ledger time. Most if
-not all test cases do not need client transactions.
-
-For example, the test cases for the prototype:
-1. A 10-validator UNL.
-1. The network does not have other nodes.
-1. The validators will be started from the genesis. Once they start to produce
-   ledgers, we kill five validators, one every flag ledger interval. Then we
-   will restart them one by one.
-1. A sequence of events (or the lack of events) such as a killed validator is
-   added to the negative UNL.
-
-#### Roads Not Taken: Test with Extended CSF 
-
-We considered testing with the current unit test framework, specifically the
-[Consensus Simulation
-Framework](https://github.com/ripple/rippled/blob/develop/src/test/csf/README.md)
-(CSF). However, the CSF currently can only test the generic consensus algorithm
-as in the paper: [Analysis of the XRP Ledger Consensus
-Protocol](https://arxiv.org/abs/1802.07242).
\ No newline at end of file
diff --git a/docs/0001-negative-unl/negativeUNLSqDiagram.puml b/docs/0001-negative-unl/negativeUNLSqDiagram.puml
deleted file mode 100644
index 8cb491af6..000000000
--- a/docs/0001-negative-unl/negativeUNLSqDiagram.puml
+++ /dev/null
@@ -1,79 +0,0 @@
-@startuml negativeUNL_highLevel_sequence
-
-skinparam sequenceArrowThickness 2
-skinparam roundcorner 20
-skinparam maxmessagesize 160
-
-actor "Rippled Start" as RS
-participant "Timer" as T 
-participant "NetworkOPs" as NOP 
-participant "ValidatorList" as VL #lightgreen
-participant "Consensus" as GC
-participant "ConsensusAdaptor" as CA #lightgreen
-participant "Validations" as RM #lightgreen
-
-RS -> NOP: begin consensus
-activate NOP
-NOP -[#green]> VL: update negative UNL
-hnote over VL#lightgreen: store a copy of\nnegative UNL
-VL -> NOP
-NOP -> VL: update trusted validators
-activate VL
-VL -> VL: re-calculate quorum
-hnote over VL#lightgreen: ignore negative listed validators\nwhen calculate quorum 
-VL -> NOP
-deactivate VL
-NOP -> GC: start round
-activate GC
-GC -> GC: phase = OPEN
-GC -> NOP
-deactivate GC
-deactivate NOP
-
-loop at regular frequency
-T -> GC: timerEntry
-activate GC
-end
-
-alt phase == OPEN
-    alt should close ledger 
-        GC -> GC: phase = ESTABLISH
-        GC -> CA: onClose
-        activate CA
-            alt sqn%256==0 
-            CA -[#green]> RM: getValidations
-            CA -[#green]> CA: create UNLModify Tx 
-            hnote over CA#lightgreen: use validatations of the last 256 ledgers\nto figure out UNLModify Tx candidates.\nIf any, create UNLModify Tx, and add to TxSet.               
-            end
-        CA -> GC
-        GC -> CA: propose
-        deactivate CA
-    end
-else phase == ESTABLISH
-    hnote over GC: receive peer postions
-    GC -> GC : update our position
-    GC -> CA : propose \n(if position changed)
-    GC -> GC : check if have consensus
-    alt consensus reached
-        GC -> GC: phase = ACCEPT
-        GC -> CA : onAccept
-        activate CA
-            CA -> CA : build LCL
-            hnote over CA #lightgreen: copy negative UNL from parent ledger
-            alt sqn%256==0
-                CA -[#green]> CA: Adjust negative UNL 
-                CA -[#green]> CA: apply UNLModify Tx
-            end
-            CA -> CA : validate and send validation message
-            activate NOP
-                CA -> NOP : end consensus and\nbegin next consensus round
-            deactivate NOP
-        deactivate CA        
-        hnote over RM: receive validations
-    end
-else phase == ACCEPTED
-    hnote over GC: timerEntry hash nothing to do at this phase
-end
-deactivate GC
-
-@enduml
\ No newline at end of file
diff --git a/docs/0001-negative-unl/negativeUNL_highLevel_sequence.png b/docs/0001-negative-unl/negativeUNL_highLevel_sequence.png
deleted file mode 100644
index b962693b4..000000000
Binary files a/docs/0001-negative-unl/negativeUNL_highLevel_sequence.png and /dev/null differ
diff --git a/docs/0010-ledger-replay/README.md b/docs/0010-ledger-replay/README.md
deleted file mode 100644
index 170fd15c4..000000000
--- a/docs/0010-ledger-replay/README.md
+++ /dev/null
@@ -1,88 +0,0 @@
-# Ledger Replay
-
-`LedgerReplayer` is a new `Stoppable` for replaying ledgers.
-Patterned after two other `Stoppable`s under `JobQueue`---`InboundLedgers`
-and `InboundTransactions`---it acts like a factory for creating
-state-machine workers, and a network message demultiplexer for those workers.
-Think of these workers like asynchronous functions.
-Like functions, they each take a set of parameters.
-The `Stoppable` memoizes these functions. It maintains a table for each
-worker type, mapping sets of arguments to the worker currently working
-on that argument set.
-Whenever the `Stoppable` is asked to construct a worker, it first searches its
-table to see if there is an existing worker with the same or overlapping
-argument set.
-If one exists, then it is used. If not, then a new one is created,
-initialized, and added to the table.
-
-For `LedgerReplayer`, there are three worker types: `LedgerReplayTask`,
-`SkipListAcquire`, and `LedgerDeltaAcquire`.
-Each is derived from `TimeoutCounter` to give it a timeout.
-For `LedgerReplayTask`, the parameter set
-is {reason, finish ledger ID, number of ledgers}. For `SkipListAcquire` and
-`LedgerDeltaAcquire`, there is just one parameter: a ledger ID.
-
-Each `Stoppable` has an entry point. For `LedgerReplayer`, it is `replay`.
-`replay` creates two workers: a `LedgerReplayTask` and a `SkipListAcquire`.
-`LedgerDeltaAcquire`s are created in the callback for when the skip list
-returns.
-
-For `SkipListAcquire` and `LedgerDeltaAcquire`, initialization fires off the
-underlying asynchronous network request and starts the timeout. The argument
-set identifying the worker is included in the network request, and copied to
-the network response. `SkipListAcquire` sends a request for a proof path for
-the skip list of the desired ledger. `LedgerDeltaAcquire` sends a request for
-the transaction set of the desired ledger.
-
-`LedgerReplayer` is also a network message demultiplexer.
-When a response arrives for a request that was sent by a `SkipListAcquire` or
-`LedgerDeltaAcquire` worker, the `Peer` object knows to send it to the
-`LedgerReplayer`, which looks up the worker waiting for that response based on
-the identifying argument set included in the response.
-
-`LedgerReplayTask` may ask `InboundLedgers` to send requests to acquire
-the start ledger, but there is no way to attach a callback or be notified when
-the `InboundLedger` worker completes. All the responses for its messages will
-be directed to `InboundLedgers`, not `LedgerReplayer`. Instead,
-`LedgerReplayTask` checks whether the start ledger has arrived every time its
-timeout expires.
-
-Like a promise, each worker keeps track of whether it is pending (`!isDone()`)
-or whether it has resolved successfully (`complete_ == true`) or unsuccessfully
-(`failed_ == true`). It will never exist in both resolved states at once, nor
-will it return to a pending state after reaching a resolved state.
-
-Like promises, some workers can accept continuations to be called when they
-reach a resolved state, or immediately if they are already resolved.
-`SkipListAcquire` and `LedgerDeltaAcquire` both accept continuations of a type
-specific to their payload, both via a method named `addDataCallback()`. Continuations
-cannot be removed explicitly, but they are held by `std::weak_ptr` so they can
-be removed implicitly.
-
-`LedgerReplayTask` is simultaneously:
-
-1. an asynchronous function,
-1. a continuation to one `SkipListAcquire` asynchronous function,
-1. a continuation to zero or more `LedgerDeltaAcquire` asynchronous functions, and
-1. a continuation to its own timeout.
-
-Each of these roles corresponds to different entry points:
-
-1. `init()`
-1. the callback added to `SkipListAcquire`, which calls `updateSkipList(...)` or `cancel()`
-1. the callback added to `LedgerDeltaAcquire`, which calls `deltaReady(...)` or `cancel()`
-1. `onTimer()`
-
-Each of these entry points does something unique to that entry point. They
-either (a) transition `LedgerReplayTask` to a terminal failed resolved state
-(`cancel()` and `onTimer()`) or (b) try to make progress toward the successful
-resolved state. `init()` and `updateSkipList(...)` call `trigger()` while
-`deltaReady(...)` calls `tryAdvance()`. There's a similarity between this
-pattern and the way coroutines are implemented, where every yield saves the spot
-in the code where it left off and every resume jumps back to that spot.
-
-### Sequence Diagram
-
-
-### Class Diagram
-
diff --git a/docs/0010-ledger-replay/ledger_replay_classes.png b/docs/0010-ledger-replay/ledger_replay_classes.png
deleted file mode 100644
index f4cbab629..000000000
Binary files a/docs/0010-ledger-replay/ledger_replay_classes.png and /dev/null differ
diff --git a/docs/0010-ledger-replay/ledger_replay_classes.puml b/docs/0010-ledger-replay/ledger_replay_classes.puml
deleted file mode 100644
index 4c90ef251..000000000
--- a/docs/0010-ledger-replay/ledger_replay_classes.puml
+++ /dev/null
@@ -1,98 +0,0 @@
-@startuml
-
-class TimeoutCounter {
-  #app_ : Application&
-}
-
-TimeoutCounter o-- "1" Application 
-': app_
-
-Stoppable <.. Application
-
-class Application {
-  -m_ledgerReplayer : uptr
-  -m_inboundLedgers : uptr
-}
-
-Application *-- "1" LedgerReplayer 
-': m_ledgerReplayer
-Application *-- "1" InboundLedgers 
-': m_inboundLedgers
-
-Stoppable <.. InboundLedgers
-Application "1" --o InboundLedgers 
-': app_
-
-class InboundLedgers {
-  -app_ : Application&
-}
-
-Stoppable <.. LedgerReplayer
-InboundLedgers "1" --o LedgerReplayer 
-': inboundLedgers_
-Application "1" --o LedgerReplayer 
-': app_
-
-class LedgerReplayer {
-  +createDeltas(LedgerReplayTask)
-  -app_ : Application&
-  -inboundLedgers_ : InboundLedgers&
-  -tasks_ : vector>
-  -deltas_ : hash_map>
-  -skipLists_ : hash_map>
-}
-
-LedgerReplayer *-- LedgerReplayTask 
-': tasks_
-LedgerReplayer o-- LedgerDeltaAcquire 
-': deltas_
-LedgerReplayer o-- SkipListAcquire 
-': skipLists_
-
-TimeoutCounter <.. LedgerReplayTask
-InboundLedgers "1" --o LedgerReplayTask 
-': inboundLedgers_
-LedgerReplayer "1" --o LedgerReplayTask 
-': replayer_
-
-class LedgerReplayTask {
-  -inboundLedgers_ : InboundLedgers&
-  -replayer_ : LedgerReplayer&
-  -skipListAcquirer_ : sptr
-  -deltas_ : vector>
-  +addDelta(sptr)
-}
-
-LedgerReplayTask *-- "1" SkipListAcquire 
-': skipListAcquirer_
-LedgerReplayTask *-- LedgerDeltaAcquire 
-': deltas_
-
-TimeoutCounter <.. SkipListAcquire
-InboundLedgers "1" --o SkipListAcquire 
-': inboundLedgers_
-LedgerReplayer "1" --o SkipListAcquire 
-': replayer_
-LedgerReplayTask --o SkipListAcquire : implicit via callback
-
-class SkipListAcquire {
-  +addDataCallback(callback)
-  -inboundLedgers_ : InboundLedgers&
-  -replayer_ : LedgerReplayer&
-  -dataReadyCallbacks_ : vector
-}
-
-TimeoutCounter <.. LedgerDeltaAcquire
-InboundLedgers "1" --o LedgerDeltaAcquire 
-': inboundLedgers_
-LedgerReplayer "1" --o LedgerDeltaAcquire 
-': replayer_
-LedgerReplayTask --o LedgerDeltaAcquire : implicit via callback
-
-class LedgerDeltaAcquire {
-  +addDataCallback(callback)
-  -inboundLedgers_ : InboundLedgers&
-  -replayer_ : LedgerReplayer&
-  -dataReadyCallbacks_ : vector
-}
-@enduml
\ No newline at end of file
diff --git a/docs/0010-ledger-replay/ledger_replay_sequence.png b/docs/0010-ledger-replay/ledger_replay_sequence.png
deleted file mode 100644
index 37bcbd506..000000000
Binary files a/docs/0010-ledger-replay/ledger_replay_sequence.png and /dev/null differ
diff --git a/docs/0010-ledger-replay/ledger_replay_sequence.puml b/docs/0010-ledger-replay/ledger_replay_sequence.puml
deleted file mode 100644
index 481819b5e..000000000
--- a/docs/0010-ledger-replay/ledger_replay_sequence.puml
+++ /dev/null
@@ -1,85 +0,0 @@
-@startuml
-
-autoactivate on
-
-' participant app as "Application"
-participant peer as "Peer"
-participant lr as "LedgerReplayer"
-participant lrt as "LedgerReplayTask"
-participant sla as "SkipListAcquire"
-participant lda as "LedgerDeltaAcquire"
-
-[-> lr : replay(finishId, numLedgers)
-  lr -> sla : make_shared(finishHash)
-  return skipList
-  lr -> lrt : make_shared(skipList)
-  return task
-  lr -> sla : init(numPeers=1)
-    sla -> sla : trigger(numPeers=1)
-      sla -> peer : sendRequest(ProofPathRequest)
-      return
-    return
-  return
-  lr -> lrt : init()
-    lrt -> sla : addDataCallback(callback)
-    return
-  return
-deactivate lr
-
-[-> peer : onMessage(ProofPathResponse)
-  peer -> lr : gotSkipList(ledgerHeader, item)
-    lr -> sla : processData(ledgerSeq, item)
-      sla -> sla : onSkipListAcquired(skipList, ledgerSeq)
-        sla -> sla : notify()
-        note over sla: call the callbacks added by\naddDataCallback(callback).
-          sla -> lrt : callback(ledgerId)
-            lrt -> lrt : updateSkipList(ledgerId, ledgerSeq, skipList)
-            lrt -> lr : createDeltas(this)
-            loop
-              lr -> lda : make_shared(ledgerId, ledgerSeq)
-              return delta
-              lr -> lrt : addDelta(delta)              
-                lrt -> lda : addDataCallback(callback)
-                return
-              return
-              lr -> lda : init(numPeers=1)
-                lda -> lda : trigger(numPeers=1)
-                  lda -> peer : sendRequest(ReplayDeltaRequest)
-                  return
-                return
-              return
-              end
-            return
-          return
-        return
-      return
-    return
-  return
-deactivate peer
-
-[-> peer : onMessage(ReplayDeltaResponse)
-  peer -> lr : gotReplayDelta(ledgerHeader)
-    lr -> lda : processData(ledgerHeader, txns)
-      lda -> lda : notify()
-      note over lda: call the callbacks added by\naddDataCallback(callback).
-        lda -> lrt : callback(ledgerId) 
-        lrt -> lrt : deltaReady(ledgerId)
-          lrt -> lrt : tryAdvance()
-            loop as long as child can be built
-            lrt -> lda : tryBuild(parent)
-              lda -> lda : onLedgerBuilt()
-                note over lda
-                  Schedule a job to store the built ledger.
-                end note
-              return
-            return child
-            end
-          return
-        return
-      return
-    return
-  return
-deactivate peer
-
-
-@enduml
\ No newline at end of file
diff --git a/docs/CheatSheet.md b/docs/CheatSheet.md
deleted file mode 100644
index 3b70c7c8f..000000000
--- a/docs/CheatSheet.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# Code Style Cheat Sheet
-
-## Form
-
-- One class per header file.
-- Place each data member on its own line.
-- Place each ctor-initializer on its own line.
-- Create typedefs for primitive types to describe them.
-- Return descriptive local variables instead of constants.
-- Use long descriptive names instead of abbreviations.
-- Use "explicit" for single-argument ctors
-- Avoid globals especially objects with static storage duration
-- Order class declarations as types, public, protected, private, then data.
-- Prefer 'private' over 'protected'
-
-## Function
-
-- Minimize external dependencies
-  * Pass options in the ctor instead of using theConfig
-  * Use as few other classes as possible
diff --git a/docs/CodingStyle.md b/docs/CodingStyle.md
deleted file mode 100644
index 0ff50c780..000000000
--- a/docs/CodingStyle.md
+++ /dev/null
@@ -1,82 +0,0 @@
-# Coding Standards
-
-Coding standards used here gradually evolve and propagate through 
-code reviews. Some aspects are enforced more strictly than others.
-
-## Rules
-
-These rules only apply to our own code. We can't enforce any sort of 
-style on the external repositories and libraries we include. The best
-guideline is to maintain the standards that are used in those libraries.
-
-* Tab inserts 4 spaces. No tab characters.
-* Braces are indented in the [Allman style][1].
-* Modern C++ principles. No naked ```new``` or ```delete```.
-* Line lengths limited to 80 characters. Exceptions limited to data and tables.
-
-## Guidelines
-
-If you want to do something contrary to these guidelines, understand
-why you're doing it. Think, use common sense, and consider that this
-your changes will probably need to be maintained long after you've
-moved on to other projects.
-
-* Use white space and blank lines to guide the eye and keep your intent clear.
-* Put private data members at the top of a class, and the 6 public special
-members immediately after, in the following order:
-  * Destructor
-  * Default constructor
-  * Copy constructor
-  * Copy assignment
-  * Move constructor
-  * Move assignment
-* Don't over-inline by defining large functions within the class
-declaration, not even for template classes.
-
-## Formatting
-
-The goal of source code formatting should always be to make things as easy to
-read as possible. White space is used to guide the eye so that details are not
-overlooked. Blank lines are used to separate code into "paragraphs."
-
-* Always place a space before and after all binary operators,
-  especially assignments (`operator=`).
-* The `!` operator should be preceded by a space, but not followed by one.
-* The `~` operator should be preceded by a space, but not followed by one.
-* The `++` and `--` operators should have no spaces between the operator and
-  the operand.
-* A space never appears before a comma, and always appears after a comma.
-* Don't put spaces after a parenthesis. A typical member function call might
-  look like this: `foobar (1, 2, 3);`
-* In general, leave a blank line before an `if` statement.
-* In general, leave a blank line after a closing brace `}`.
-* Do not place code on the same line as any opening or
-  closing brace.
-* Do not write `if` statements all-on-one-line. The exception to this is when
-  you've got a sequence of similar `if` statements, and are aligning them all
-  vertically to highlight their similarities.
-* In an `if-else` statement, if you surround one half of the statement with
-  braces, you also need to put braces around the other half, to match.
-* When writing a pointer type, use this spacing: `SomeObject* myObject`.
-  Technically, a more correct spacing would be `SomeObject *myObject`, but
-  it makes more sense for the asterisk to be grouped with the type name,
-  since being a pointer is part of the type, not the variable name. The only
-  time that this can lead to any problems is when you're declaring multiple
-  pointers of the same type in the same statement - which leads on to the next
-  rule:
-* When declaring multiple pointers, never do so in a single statement, e.g.
-  `SomeObject* p1, *p2;` - instead, always split them out onto separate lines
-  and write the type name again, to make it quite clear what's going on, and
-  avoid the danger of missing out any vital asterisks.
-* The previous point also applies to references, so always put the `&` next to
-  the type rather than the variable, e.g. `void foo (Thing const& thing)`. And
-  don't put a space on both sides of the `*` or `&` - always put a space after
-  it, but never before it.
-* The word `const` should be placed to the right of the thing that it modifies,
-  for consistency. For example `int const` refers to an int which is const.
-  `int const*` is a pointer to an int which is const. `int *const` is a const
-  pointer to an int.
-* Always place a space in between the template angle brackets and the type
-  name. Template code is already hard enough to read!
-
-[1]: http://en.wikipedia.org/wiki/Indent_style#Allman_style
diff --git a/docs/Docker.md b/docs/Docker.md
deleted file mode 100644
index 9f67c87ee..000000000
--- a/docs/Docker.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# `rippled` Docker Image
-
-- Some info relating to Docker containers can be found here: [../Builds/containers](../Builds/containers)
-- Images for building and testing rippled can be found here: [thejohnfreeman/rippled-docker](https://github.com/thejohnfreeman/rippled-docker/)
-  - These images do not have rippled. They have all the tools necessary to build rippled.
diff --git a/docs/Dockerfile b/docs/Dockerfile
deleted file mode 100644
index d716ca213..000000000
--- a/docs/Dockerfile
+++ /dev/null
@@ -1,32 +0,0 @@
-FROM ubuntu:16.04
-
-RUN apt -y update
-RUN apt -y upgrade
-RUN apt -y install build-essential g++ git libbz2-dev wget python-dev
-RUN apt -y install cmake flex bison graphviz graphviz-dev libicu-dev
-RUN apt -y install jarwrapper java-common
-
-RUN cd /tmp
-ENV CM_INSTALLER=cmake-3.10.0-rc3-Linux-x86_64.sh
-ENV CM_VER_DIR=/opt/local/cmake-3.10.0
-RUN cd /tmp && wget https://cmake.org/files/v3.10/$CM_INSTALLER && chmod a+x $CM_INSTALLER
-RUN mkdir -p $CM_VER_DIR
-RUN ln -s $CM_VER_DIR /opt/local/cmake
-RUN /tmp/$CM_INSTALLER --prefix=$CM_VER_DIR --exclude-subdir
-RUN rm -f /tmp/$CM_INSTALLER
-
-RUN cd /tmp && wget https://ftp.stack.nl/pub/users/dimitri/doxygen-1.8.14.src.tar.gz
-RUN cd /tmp && tar xvf doxygen-1.8.14.src.tar.gz
-RUN mkdir -p /tmp/doxygen-1.8.14/build
-RUN cd /tmp/doxygen-1.8.14/build && /opt/local/cmake/bin/cmake -G "Unix Makefiles" ..
-RUN cd /tmp/doxygen-1.8.14/build && make -j2
-RUN cd /tmp/doxygen-1.8.14/build && make install
-RUN rm -f /tmp/doxygen-1.8.14.src.tar.gz
-RUN rm -rf /tmp/doxygen-1.8.14
-
-RUN mkdir -p /opt/plantuml
-RUN wget -O /opt/plantuml/plantuml.jar http://sourceforge.net/projects/plantuml/files/plantuml.jar/download
-ENV DOXYGEN_PLANTUML_JAR_PATH=/opt/plantuml/plantuml.jar
-
-ENV DOXYGEN_OUTPUT_DIRECTORY=html
-CMD cd /opt/rippled && doxygen docs/Doxyfile
diff --git a/docs/Doxyfile b/docs/Doxyfile
deleted file mode 100644
index 48a0b5d1e..000000000
--- a/docs/Doxyfile
+++ /dev/null
@@ -1,344 +0,0 @@
-#---------------------------------------------------------------------------
-# Project related configuration options
-#---------------------------------------------------------------------------
-DOXYFILE_ENCODING      = UTF-8
-PROJECT_NAME           = "rippled"
-PROJECT_NUMBER         =
-PROJECT_BRIEF          =
-PROJECT_LOGO           =
-PROJECT_LOGO           =
-OUTPUT_DIRECTORY       = $(DOXYGEN_OUTPUT_DIRECTORY)
-CREATE_SUBDIRS         = NO
-ALLOW_UNICODE_NAMES    = NO
-OUTPUT_LANGUAGE        = English
-BRIEF_MEMBER_DESC      = YES
-REPEAT_BRIEF           = YES
-ABBREVIATE_BRIEF       =
-ALWAYS_DETAILED_SEC    = NO
-INLINE_INHERITED_MEMB  = YES
-FULL_PATH_NAMES        = NO
-STRIP_FROM_PATH        = src/
-STRIP_FROM_INC_PATH    =
-SHORT_NAMES            = NO
-JAVADOC_AUTOBRIEF      = YES
-QT_AUTOBRIEF           = NO
-MULTILINE_CPP_IS_BRIEF = NO
-INHERIT_DOCS           = YES
-SEPARATE_MEMBER_PAGES  = NO
-TAB_SIZE               = 4
-ALIASES                =
-OPTIMIZE_OUTPUT_FOR_C  = NO
-OPTIMIZE_OUTPUT_JAVA   = NO
-OPTIMIZE_FOR_FORTRAN   = NO
-OPTIMIZE_OUTPUT_VHDL   = NO
-EXTENSION_MAPPING      =
-MARKDOWN_SUPPORT       = YES
-AUTOLINK_SUPPORT       = YES
-BUILTIN_STL_SUPPORT    = YES
-CPP_CLI_SUPPORT        = NO
-SIP_SUPPORT            = NO
-IDL_PROPERTY_SUPPORT   = YES
-DISTRIBUTE_GROUP_DOC   = NO
-GROUP_NESTED_COMPOUNDS = NO
-SUBGROUPING            = YES
-INLINE_GROUPED_CLASSES = NO
-INLINE_SIMPLE_STRUCTS  = NO
-TYPEDEF_HIDES_STRUCT   = NO
-LOOKUP_CACHE_SIZE      = 0
-
-#---------------------------------------------------------------------------
-# Build related configuration options
-#---------------------------------------------------------------------------
-EXTRACT_ALL            = YES
-EXTRACT_PRIVATE        = YES
-EXTRACT_PACKAGE        = NO
-EXTRACT_STATIC         = YES
-EXTRACT_LOCAL_CLASSES  = YES
-EXTRACT_LOCAL_METHODS  = YES
-EXTRACT_ANON_NSPACES   = NO
-HIDE_UNDOC_MEMBERS     = NO
-HIDE_UNDOC_CLASSES     = NO
-HIDE_FRIEND_COMPOUNDS  = NO
-HIDE_IN_BODY_DOCS      = NO
-INTERNAL_DOCS          = NO
-CASE_SENSE_NAMES       = YES
-HIDE_SCOPE_NAMES       = NO
-HIDE_COMPOUND_REFERENCE= NO
-SHOW_INCLUDE_FILES     = NO
-SHOW_GROUPED_MEMB_INC  = NO
-FORCE_LOCAL_INCLUDES   = NO
-INLINE_INFO            = NO
-SORT_MEMBER_DOCS       = NO
-SORT_BRIEF_DOCS        = NO
-SORT_MEMBERS_CTORS_1ST = YES
-SORT_GROUP_NAMES       = NO
-SORT_BY_SCOPE_NAME     = NO
-STRICT_PROTO_MATCHING  = NO
-GENERATE_TODOLIST      = NO
-GENERATE_TESTLIST      = NO
-GENERATE_BUGLIST       = NO
-GENERATE_DEPRECATEDLIST= NO
-ENABLED_SECTIONS       =
-MAX_INITIALIZER_LINES  = 30
-SHOW_USED_FILES        = NO
-SHOW_FILES             = NO
-SHOW_NAMESPACES        = YES
-FILE_VERSION_FILTER    =
-LAYOUT_FILE            =
-CITE_BIB_FILES         =
-
-#---------------------------------------------------------------------------
-# Configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-QUIET                  = NO
-WARNINGS               = YES
-WARN_IF_UNDOCUMENTED   = YES
-WARN_IF_DOC_ERROR      = YES
-WARN_NO_PARAMDOC       = NO
-WARN_AS_ERROR          = NO
-WARN_FORMAT            = "$file:$line: $text"
-WARN_LOGFILE           =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the input files
-#---------------------------------------------------------------------------
-INPUT                  = \
-    docs \
-    src/ripple \
-    src/test \
-    src/README.md \
-    README.md \
-    RELEASENOTES.md \
-
-
-INPUT_ENCODING         = UTF-8
-FILE_PATTERNS          = *.h *.cpp *.md
-RECURSIVE              = YES
-EXCLUDE                =
-EXCLUDE_SYMLINKS       = NO
-EXCLUDE_PATTERNS       =
-EXCLUDE_SYMBOLS        =
-EXAMPLE_PATH           =
-EXAMPLE_PATTERNS       =
-EXAMPLE_RECURSIVE      = NO
-IMAGE_PATH             =  \
-     docs/images/            \
-     docs/images/consensus/  \
-     src/test/csf/     \
-
-INPUT_FILTER           =
-FILTER_PATTERNS        =
-FILTER_SOURCE_FILES    = NO
-FILTER_SOURCE_PATTERNS =
-USE_MDFILE_AS_MAINPAGE = src/README.md
-
-#---------------------------------------------------------------------------
-# Configuration options related to source browsing
-#---------------------------------------------------------------------------
-SOURCE_BROWSER         = YES
-INLINE_SOURCES         = NO
-STRIP_CODE_COMMENTS    = YES
-REFERENCED_BY_RELATION = NO
-REFERENCES_RELATION    = NO
-REFERENCES_LINK_SOURCE = YES
-SOURCE_TOOLTIPS        = YES
-USE_HTAGS              = NO
-VERBATIM_HEADERS       = YES
-CLANG_ASSISTED_PARSING = NO
-CLANG_OPTIONS          =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the alphabetical class index
-#---------------------------------------------------------------------------
-ALPHABETICAL_INDEX     = YES
-COLS_IN_ALPHA_INDEX    = 5
-IGNORE_PREFIX          =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the HTML output
-#---------------------------------------------------------------------------
-GENERATE_HTML          = YES
-HTML_OUTPUT            = html
-HTML_FILE_EXTENSION    = .html
-HTML_HEADER            =
-HTML_FOOTER            =
-HTML_STYLESHEET        =
-HTML_EXTRA_STYLESHEET  =
-HTML_EXTRA_FILES       =
-HTML_COLORSTYLE_HUE    = 220
-HTML_COLORSTYLE_SAT    = 100
-HTML_COLORSTYLE_GAMMA  = 80
-HTML_TIMESTAMP         = NO
-HTML_DYNAMIC_SECTIONS  = NO
-HTML_INDEX_NUM_ENTRIES = 100
-GENERATE_DOCSET        = NO
-DOCSET_FEEDNAME        = "Doxygen generated docs"
-DOCSET_BUNDLE_ID       = org.doxygen.Project
-DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
-DOCSET_PUBLISHER_NAME  = Publisher
-GENERATE_HTMLHELP      = NO
-CHM_FILE               =
-HHC_LOCATION           =
-GENERATE_CHI           = NO
-CHM_INDEX_ENCODING     =
-BINARY_TOC             = NO
-TOC_EXPAND             = NO
-GENERATE_QHP           = NO
-QCH_FILE               =
-QHP_NAMESPACE          = org.doxygen.Project
-QHP_VIRTUAL_FOLDER     = doc
-QHP_CUST_FILTER_NAME   =
-QHP_CUST_FILTER_ATTRS  =
-QHP_SECT_FILTER_ATTRS  =
-QHG_LOCATION           =
-GENERATE_ECLIPSEHELP   = NO
-ECLIPSE_DOC_ID         = org.doxygen.Project
-DISABLE_INDEX          = NO
-GENERATE_TREEVIEW      = NO
-ENUM_VALUES_PER_LINE   = 4
-TREEVIEW_WIDTH         = 250
-EXT_LINKS_IN_WINDOW    = NO
-FORMULA_FONTSIZE       = 10
-FORMULA_TRANSPARENT    = YES
-USE_MATHJAX            = NO
-MATHJAX_FORMAT         = HTML-CSS
-MATHJAX_RELPATH        = http://cdn.mathjax.org/mathjax/latest
-MATHJAX_EXTENSIONS     =
-MATHJAX_CODEFILE       =
-SEARCHENGINE           = YES
-SERVER_BASED_SEARCH    = NO
-EXTERNAL_SEARCH        = NO
-SEARCHENGINE_URL       =
-SEARCHDATA_FILE        = searchdata.xml
-EXTERNAL_SEARCH_ID     =
-EXTRA_SEARCH_MAPPINGS  =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the LaTeX output
-#---------------------------------------------------------------------------
-GENERATE_LATEX         = NO
-LATEX_OUTPUT           = latex
-LATEX_CMD_NAME         =
-MAKEINDEX_CMD_NAME     = makeindex
-COMPACT_LATEX          = NO
-PAPER_TYPE             = a4
-EXTRA_PACKAGES         =
-LATEX_HEADER           =
-LATEX_FOOTER           =
-LATEX_EXTRA_STYLESHEET =
-LATEX_EXTRA_FILES      =
-PDF_HYPERLINKS         = YES
-USE_PDFLATEX           = YES
-LATEX_BATCHMODE        = NO
-LATEX_HIDE_INDICES     = NO
-LATEX_SOURCE_CODE      = NO
-LATEX_BIB_STYLE        = plain
-LATEX_TIMESTAMP        = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the RTF output
-#---------------------------------------------------------------------------
-GENERATE_RTF           = NO
-RTF_OUTPUT             = rtf
-COMPACT_RTF            = NO
-RTF_HYPERLINKS         = NO
-RTF_STYLESHEET_FILE    =
-RTF_EXTENSIONS_FILE    =
-RTF_SOURCE_CODE        = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the man page output
-#---------------------------------------------------------------------------
-GENERATE_MAN           = NO
-MAN_OUTPUT             = man
-MAN_EXTENSION          = .3
-MAN_SUBDIR             =
-MAN_LINKS              = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the XML output
-#---------------------------------------------------------------------------
-GENERATE_XML           = NO
-XML_OUTPUT             = xml
-XML_PROGRAMLISTING     = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the DOCBOOK output
-#---------------------------------------------------------------------------
-GENERATE_DOCBOOK       = NO
-DOCBOOK_OUTPUT         = docbook
-DOCBOOK_PROGRAMLISTING = NO
-
-#---------------------------------------------------------------------------
-# Configuration options for the AutoGen Definitions output
-#---------------------------------------------------------------------------
-GENERATE_AUTOGEN_DEF   = NO
-GENERATE_PERLMOD       = NO
-PERLMOD_LATEX          = NO
-PERLMOD_PRETTY         = YES
-PERLMOD_MAKEVAR_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the preprocessor
-#---------------------------------------------------------------------------
-ENABLE_PREPROCESSING   = YES
-MACRO_EXPANSION        = YES
-EXPAND_ONLY_PREDEF     = YES
-SEARCH_INCLUDES        = YES
-INCLUDE_PATH           = $(DOXYGEN_INCLUDE_PATH)
-INCLUDE_FILE_PATTERNS  =
-PREDEFINED             = DOXYGEN \
-                         GENERATING_DOCS \
-                         _MSC_VER \
-                         NUDB_POSIX_FILE=1
-
-EXPAND_AS_DEFINED      =
-SKIP_FUNCTION_MACROS   = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to external references
-#---------------------------------------------------------------------------
-TAGFILES               = $(DOXYGEN_TAGFILES)
-GENERATE_TAGFILE       =
-ALLEXTERNALS           = NO
-EXTERNAL_GROUPS        = YES
-EXTERNAL_PAGES         = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the dot tool
-#---------------------------------------------------------------------------
-CLASS_DIAGRAMS         = NO
-DIA_PATH               =
-HIDE_UNDOC_RELATIONS   = YES
-HAVE_DOT               = YES
-# DOT_NUM_THREADS = 0 means 1 for every processor.
-DOT_NUM_THREADS        = 0
-DOT_FONTNAME           = Helvetica
-DOT_FONTSIZE           = 10
-DOT_FONTPATH           =
-CLASS_GRAPH            = YES
-COLLABORATION_GRAPH    = YES
-GROUP_GRAPHS           = YES
-UML_LOOK               = NO
-UML_LIMIT_NUM_FIELDS   = 10
-TEMPLATE_RELATIONS     = NO
-INCLUDE_GRAPH          = YES
-INCLUDED_BY_GRAPH      = YES
-CALL_GRAPH             = NO
-CALLER_GRAPH           = NO
-GRAPHICAL_HIERARCHY    = YES
-DIRECTORY_GRAPH        = YES
-DOT_IMAGE_FORMAT       = png
-INTERACTIVE_SVG        = NO
-DOT_PATH               = $(DOXYGEN_DOT_PATH)
-DOTFILE_DIRS           =
-MSCFILE_DIRS           =
-DIAFILE_DIRS           =
-PLANTUML_JAR_PATH      = $(DOXYGEN_PLANTUML_JAR_PATH)
-PLANTUML_INCLUDE_PATH  =
-DOT_GRAPH_MAX_NODES    = 50
-MAX_DOT_GRAPH_DEPTH    = 0
-DOT_TRANSPARENT        = NO
-DOT_MULTI_TARGETS      = NO
-GENERATE_LEGEND        = YES
-DOT_CLEANUP            = YES
diff --git a/docs/HeapProfiling.md b/docs/HeapProfiling.md
deleted file mode 100644
index c8de1eb26..000000000
--- a/docs/HeapProfiling.md
+++ /dev/null
@@ -1,63 +0,0 @@
-## Heap profiling of rippled with jemalloc
-
-The jemalloc library provides a good API for doing heap analysis,
-including a mechanism to dump a description of the heap from within the
-running application via a function call. Details on how to perform this
-activity in general, as well as how to acquire the software, are available on
-the jemalloc site:
-[https://github.com/jemalloc/jemalloc/wiki/Use-Case:-Heap-Profiling](https://github.com/jemalloc/jemalloc/wiki/Use-Case:-Heap-Profiling)
-
-jemalloc is acquired separately from rippled, and is not affiliated
-with Ripple Labs. If you compile and install jemalloc from the
-source release with default options, it will install the library and header
-under `/usr/local/lib` and `/usr/local/include`, respectively. Heap
-profiling has been tested with rippled on a Linux platform. It should
-work on platforms on which both rippled and jemalloc are available.
-
-To link rippled with jemalloc, the argument
-`profile-jemalloc=` is provided after the optional target.
-The `` argument should be the same as that of the
-`--prefix` parameter passed to the jemalloc configure script when building.
-
-## Examples:
-
-Build rippled with jemalloc library under /usr/local/lib and
-header under /usr/local/include:
-
-    $ scons profile-jemalloc=/usr/local
-
-Build rippled using clang with the jemalloc library under /opt/local/lib
-and header under /opt/local/include:
-
-    $ scons clang profile-jemalloc=/opt/local
-
-----------------------
-
-## Using the jemalloc library from within the code
-
-The `profile-jemalloc` parameter enables a macro definition called
-`PROFILE_JEMALLOC`. Include the jemalloc header file as
-well as the api call(s) that you wish to make within preprocessor
-conditional groups, such as:
-
-In global scope:
-
-    #ifdef PROFILE_JEMALLOC
-    #include 
-    #endif
-
-And later, within a function scope:
-
-    #ifdef PROFILE_JEMALLOC
-    mallctl("prof.dump", NULL, NULL, NULL, 0);
-    #endif
-
-Fuller descriptions of how to acquire and use jemalloc's api to do memory
-analysis are available at the [jemalloc
-site.](http://www.canonware.com/jemalloc/)
-
-Linking against the jemalloc library will override
-the system's default `malloc()` and related functions with jemalloc's
-implementation. This is the case even if the code is not instrumented
-to use jemalloc's specific API.
-
diff --git a/docs/NodeStoreRefactoringCaseStudy.pdf b/docs/NodeStoreRefactoringCaseStudy.pdf
deleted file mode 100644
index 6cde8a2ee..000000000
Binary files a/docs/NodeStoreRefactoringCaseStudy.pdf and /dev/null differ
diff --git a/docs/README.md b/docs/README.md
deleted file mode 100644
index 55b9e30e0..000000000
--- a/docs/README.md
+++ /dev/null
@@ -1,63 +0,0 @@
-# Building documentation
-
-## Dependencies
-
-Install these dependencies:
-
-- [Doxygen](http://www.doxygen.nl): All major platforms have [official binary
-  distributions](http://www.doxygen.nl/download.html#srcbin), or you can
-  build from [source](http://www.doxygen.nl/download.html#srcbin).
-
-  - MacOS: We recommend installing via Homebrew: `brew install doxygen`.
-    The executable will be installed in `/usr/local/bin` which is already
-    in the default `PATH`.
-
-    If you use the official binary distribution, then you'll need to make
-    Doxygen available to your command line. You can do this by adding
-    a symbolic link from `/usr/local/bin` to the `doxygen` executable. For
-    example,
-
-    ```
-    $ ln -s /Applications/Doxygen.app/Contents/Resources/doxygen /usr/local/bin/doxygen
-    ```
-
-- [PlantUML](http://plantuml.com): 
-
-  1. Install a functioning Java runtime, if you don't already have one.
-  2. Download [`plantuml.jar`](http://sourceforge.net/projects/plantuml/files/plantuml.jar/download).
-
-- [Graphviz](https://www.graphviz.org):
-
-  - Linux: Install from your package manager.
-  - Windows: Use an [official installer](https://graphviz.gitlab.io/_pages/Download/Download_windows.html).
-  - MacOS: Install via Homebrew: `brew install graphviz`.
-
-
-## Docker
-
-Instead of installing the above dependencies locally, you can use the official
-build environment Docker image, which has all of them installed already.
-
-1. Install [Docker](https://docs.docker.com/engine/installation/)
-2. Pull the image:
-  ```
-  sudo docker pull rippleci/rippled-ci-builder:2944b78d22db
-  ```
-3. Run the image from the project folder:
-  ```
-  sudo docker run -v $PWD:/opt/rippled --rm rippleci/rippled-ci-builder:2944b78d22db
-  ```
-
-
-## Build
-
-There is a `docs` target in the CMake configuration.
-
-```
-mkdir build
-cd build
-cmake ..
-cmake --build . --target docs
-```
-
-The output will be in `build/docs/html`.
diff --git a/docs/consensus.md b/docs/consensus.md
deleted file mode 100644
index 1b0063663..000000000
--- a/docs/consensus.md
+++ /dev/null
@@ -1,683 +0,0 @@
-# Consensus and Validation
-
-**This section is a work in progress!!**
-
-Consensus is the task of reaching agreement within a distributed system in the
-presence of faulty or even malicious participants.  This document outlines the
-[XRP Ledger Consensus Algorithm](https://arxiv.org/abs/1802.07242)
-as implemented in [rippled](https://github.com/ripple/rippled), but
-focuses on its utility as a generic consensus algorithm independent of the
-detailed mechanics of the Ripple Consensus Ledger. Most notably, the algorithm
-does not require fully synchronous communication between all nodes in the
-network, or even a fixed network topology, but instead achieves consensus via
-collectively trusted subnetworks.
-
-## Distributed Agreement
-
-A challenge for distributed systems is reaching agreement on changes in shared
-state.  For the Ripple network, the shared state is the current ledger--account
-information, account balances, order books and other financial data.  We will
-refer to shared distributed state as a /ledger/ throughout the remainder of this
-document.
-
-
-
-As shown above, new ledgers are made by applying a set of transactions to the
-prior ledger.  For the Ripple network, transactions include payments,
-modification of account settings, updates to offers and more.
-
-In a centralized system, generating the next ledger is trivial since there is a
-single unique arbiter of which transactions to include and how to apply them to
-a ledger.  For decentralized systems, participants must resolve disagreements on
-the set of transactions to include, the order to apply those transactions, and
-even the resulting ledger after applying the transactions.  This is even more
-difficult when some participants are faulty or malicious.
-
-The Ripple network is a decentralized and **trust-full** network.  Anyone is free
-to join and participants are free to choose a subset of peers that are
-collectively trusted to not collude in an attempt to defraud the participant.
-Leveraging this network of trust, the Ripple algorithm has two main components.
-
-* *Consensus* in which network participants agree on the transactions to apply
-  to a prior ledger, based on the positions of their chosen peers.
-* *Validation* in which network participants agree on what ledger was
-  generated, based on the ledgers generated by chosen peers.
-
-These phases are continually repeated to process transactions submitted to the
-network, generating successive ledgers and giving rise to the blockchain ledger
-history depicted below.  In this diagram, time is flowing to the right, but
-links between ledgers point backward to the parent.  Also note the alternate
-Ledger 2 that was generated by some participants, but which failed validation
-and was abandoned.
-
-
-
-The remainder of this section describes the Consensus and Validation algorithms
-in more detail and is meant as a companion guide to understanding the generic
-implementation in `rippled`.  The document **does not** discuss correctness,
-fault-tolerance or liveness properties of the algorithms or the full details of
-how they integrate within `rippled` to support the Ripple Consensus Ledger.
-
-## Consensus Overview
-
-### Definitions
-
-* The *ledger* is the shared distributed state.  Each ledger has a unique ID to
-  distinguish it from all other ledgers.  During consensus, the *previous*,
-  *prior* or *last-closed* ledger is the most recent ledger seen by consensus
-  and is the basis upon which it will build the next ledger.
-* A *transaction* is an instruction for an atomic change in the ledger state.  A
-  unique ID distinguishes a transaction from other transactions.
-* A *transaction set* is a set of transactions under consideration by consensus.
-  The goal of consensus is to reach agreement on this set.  The generic
-  consensus algorithm does not rely on an ordering of transactions within the
-  set, nor does it specify how to apply a transaction set to a ledger to
-  generate a new ledger.  A unique ID distinguishes a set of transactions from
-  all other sets of transactions.
-* A *node* is one of the distributed actors running the consensus algorithm.  It
-  has a unique ID to distinguish it from all other nodes.
-* A *peer*  of a node is another node that it has chosen to follow and which it
-  believes will not collude with other chosen peers.  The choice of peers is not
-  symmetric, since participants can decide on their chosen sets independently.
-* A /position/ is the current belief of the next ledger's transaction set and
-  close time. Position can refer to the node's own position or the position of a
-  peer.
-* A *proposal* is one of a sequence of positions a node shares during consensus.
-  An initial proposal contains the starting position taken by a node before it
-  considers any peer positions.  If a node subsequently updates its position in
-  response to its peers, it will issue an updated proposal.  A proposal is
-  uniquely identified by the ID of the proposing node, the ID of the position
-  taken, the ID of the prior ledger the proposal is for, and the sequence number
-  of the proposal.
-* A *dispute* is a transaction that is either not part of a node's position or
-  not in a peer's position. During consensus, the node will add or remove
-  disputed transactions from its position based on that transaction's support
-  amongst its peers.
-
-Note that most types have an ID as a lightweight identifier of instances of that
-type.  Consensus often operates on the IDs directly since the underlying type is
-potentially expensive to share over the network.  For example, proposal's only
-contain the ID of the position of a peer.  Since many peers likely have the same
-position, this reduces the need to send the full transaction set multiple times.
-Instead, a node can request the transaction set from the network if necessary.
-
-### Overview 
-
-
-
-The diagram above is an overview of the consensus process from the perspective
-of a single participant.  Recall that during a single consensus round, a node is
-trying to agree with its peers on which transactions to apply to its prior
-ledger when generating the next ledger.  It also attempts to agree on the
-[network time when the ledger closed](#effective_close_time).  There are
-3 main phases to a consensus round:
-
-* A call to `startRound` places the node in the `Open` phase.  In this phase,
-the node is waiting for transactions to include in its open ledger.
-* At some point, the node will `Close` the open ledger and transition to the
-`Establish` phase.  In this phase, the node shares/receives peer proposals on
-which transactions should be accepted in the closed ledger.
-* At some point, the node determines it has reached consensus with its peers on
-which transactions to include. It transitions to the `Accept` phase. In this
-phase, the node works on applying the transactions to the prior ledger to
-generate a new closed ledger. Once the new ledger is completed, the node shares
-the validated ledger hash with the network and makes a call to `startRound` to
-start the cycle again for the next ledger.
-
-Throughout, a heartbeat timer calls `timerEntry` at a regular frequency to drive
-the process forward. Although the `startRound` call occurs at arbitrary times
-based on when the initial round began and the time it takes to apply
-transactions, the transitions from `Open` to `Establish` and `Establish` to
-`Accept` only occur during calls to `timerEntry`.  Similarly, transactions can
-arrive at arbitrary times, independent of the heartbeat timer. Transactions
-received after the `Open` to `Close` transition and not part of peer proposals
-won't be considered until the next consensus round.  They are represented above
-by the light green triangles.
-
-Peer proposals are issued by a node during a `timerEntry` call, but since peers
-do not synchronize `timerEntry` calls, they are received by other peers at
-arbitrary times. Peer proposals are only considered if received prior to the
-`Establish` to `Accept` transition, and only if the peer is working on the same
-prior ledger. Peer proposals received after consensus is reached will not be
-meaningful and are represented above by the circle with the X in it.  Only
-proposals from chosen peers are considered.
-
-### Effective Close Time ###         {#effective_close_time}
-    
-In addition to agreeing on a transaction set, each consensus round tries to
-agree on the time the ledger closed.  Each node calculates its own close time
-when it closes the open ledger.  This exact close time is rounded to the nearest
-multiple of the current *effective close time resolution*.  It is this
-*effective close time* that nodes seek to agree on. This allows servers to
-derive a common time for a ledger without the need for perfectly synchronized
-clocks. As depicted below, the 3 pink arrows represent exact close times from 3
-consensus nodes that round to the same effective close time given the current
-resolution. The purple arrow represents a peer whose estimate rounds to a
-different effective close time given the current resolution.
-
-
-
-The effective close time is part of the node's position and is shared with peers
-in its proposals.  Just like the position on the consensus transaction set, a
-node will update its close time position in response to its peers' effective
-close time positions.  Peers can agree to disagree on the close time, in which
-case the effective close time is taken as 1 second past the prior close.
-
-The close time resolution is itself dynamic, decreasing (coarser) resolution in
-subsequent consensus rounds if nodes are unable to reach consensus on an
-effective close time and increasing (finer) resolution if nodes consistently
-reach close time consensus.
-
-### Modes
-
-Internally, a node operates under one of the following consensus modes. Either
-of the first two modes may be chosen when a consensus round starts.
-
-* *Proposing* indicates the node is a full-fledged consensus participant.  It
-  takes on positions and sends proposals to its peers.
-* *Observing* indicates the node is a passive consensus participant.  It
-  maintains a position internally, but does not propose that position to its
-  peers. Instead, it receives peer proposals and updates its position
-  to track the majority of its peers.  This may be preferred if the node is only
-  being used to track the state of the network or during a start-up phase while
-  it is still synchronizing with the network.
-
-The other two modes are set internally during the consensus round when the node
-believes it is no longer working on the dominant ledger chain based on peer
-validations. It checks this on every call to `timerEntry`.
-
-* *Wrong Ledger* indicates the node is not working on the correct prior ledger
-  and does not have it available.  It requests that ledger from the network, but
-  continues to work towards consensus this round while waiting.  If it had been
-  *proposing*, it will send a special "bowout" proposal to its peers to indicate
-  its change in mode for the rest of this round. For the duration of the round,
-  it defers to peer positions for determining the consensus outcome as if it
-  were just *observing*.
-* *Switch Ledger* indicates that the node has acquired the correct prior ledger
-  from the network. Although it now has the correct prior ledger, the fact that
-  it had the wrong one at some point during this round means it is likely behind
-  and should defer to peer positions for determining the consensus outcome.
-
-
-
-Once either wrong ledger or switch ledger are reached, the node cannot
-return to proposing or observing until the next consensus round.  However,
-the node could change its view of the correct prior ledger, so going from
-switch ledger to wrong ledger and back again is possible.
-
-The distinction between the wrong and switched ledger modes arises because a
-ledger's unique identifier may be known by a node before the ledger itself. This
-reflects that fact that the data corresponding to a ledger may be large and take
-time to share over the network, whereas the smaller ID could be shared in a peer
-validation much more quickly. Distinguishing the two states allows the node to
-decide how best to generate the next ledger once it declares consensus.
-
-### Phases
-
-As depicted in the overview diagram, consensus is best viewed as a progression
-through 3 phases.  There are 4 public methods of the generic consensus algorithm
-that determine this progression
-
-* `startRound` begins a consensus round.
-* `timerEntry` is called at a regular frequency (`LEDGER_MIN_CLOSE`) and is the
-  only call to consensus that can change the  phase from `Open` to `Establish`
-  or `Accept`.
-* `peerProposal` is called whenever a peer proposal is received and is what
-  allows a node to update its position in a subsequent `timerEntry` call.
-* `gotTxSet` is called when a transaction set is received from the network. This
-  is typically in response to a prior request from the node to acquire the
-  transaction set corresponding to a disagreeing peer's position.
-
-The following subsections describe each consensus phase in more detail and what
-actions are taken in response to these calls.
-
-#### Open
-
-The `Open` phase is a quiescent period to allow transactions to build up in the
-node's open ledger.  The duration is a trade-off between latency and throughput.
-A shorter window reduces the latency to generating the next ledger, but also
-reduces transaction throughput due to fewer transactions accepted into the
-ledger.
-
-A call to `startRound` would forcibly begin the next consensus round, skipping
-completion of the current round.  This is not expected during normal operation.
-Calls to `peerProposal` or `gotTxSet` simply store the proposal or transaction
-set for use in the coming `Establish` phase.
-
-A call to `timerEntry` first checks that the node is working on the correct
-prior ledger. If not, it will update the mode and request the correct ledger.
-Otherwise, the node checks whether to switch to the `Establish` phase and close
-the ledger.
-
-##### Ledger Close
-
-Under normal circumstances, the open ledger period ends when one of the following
-is true
-
-* if there are transactions in the open ledger and more than `LEDGER_MIN_CLOSE`
-  have elapsed.  This is the typical behavior.
-* if there are no open transactions and a suitably longer idle interval has
-  elapsed.  This increases the opportunity to get some transaction into
-  the next ledger and avoids doing useless work closing an empty ledger.
-* if more than half the number of prior round peers have already closed or finished
-  this round. This indicates the node is falling behind and needs to catch up.
-
-
-When closing the ledger, the node takes its initial position based on the
-transactions in the open ledger and uses the current time as
-its initial close time estimate.  If in the proposing mode, the node shares its
-initial position with peers.  Now that the node has taken a position, it will
-consider any peer positions for this round that arrived earlier.  The node
-generates disputed transactions for each transaction not in common with a peer's
-position.  The node also records the vote of each peer for each disputed
-transaction.
-
-In the example below, we suppose our node has closed with transactions 1,2 and 3.  It creates disputes
-for transactions 2,3 and 4, since at least one peer position differs on each.
-
-##### disputes #####     {#disputes_image}
-
-
-
-#### Establish
-
-The establish phase is the active period of consensus in which the node
-exchanges proposals with peers in an attempt to reach agreement on the consensus
-transactions and effective close time.
-
-A call to `startRound` would forcibly begin the next consensus round, skipping
-completion of the current round.  This is not expected during normal operation.
-Calls to `peerProposal` or `gotTxSet` that reflect new positions will generate
-disputed transactions for any new disagreements and will update the peer's vote
-for all disputed transactions.
-
-A call to `timerEntry` first checks that the node is working from the correct
-prior ledger. If not, the node  will update the mode and request the correct
-ledger.  Otherwise, the node updates the node's position and considers whether
-to switch to the `Accepted` phase and declare consensus reached.  However, at
-least `LEDGER_MIN_CONSENSUS` time must have elapsed before doing either.  This
-allows peers an opportunity to take an initial position and share it.
-
-##### Update Position
-
-In order to achieve consensus, the node is looking for a transaction set that is
-supported by a super-majority of peers.  The node works towards this set by
-adding or removing disputed transactions from its position based on an
-increasing threshold for inclusion.
-
-
-
-By starting with a lower threshold, a node initially allows a wide set of
-transactions into its position. If the establish round continues and the node is
-"stuck", a higher threshold can focus on accepting transactions with the most
-support.  The constants that define the thresholds and durations at which the
-thresholds change are given by `AV_XXX_CONSENSUS_PCT` and
-`AV_XXX_CONSENSUS_TIME` respectively, where `XXX` is `INIT`,`MID`,`LATE` and
-`STUCK`.  The effective close time position is updated using the same
-thresholds.
-
-Given the [example disputes above](#disputes_image) and an initial threshold
-of 50%, our node would retain its position since transaction 1 was not in
-dispute and transactions 2 and 3 have 75% support.  Since its position did not
-change, it would not need to send a new proposal to peers.  Peer C would not
-change either. Peer A would add transaction 3 to its position and Peer B would
-remove transaction 4 from its position; both would then send an updated
-position.
-
-Conversely, if the diagram reflected a later call to =timerEntry= that occurs in
-the stuck region with a threshold of say 95%, our node would remove transactions
-2 and 3 from its candidate set and send an updated position.  Likewise, all the
-other peers would end up with only transaction 1 in their position.
-
-Lastly, if our node were not in the proposing mode, it would not include its own
-vote and just take the majority (>50%) position of its peers. In this example,
-our node would maintain its position of transactions 1, 2 and 3.
-
-##### Checking Consensus
-
-After updating its position, the node checks for supermajority agreement with
-its peers on its current position.  This agreement is of the exact transaction
-set, not just the support of individual transactions. That is, if our position
-is a subset of a peer's position, that counts as a disagreement. Also recall
-that effective close time agreement allows a supermajority of participants
-agreeing to disagree.
-
-Consensus is declared when the following 3 clauses are true:
-
-* `LEDGER_MIN_CONSENSUS` time has elapsed in the establish phase
-* At least 75% of the prior round proposers have proposed OR this establish
-  phase is `LEDGER_MIN_CONSENSUS` longer than the last round's establish phase
-* `minimumConsensusPercentage` of ourself and our peers share the same position
-
-The middle condition ensures slower peers have a chance to share positions, but
-prevents waiting too long on peers that have disconnected. Additionally, a node
-can declare that consensus has moved on if `minimumConsensusPercentage` peers
-have sent validations and moved on to the next ledger. This outcome indicates
-the node has fallen behind its peers and needs to catch up.
-
-If a node is not proposing, it does not include its own position when
-calculating the percent of agreeing participants but otherwise follows the above
-logic.
-
-##### Accepting Consensus
-
-Once consensus is reached (or moved on), the node switches to the `Accept` phase
-and signals to the implementing code that the round is complete. That code is
-responsible for using the consensus transaction set to generate the next ledger
-and calling `startRound` to begin the next round.  The implementation has total
-freedom on ordering transactions, deciding what to do if consensus moved on,
-determining whether to retry or abandon local transactions that did not make the
-consensus set and updating any internal state based on the consensus progress.
-
-#### Accept
-
-The `Accept` phase is the terminal phase of the consensus algorithm.  Calls to
-`timerEntry`, `peerProposal` and `gotTxSet` will not change the internal
-consensus state while in the accept phase.  The expectation is that the
-application specific code is working to generate the new ledger based on the
-consensus outcome. Once complete, that code should make a call to `startRound`
-to kick off the next consensus round. The `startRound` call includes the new
-prior ledger, prior ledger ID and whether the round should begin in the
-proposing or observing mode.  After setting some initial state, the phase
-transitions to `Open`.  The node will also check if the provided prior ledger
-and ID are correct, updating the mode and requesting the proper ledger from the
-network if necessary.
-
-## Consensus Type Requirements
-
-The consensus type requirements are given below as minimal implementation stubs.
-Actual implementations would augment these stubs with members appropriate for
-managing the details of transactions and ledgers within the larger application
-framework.
-
-### Transaction
-
-The transaction type `Tx` encapsulates a single transaction under consideration
-by consensus.
-
-```{.cpp}
-struct Tx
-{
-   using ID = ...;
-   ID const & id() const;
-
-   //... implementation specific
-};
-```
-
-### Transaction Set
-
-The transaction set type `TxSet` represents a set of `Tx`s that are collectively
-under consideration by consensus. A `TxSet` can be compared against other `TxSet`s
-(typically from peers) and can be modified to add or remove transactions via
-the mutable subtype.
-
-```{.cpp}
-struct TxSet
-{
-  using Tx = Tx;
-  using ID = ...;
-
-  ID const & id() const;
-
-  bool exists(Tx::ID const &) const;
-  Tx const * find(Tx::ID const &) const ;
-
-  // Return set of transactions that are not common with another set
-  // Bool in map is true if in our set, false if in other
-  std::map compare(TxSet const & other) const;
-
-  // A mutable view that allows changing transactions in the set
-  struct MutableTxSet
-  {
-      MutableTxSet(TxSet const &);
-      bool insert(Tx const &);
-      bool erase(Tx::ID const &);
-  };
-
-  // Construct from a mutable view.
-  TxSet(MutableTxSet const &);
-
-  // Alternatively, if the TxSet is itself mutable
-  // just alias MutableTxSet = TxSet
-
-  //... implementation specific
-};
-```
-
-### Ledger
-
-The `Ledger` type represents the state shared amongst the
-distributed participants.  Notice that the details of how the next ledger is
-generated from the prior ledger and the consensus accepted transaction set is
-not part of the interface.  Within the generic code, this type is primarily used
-to know that peers are working on the same tip of the ledger chain and to
-provide some basic timing data for consensus.
-
-```{.cpp}
-struct Ledger
-{
-  using ID = ...;
-
-  using Seq = //std::uint32_t?...;
-
-  ID const & id() const;
-
-  // Sequence number that is 1 more than the parent ledger's seq()
-  Seq seq() const;
-
-  // Whether the ledger's close time was a non-trivial consensus result
-  bool closeAgree() const;
-
-  // The close time resolution used in determining the close time
-  NetClock::duration closeTimeResolution() const;
-
-  // The (effective) close time, based on the closeTimeResolution
-  NetClock::time_point closeTime() const;
-
-  // The parent ledger's close time
-  NetClock::time_point parentCloseTime() const;
-
-  Json::Value getJson() const;
-
-  //... implementation specific
-};
-```
-
-### PeerProposal
-
-The `PeerProposal` type represents the signed position taken
-by a peer during consensus. The only type requirement is owning an instance of a
-generic `ConsensusProposal`.
-
-```{.cpp}
-// Represents our proposed position or a peer's proposed position
-// and is provided with the generic code
-template  class ConsensusProposal;
-
-struct PeerPosition
-{
-  ConsensusProposal<
-      NodeID_t,
-      typename Ledger::ID,
-      typename TxSet::ID> const &
-  proposal() const;
-
-  // ... implementation specific
-};
-```
-
-### Generic Consensus Interface
-
-The generic `Consensus` relies on `Adaptor` template class to implement a set
-of helper functions that plug the consensus algorithm into a specific application.
-The `Adaptor` class also defines the types above needed by the algorithm. Below
-are excerpts of the generic consensus implementation and of helper types that will
-interact with the concrete implementing class.
-
-```{.cpp}
-// Represents a transction under dispute this round
-template  class DisputedTx;
-
-// Represents how the node participates in Consensus this round
-enum class ConsensusMode { proposing, observing, wrongLedger, switchedLedger};
-
-// Measure duration of phases of consensus
-class ConsensusTimer
-{
-public:
-    std::chrono::milliseconds read() const;
-    // details omitted ...
-};
-
-// Initial ledger close times, not rounded by closeTimeResolution
-// Used to gauge degree of synchronization between a node and its peers
-struct ConsensusCloseTimes
-{
-    std::map peers;
-    NetClock::time_point self;
-};
-
-// Encapsulates the result of consensus.
-template 
-struct ConsensusResult
-{
-    //! The set of transactions consensus agrees go in the ledger
-    Adaptor::TxSet_t set;
-
-    //! Our proposed position on transactions/close time
-    ConsensusProposal<...> position;
-
-    //! Transactions which are under dispute with our peers
-    hash_map> disputes;
-
-    // Set of TxSet ids we have already compared/created disputes
-    hash_set compares;
-
-    // Measures the duration of the establish phase for this consensus round
-    ConsensusTimer roundTime;
-
-    // Indicates state in which consensus ended.  Once in the accept phase
-    // will be either Yes or MovedOn
-    ConsensusState state = ConsensusState::No;
-};
-
-template 
-class Consensus
-{
-public:
-    Consensus(clock_type, Adaptor &, beast::journal);
-
-    // Kick-off the next round of consensus.
-    void startRound(
-        NetClock::time_point const& now,
-        typename Ledger_t::ID const& prevLedgerID,
-        Ledger_t const& prevLedger,
-        bool proposing);
-
-    // Call periodically to drive consensus forward.
-    void timerEntry(NetClock::time_point const& now);
-
-    // A peer has proposed a new position, adjust our tracking.  Return true if the proposal
-    // was used.
-    bool peerProposal(NetClock::time_point const& now, Proposal_t const& newProposal);
-
-    // Process a transaction set acquired from the network
-    void gotTxSet(NetClock::time_point const& now, TxSet_t const& txSet);
-
-    // ... details
-};
-```
-
-### Adapting Generic Consensus
-
-The stub below shows the set of callback/helper functions required in the implementing class.
-
-```{.cpp}
-struct Adaptor
-{
-    using Ledger_t = Ledger;
-    using TxSet_t = TxSet;
-    using PeerProposal_t = PeerProposal;
-    using NodeID_t = ...; // Integer-like std::uint32_t to uniquely identify a node
-
-
-    // Attempt to acquire a specific ledger from the network.
-    boost::optional acquireLedger(Ledger::ID const & ledgerID);
-
-    // Acquire the transaction set associated with a proposed position.
-    boost::optional acquireTxSet(TxSet::ID const & setID);
-
-    // Whether any transactions are in the open ledger
-    bool hasOpenTransactions() const;
-
-    // Number of proposers that have validated the given ledger
-    std::size_t proposersValidated(Ledger::ID const & prevLedger) const;
-
-    // Number of proposers that have validated a ledger descended from the
-    // given ledger
-    std::size_t proposersFinished(Ledger::ID const & prevLedger) const;
-
-    // Return the ID of the last closed (and validated) ledger that the
-    // application thinks consensus should use as the prior ledger.
-    Ledger::ID getPrevLedger(Ledger::ID const & prevLedgerID,
-                    Ledger const & prevLedger,
-                    ConsensusMode mode);
-
-    // Called when consensus operating mode changes
-    void onModeChange(ConsensuMode before, ConsensusMode after);
-    
-    // Called when ledger closes.  Implementation should generate an initial Result
-    // with position based on the current open ledger's transactions.
-    ConsensusResult onClose(Ledger const &, Ledger const & prev, ConsensusMode mode);
-
-    // Called when ledger is accepted by consensus
-    void onAccept(ConsensusResult const & result,
-      RCLCxLedger const & prevLedger,
-      NetClock::duration closeResolution,
-      ConsensusCloseTimes const & rawCloseTimes,
-      ConsensusMode const & mode);
-
-    // Propose the position to peers.
-    void propose(ConsensusProposal<...> const & pos);
-
-    // Share a received peer proposal with other peers.
-    void share(PeerPosition_t const & pos);
-
-    // Share a disputed transaction with peers
-    void share(TxSet::Tx const & tx);
-
-    // Share given transaction set with peers
-    void share(TxSet const &s);
-
-    //... implementation specific
-};
-```
-
-The implementing class hides many details of the peer communication
-model from the generic code.
-
-* The `share` member functions are responsible for sharing the given type with a
-  node's peers, but are agnostic to the mechanism. Ideally, messages are delivered
-  faster than `LEDGER_GRANULARITY`. 
-* The generic code does not specify how transactions are submitted by clients,
-  propagated through the network or stored in the open ledger. Indeed, the open
-  ledger is only conceptual from the perspective of the generic code---the
-  initial position and transaction set are opaquely generated in a
-  `Consensus::Result` instance returned from the `onClose` callback.
-* The calls to `acquireLedger` and `acquireTxSet` only have non-trivial return
-  if the ledger or transaction set of interest is available.  The implementing
-  class is free to block while acquiring, or return the empty option while
-  servicing the request asynchronously.  Due to legacy reasons, the two calls
-  are not symmetric. `acquireTxSet` requires the host application to call
-  `gotTxSet` when an asynchronous `acquire` completes. Conversely,
-  `acquireLedger` will be called again later by the consensus code if it still
-  desires the ledger with the hope that the asynchronous acquisition is
-  complete.
-
-
-## Validation
-
-Coming Soon!
-
-
diff --git a/docs/images/consensus/EffCloseTime.png b/docs/images/consensus/EffCloseTime.png
deleted file mode 100644
index 922320de5..000000000
Binary files a/docs/images/consensus/EffCloseTime.png and /dev/null differ
diff --git a/docs/images/consensus/block_chain.png b/docs/images/consensus/block_chain.png
deleted file mode 100644
index 0c5dfee75..000000000
Binary files a/docs/images/consensus/block_chain.png and /dev/null differ
diff --git a/docs/images/consensus/consensus_modes.png b/docs/images/consensus/consensus_modes.png
deleted file mode 100644
index e575631da..000000000
Binary files a/docs/images/consensus/consensus_modes.png and /dev/null differ
diff --git a/docs/images/consensus/consensus_overview.png b/docs/images/consensus/consensus_overview.png
deleted file mode 100644
index a5b4a0049..000000000
Binary files a/docs/images/consensus/consensus_overview.png and /dev/null differ
diff --git a/docs/images/consensus/disputes.png b/docs/images/consensus/disputes.png
deleted file mode 100644
index b97b97356..000000000
Binary files a/docs/images/consensus/disputes.png and /dev/null differ
diff --git a/docs/images/consensus/ledger_chain.png b/docs/images/consensus/ledger_chain.png
deleted file mode 100644
index 05981f08f..000000000
Binary files a/docs/images/consensus/ledger_chain.png and /dev/null differ
diff --git a/docs/images/consensus/threshold.png b/docs/images/consensus/threshold.png
deleted file mode 100644
index 7a2ff12e5..000000000
Binary files a/docs/images/consensus/threshold.png and /dev/null differ
diff --git a/docs/images/xrp-text-mark-black-small@2x.png b/docs/images/xrp-text-mark-black-small@2x.png
deleted file mode 100644
index f16a20a15..000000000
Binary files a/docs/images/xrp-text-mark-black-small@2x.png and /dev/null differ
diff --git a/docs/sample_chart.doc b/docs/sample_chart.doc
deleted file mode 100644
index 631c0554b..000000000
--- a/docs/sample_chart.doc
+++ /dev/null
@@ -1,24 +0,0 @@
-/*!
-    \page somestatechart Example state diagram
-
-    \startuml SomeState "my state diagram"
-    scale 600 width
-
-    [*] -> State1
-    State1 --> State2 : Succeeded
-    State1 --> [*] : Aborted
-    State2 --> State3 : Succeeded
-    State2 --> [*] : Aborted
-    state State3 {
-      state "Accumulate Enough Data\nLong State Name" as long1
-      long1 : Just a test
-      [*] --> long1
-      long1 --> long1 : New Data
-      long1 --> ProcessData : Enough Data
-    }
-    State3 --> State3 : Failed
-    State3 --> [*] : Succeeded / Save Result
-    State3 --> [*] : Aborted
-
-    \enduml
-*/
diff --git a/external/snappy/conandata.yml b/external/snappy/conandata.yml
deleted file mode 100644
index 6c12aed54..000000000
--- a/external/snappy/conandata.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-sources:
-  "1.1.9":
-    url: "https://github.com/google/snappy/archive/1.1.9.tar.gz"
-    sha256: "75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7"
-  "1.1.8":
-    url: "https://github.com/google/snappy/archive/1.1.8.tar.gz"
-    sha256: "16b677f07832a612b0836178db7f374e414f94657c138e6993cbfc5dcc58651f"
-  "1.1.7":
-    url: "https://github.com/google/snappy/archive/1.1.7.tar.gz"
-    sha256: "3dfa02e873ff51a11ee02b9ca391807f0c8ea0529a4924afa645fbf97163f9d4"
-patches:
-  "1.1.9":
-    - patch_file: "patches/1.1.9-0001-fix-inlining-failure.patch"
-    - patch_file: "patches/1.1.9-0002-no-Werror.patch"
-    - patch_file: "patches/1.1.9-0003-fix-clobber-list-older-llvm.patch"
-    - patch_file: "patches/1.1.9-0004-rtti-by-default.patch"
diff --git a/external/snappy/conanfile.py b/external/snappy/conanfile.py
deleted file mode 100644
index 23558639f..000000000
--- a/external/snappy/conanfile.py
+++ /dev/null
@@ -1,89 +0,0 @@
-from conan import ConanFile
-from conan.tools.build import check_min_cppstd
-from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
-from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, rmdir
-from conan.tools.scm import Version
-import os
-
-required_conan_version = ">=1.54.0"
-
-
-class SnappyConan(ConanFile):
-    name = "snappy"
-    description = "A fast compressor/decompressor"
-    topics = ("google", "compressor", "decompressor")
-    url = "https://github.com/conan-io/conan-center-index"
-    homepage = "https://github.com/google/snappy"
-    license = "BSD-3-Clause"
-
-    package_type = "library"
-    settings = "os", "arch", "compiler", "build_type"
-    options = {
-        "shared": [True, False],
-        "fPIC": [True, False],
-    }
-    default_options = {
-        "shared": False,
-        "fPIC": True,
-    }
-
-    def export_sources(self):
-        export_conandata_patches(self)
-
-    def config_options(self):
-        if self.settings.os == 'Windows':
-            del self.options.fPIC
-
-    def configure(self):
-        if self.options.shared:
-            self.options.rm_safe("fPIC")
-
-    def layout(self):
-        cmake_layout(self, src_folder="src")
-
-    def validate(self):
-        if self.settings.compiler.get_safe("cppstd"):
-            check_min_cppstd(self, 11)
-
-    def source(self):
-        get(self, **self.conan_data["sources"][self.version], strip_root=True)
-
-    def generate(self):
-        tc = CMakeToolchain(self)
-        tc.variables["SNAPPY_BUILD_TESTS"] = False
-        if Version(self.version) >= "1.1.8":
-            tc.variables["SNAPPY_FUZZING_BUILD"] = False
-            tc.variables["SNAPPY_REQUIRE_AVX"] = False
-            tc.variables["SNAPPY_REQUIRE_AVX2"] = False
-            tc.variables["SNAPPY_INSTALL"] = True
-        if Version(self.version) >= "1.1.9":
-            tc.variables["SNAPPY_BUILD_BENCHMARKS"] = False
-        tc.generate()
-
-    def build(self):
-        apply_conandata_patches(self)
-        cmake = CMake(self)
-        cmake.configure()
-        cmake.build()
-
-    def package(self):
-        copy(self, "COPYING", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
-        cmake = CMake(self)
-        cmake.install()
-        rmdir(self, os.path.join(self.package_folder, "lib", "cmake"))
-
-    def package_info(self):
-        self.cpp_info.set_property("cmake_file_name", "Snappy")
-        self.cpp_info.set_property("cmake_target_name", "Snappy::snappy")
-        # TODO: back to global scope in conan v2 once cmake_find_package* generators removed
-        self.cpp_info.components["snappylib"].libs = ["snappy"]
-        if not self.options.shared:
-            if self.settings.os in ["Linux", "FreeBSD"]:
-                self.cpp_info.components["snappylib"].system_libs.append("m")
-
-        # TODO: to remove in conan v2 once cmake_find_package* generators removed
-        self.cpp_info.names["cmake_find_package"] = "Snappy"
-        self.cpp_info.names["cmake_find_package_multi"] = "Snappy"
-        self.cpp_info.components["snappylib"].names["cmake_find_package"] = "snappy"
-        self.cpp_info.components["snappylib"].names["cmake_find_package_multi"] = "snappy"
-        self.cpp_info.components["snappylib"].set_property("cmake_target_name", "Snappy::snappy")
diff --git a/external/snappy/patches/1.1.9-0001-fix-inlining-failure.patch b/external/snappy/patches/1.1.9-0001-fix-inlining-failure.patch
deleted file mode 100644
index cdc119c0d..000000000
--- a/external/snappy/patches/1.1.9-0001-fix-inlining-failure.patch
+++ /dev/null
@@ -1,14 +0,0 @@
-Fixes the following error:
-error: inlining failed in call to ‘always_inline’ ‘size_t snappy::AdvanceToNextTag(const uint8_t**, size_t*)’: function body can be overwritten at link time
-
---- snappy-stubs-internal.h
-+++ snappy-stubs-internal.h
-@@ -100,7 +100,7 @@
- 
- // Inlining hints.
- #ifdef HAVE_ATTRIBUTE_ALWAYS_INLINE
--#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline))
-+#define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
- #else
- #define SNAPPY_ATTRIBUTE_ALWAYS_INLINE
- #endif
diff --git a/external/snappy/patches/1.1.9-0002-no-Werror.patch b/external/snappy/patches/1.1.9-0002-no-Werror.patch
deleted file mode 100644
index d86e4e0a9..000000000
--- a/external/snappy/patches/1.1.9-0002-no-Werror.patch
+++ /dev/null
@@ -1,12 +0,0 @@
---- CMakeLists.txt
-+++ CMakeLists.txt
-@@ -69,7 +69,7 @@
--  # Use -Werror for clang only.
-+if(0)
-   if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
-     if(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
-       set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
-     endif(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
-   endif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
--
-+endif()
diff --git a/external/snappy/patches/1.1.9-0003-fix-clobber-list-older-llvm.patch b/external/snappy/patches/1.1.9-0003-fix-clobber-list-older-llvm.patch
deleted file mode 100644
index 84bc674fd..000000000
--- a/external/snappy/patches/1.1.9-0003-fix-clobber-list-older-llvm.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-asm clobbers do not work for clang < 9 and apple-clang < 11 (found by SpaceIm)
---- snappy.cc
-+++ snappy.cc
-@@ -1026,7 +1026,7 @@
-   size_t literal_len = *tag >> 2;
-   size_t tag_type = *tag;
-   bool is_literal;
--#if defined(__GNUC__) && defined(__x86_64__)
-+#if defined(__GNUC__) && defined(__x86_64__) && ( (!defined(__clang__) && !defined(__APPLE__)) || (!defined(__APPLE__) && defined(__clang__) && (__clang_major__ >= 9)) || (defined(__APPLE__) && defined(__clang__) && (__clang_major__ > 11)) )
-   // TODO clang misses the fact that the (c & 3) already correctly
-   // sets the zero flag.
-   asm("and $3, %k[tag_type]\n\t"
diff --git a/external/snappy/patches/1.1.9-0004-rtti-by-default.patch b/external/snappy/patches/1.1.9-0004-rtti-by-default.patch
deleted file mode 100644
index c353a489d..000000000
--- a/external/snappy/patches/1.1.9-0004-rtti-by-default.patch
+++ /dev/null
@@ -1,20 +0,0 @@
---- a/CMakeLists.txt
-+++ b/CMakeLists.txt
-@@ -53,8 +53,6 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
-   add_definitions(-D_HAS_EXCEPTIONS=0)
- 
-   # Disable RTTI.
--  string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
--  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
- else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
-   # Use -Wall for clang and gcc.
-   if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
-@@ -78,8 +76,6 @@ endif()
-   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
- 
-   # Disable RTTI.
--  string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
--  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
- endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
- 
- # BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make
diff --git a/hook/LaunchHooks.md b/hook/LaunchHooks.md
deleted file mode 100644
index 0df9a5544..000000000
--- a/hook/LaunchHooks.md
+++ /dev/null
@@ -1,68 +0,0 @@
-# Xahau Launch Hooks
-
-## Savings Hook
-When it comes to managing money, it is a common practice to maintain separate spending and savings accounts.
-Suppose you receive an income or salary to your on-ledger account several times a month. Each time you receive funds above a certain threshold you may wish to move a predefined percentage to a savings account where you will not accidentally spend them. The savings hook does exactly this.
-### Hook Parameters
-  1. Account to install on
-  2. Account to send savings to
-  3. The percentage to send
-  4. The threshold at which it is activated
-  5. Whether it applies to incoming payments only, outgoing payments only or both.
-### xApp Features
-  1. Display the send-to account
-  2. Display the percentage
-  3. Display the total amount sent
-  4. Display the conditions of sending (threshold + incoming/outgoing payments)
-  5. Allow the hook to be uninstalled
-
-## Firewall Hook
-The ledger is a messy place full of unwanted transactions and spam. To avoid being spammed with low value transactions containing unsolicitied memos you may install a Firewall hook on your account.
-### Hook Parameters
- 1. Types of transactions to allow into and out of your account (Payment, Escrow, PayChannel) etc.
- 2. Allow a minimum number of drops for an incoming txn to be allowed.
- 3. Allow a minimum amount to be specified for each of the trustline assets on the account as well.
- 4. Allow any txn with a memo larger than X bytes to be blocked regardless of other rules.
-### xApp Features
- 1. Display the current settings of the hook. Allow the settings to be changed.
- 2. Allow the hook to be uninstalled.
-
-## Blocklist Hook
-Filter outgoing and incoming payments against a known list of scam accounts maintained by a third party. This acts as a guard against accidentally sending to a scam, or being sent tainted funds by a scammer.
-### Hook Parameters
- 1. The blocklist (account) to listen to.
-### xApp Features
- 1. Number of times a transaction was blocked.
- 2. The current blocklist (account) being listened to.
- 3. Allow the hook to be uninstalled.
-
-## Direct Debit Hook
-Allow trusted third parties to pull funds from your account up to a limit you set. For example your power company can bill you and your account can automatically pay that bill.
-### Hook Parameters
- 1. One or more accounts to provide direct deposit authorization to.
- 2. A currency and a limit for each of these.
-## xApp Features
- 1. See who you've authorized.
- 2. See how much they're authorized for.
- 3. See how much they've drawn down this month.
- 4. Allow authorization to be removed.
- 5. Allow authorization limit to be changed.
- 6. Allow additional authorizations to be created.
- 7. Allow the hook to be uninstalled.
- 8. Show a list of recent direct debit transactions.
-
-
-## High-Value Payment Hook
-When sending high value transactions out of your account, require first a notification that a high valued payment will be made, followed by a time delay, followed by the high value transaction itself. This prevents accidental high value sends, adding an additional layer of security to your account.
-### Hook Parameters
- 1. Select currencies for which the hook will act.
- 2. Select the thresholds for which the hook will be triggered.
-### xApp Features
- 1. See current pending outgoing high value transactions.
- 2. State that the hook is active and for which currencies and thresholds.
- 3. Allow the hook to be uninstalled.
- 4. If installed, and a high value transaction is made from Xumm, it is redirected into the xApp.
- 5. The xApp then generates a notification transaction (ttInvoke) which is sent to the hook.
- 6. The xApp will then remind the user with an event at a later time that the transaction proper still needs to be sent.
- 7. Sending the transaction proper again will result in successful send.
-
diff --git a/hook/definitions.json b/hook/definitions.json
deleted file mode 100644
index 64e294019..000000000
--- a/hook/definitions.json
+++ /dev/null
@@ -1,2473 +0,0 @@
-{
-  "TYPES": {
-    "Done": -1,
-    "Unknown": -2,
-    "NotPresent": 0,
-    "UInt16": 1,
-    "UInt32": 2,
-    "UInt64": 3,
-    "Hash128": 4,
-    "Hash256": 5,
-    "Amount": 6,
-    "Blob": 7,
-    "AccountID": 8,
-    "STObject": 14,
-    "STArray": 15,
-    "UInt8": 16,
-    "Hash160": 17,
-    "PathSet": 18,
-    "Vector256": 19,
-    "UInt96": 20,
-    "UInt192": 21,
-    "UInt384": 22,
-    "UInt512": 23,
-    "Transaction": 10001,
-    "LedgerEntry": 10002,
-    "Validation": 10003,
-    "Metadata": 10004
-  },
-  "LEDGER_ENTRY_TYPES": {
-    "Invalid": -1,
-    "AccountRoot": 97,
-    "DirectoryNode": 100,
-    "RippleState": 114,
-    "Ticket": 84,
-    "SignerList": 83,
-    "Offer": 111,
-    "LedgerHashes": 104,
-    "Amendments": 102,
-    "FeeSettings": 115,
-    "Escrow": 117,
-    "PayChannel": 120,
-    "Check": 67,
-    "DepositPreauth": 112,
-    "NegativeUNL": 78,
-    "NFTokenPage": 80,
-    "NFTokenOffer": 55,
-    "Any": -3,
-    "Child": -2,
-    "Nickname": 110,
-    "Contract": 99,
-    "GeneratorMap": 103,
-    "Hook": 72,
-    "HookState": 118,
-    "HookDefinition": 68,
-    "EmittedTxn": 69
-  },
-  "FIELDS": [
-    [
-      "Generic",
-      {
-        "nth": 0,
-        "isVLEncoded": false,
-        "isSerialized": false,
-        "isSigningField": false,
-        "type": "Unknown"
-      }
-    ],
-    [
-      "Invalid",
-      {
-        "nth": -1,
-        "isVLEncoded": false,
-        "isSerialized": false,
-        "isSigningField": false,
-        "type": "Unknown"
-      }
-    ],
-    [
-      "ObjectEndMarker",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "ArrayEndMarker",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "hash",
-      {
-        "nth": 257,
-        "isVLEncoded": false,
-        "isSerialized": false,
-        "isSigningField": false,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "index",
-      {
-        "nth": 258,
-        "isVLEncoded": false,
-        "isSerialized": false,
-        "isSigningField": false,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "taker_gets_funded",
-      {
-        "nth": 258,
-        "isVLEncoded": false,
-        "isSerialized": false,
-        "isSigningField": false,
-        "type": "Amount"
-      }
-    ],
-    [
-      "taker_pays_funded",
-      {
-        "nth": 259,
-        "isVLEncoded": false,
-        "isSerialized": false,
-        "isSigningField": false,
-        "type": "Amount"
-      }
-    ],
-    [
-      "LedgerEntry",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": false,
-        "isSigningField": true,
-        "type": "LedgerEntry"
-      }
-    ],
-    [
-      "Transaction",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": false,
-        "isSigningField": true,
-        "type": "Transaction"
-      }
-    ],
-    [
-      "Validation",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": false,
-        "isSigningField": true,
-        "type": "Validation"
-      }
-    ],
-    [
-      "Metadata",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Metadata"
-      }
-    ],
-    [
-      "CloseResolution",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt8"
-      }
-    ],
-    [
-      "Method",
-      {
-        "nth": 2,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt8"
-      }
-    ],
-    [
-      "TransactionResult",
-      {
-        "nth": 3,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt8"
-      }
-    ],
-    [
-      "TickSize",
-      {
-        "nth": 16,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt8"
-      }
-    ],
-    [
-      "UNLModifyDisabling",
-      {
-        "nth": 17,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt8"
-      }
-    ],
-    [
-      "HookResult",
-      {
-        "nth": 18,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt8"
-      }
-    ],
-    [
-      "LedgerEntryType",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt16"
-      }
-    ],
-    [
-      "TransactionType",
-      {
-        "nth": 2,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt16"
-      }
-    ],
-    [
-      "SignerWeight",
-      {
-        "nth": 3,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt16"
-      }
-    ],
-    [
-      "TransferFee",
-      {
-        "nth": 4,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt16"
-      }
-    ],
-    [
-      "Version",
-      {
-        "nth": 16,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt16"
-      }
-    ],
-    [
-      "HookStateChangeCount",
-      {
-        "nth": 17,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt16"
-      }
-    ],
-    [
-      "HookEmitCount",
-      {
-        "nth": 18,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt16"
-      }
-    ],
-    [
-      "HookExecutionIndex",
-      {
-        "nth": 19,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt16"
-      }
-    ],
-    [
-      "HookApiVersion",
-      {
-        "nth": 20,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt16"
-      }
-    ],
-    [
-      "NetworkID",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "Flags",
-      {
-        "nth": 2,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "SourceTag",
-      {
-        "nth": 3,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "Sequence",
-      {
-        "nth": 4,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "PreviousTxnLgrSeq",
-      {
-        "nth": 5,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "LedgerSequence",
-      {
-        "nth": 6,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "CloseTime",
-      {
-        "nth": 7,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "ParentCloseTime",
-      {
-        "nth": 8,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "SigningTime",
-      {
-        "nth": 9,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "Expiration",
-      {
-        "nth": 10,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "TransferRate",
-      {
-        "nth": 11,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "WalletSize",
-      {
-        "nth": 12,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "OwnerCount",
-      {
-        "nth": 13,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "DestinationTag",
-      {
-        "nth": 14,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "HighQualityIn",
-      {
-        "nth": 16,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "HighQualityOut",
-      {
-        "nth": 17,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "LowQualityIn",
-      {
-        "nth": 18,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "LowQualityOut",
-      {
-        "nth": 19,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "QualityIn",
-      {
-        "nth": 20,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "QualityOut",
-      {
-        "nth": 21,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "StampEscrow",
-      {
-        "nth": 22,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "BondAmount",
-      {
-        "nth": 23,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "LoadFee",
-      {
-        "nth": 24,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "OfferSequence",
-      {
-        "nth": 25,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "FirstLedgerSequence",
-      {
-        "nth": 26,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "LastLedgerSequence",
-      {
-        "nth": 27,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "TransactionIndex",
-      {
-        "nth": 28,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "OperationLimit",
-      {
-        "nth": 29,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "ReferenceFeeUnits",
-      {
-        "nth": 30,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "ReserveBase",
-      {
-        "nth": 31,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "ReserveIncrement",
-      {
-        "nth": 32,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "SetFlag",
-      {
-        "nth": 33,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "ClearFlag",
-      {
-        "nth": 34,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "SignerQuorum",
-      {
-        "nth": 35,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "CancelAfter",
-      {
-        "nth": 36,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "FinishAfter",
-      {
-        "nth": 37,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "SignerListID",
-      {
-        "nth": 38,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "SettleDelay",
-      {
-        "nth": 39,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "TicketCount",
-      {
-        "nth": 40,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "TicketSequence",
-      {
-        "nth": 41,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "NFTokenTaxon",
-      {
-        "nth": 42,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "MintedNFTokens",
-      {
-        "nth": 43,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "BurnedNFTokens",
-      {
-        "nth": 44,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "HookStateCount",
-      {
-        "nth": 45,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "EmitGeneration",
-      {
-        "nth": 46,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "LockCount",
-      {
-        "nth": 47,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "RewardTime",
-      {
-        "nth": 98,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "RewardLgrFirst",
-      {
-        "nth": 99,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "RewardLgrLast",
-      {
-        "nth": 100,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt32"
-      }
-    ],
-    [
-      "IndexNext",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "IndexPrevious",
-      {
-        "nth": 2,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "BookNode",
-      {
-        "nth": 3,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "OwnerNode",
-      {
-        "nth": 4,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "BaseFee",
-      {
-        "nth": 5,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "ExchangeRate",
-      {
-        "nth": 6,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "LowNode",
-      {
-        "nth": 7,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "HighNode",
-      {
-        "nth": 8,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "DestinationNode",
-      {
-        "nth": 9,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "Cookie",
-      {
-        "nth": 10,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "ServerVersion",
-      {
-        "nth": 11,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "NFTokenOfferNode",
-      {
-        "nth": 12,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "EmitBurden",
-      {
-        "nth": 13,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "HookInstructionCount",
-      {
-        "nth": 17,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "HookReturnCode",
-      {
-        "nth": 18,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "ReferenceCount",
-      {
-        "nth": 19,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "RewardAccumulator",
-      {
-        "nth": 100,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "UInt64"
-      }
-    ],
-    [
-      "EmailHash",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash128"
-      }
-    ],
-    [
-      "TakerPaysCurrency",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash160"
-      }
-    ],
-    [
-      "TakerPaysIssuer",
-      {
-        "nth": 2,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash160"
-      }
-    ],
-    [
-      "TakerGetsCurrency",
-      {
-        "nth": 3,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash160"
-      }
-    ],
-    [
-      "TakerGetsIssuer",
-      {
-        "nth": 4,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash160"
-      }
-    ],
-    [
-      "LedgerHash",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "ParentHash",
-      {
-        "nth": 2,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "TransactionHash",
-      {
-        "nth": 3,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "AccountHash",
-      {
-        "nth": 4,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "PreviousTxnID",
-      {
-        "nth": 5,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "LedgerIndex",
-      {
-        "nth": 6,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "WalletLocator",
-      {
-        "nth": 7,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "RootIndex",
-      {
-        "nth": 8,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "AccountTxnID",
-      {
-        "nth": 9,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "NFTokenID",
-      {
-        "nth": 10,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "EmitParentTxnID",
-      {
-        "nth": 11,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "EmitNonce",
-      {
-        "nth": 12,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "EmitHookHash",
-      {
-        "nth": 13,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "BookDirectory",
-      {
-        "nth": 16,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "InvoiceID",
-      {
-        "nth": 17,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "Nickname",
-      {
-        "nth": 18,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "Amendment",
-      {
-        "nth": 19,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "HookOn",
-      {
-        "nth": 20,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "Digest",
-      {
-        "nth": 21,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "Channel",
-      {
-        "nth": 22,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "ConsensusHash",
-      {
-        "nth": 23,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "CheckID",
-      {
-        "nth": 24,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "ValidatedHash",
-      {
-        "nth": 25,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "PreviousPageMin",
-      {
-        "nth": 26,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "NextPageMin",
-      {
-        "nth": 27,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "NFTokenBuyOffer",
-      {
-        "nth": 28,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "NFTokenSellOffer",
-      {
-        "nth": 29,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "HookStateKey",
-      {
-        "nth": 30,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "HookHash",
-      {
-        "nth": 31,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "HookNamespace",
-      {
-        "nth": 32,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "HookSetTxnID",
-      {
-        "nth": 33,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "OfferID",
-      {
-        "nth": 34,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "EscrowID",
-      {
-        "nth": 35,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Hash256"
-      }
-    ],
-    [
-      "Amount",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "Balance",
-      {
-        "nth": 2,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "LimitAmount",
-      {
-        "nth": 3,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "TakerPays",
-      {
-        "nth": 4,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "TakerGets",
-      {
-        "nth": 5,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "LowLimit",
-      {
-        "nth": 6,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "HighLimit",
-      {
-        "nth": 7,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "Fee",
-      {
-        "nth": 8,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "SendMax",
-      {
-        "nth": 9,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "DeliverMin",
-      {
-        "nth": 10,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "MinimumOffer",
-      {
-        "nth": 16,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "RippleEscrow",
-      {
-        "nth": 17,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "DeliveredAmount",
-      {
-        "nth": 18,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "NFTokenBrokerFee",
-      {
-        "nth": 19,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "HookCallbackFee",
-      {
-        "nth": 20,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "LockedBalance",
-      {
-        "nth": 21,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Amount"
-      }
-    ],
-    [
-      "PublicKey",
-      {
-        "nth": 1,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "MessageKey",
-      {
-        "nth": 2,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "SigningPubKey",
-      {
-        "nth": 3,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "TxnSignature",
-      {
-        "nth": 4,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": false,
-        "type": "Blob"
-      }
-    ],
-    [
-      "URI",
-      {
-        "nth": 5,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "Signature",
-      {
-        "nth": 6,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": false,
-        "type": "Blob"
-      }
-    ],
-    [
-      "Domain",
-      {
-        "nth": 7,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "FundCode",
-      {
-        "nth": 8,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "RemoveCode",
-      {
-        "nth": 9,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "ExpireCode",
-      {
-        "nth": 10,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "CreateCode",
-      {
-        "nth": 11,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "MemoType",
-      {
-        "nth": 12,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "MemoData",
-      {
-        "nth": 13,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "MemoFormat",
-      {
-        "nth": 14,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "Fulfillment",
-      {
-        "nth": 16,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "Condition",
-      {
-        "nth": 17,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "MasterSignature",
-      {
-        "nth": 18,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": false,
-        "type": "Blob"
-      }
-    ],
-    [
-      "UNLModifyValidator",
-      {
-        "nth": 19,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "ValidatorToDisable",
-      {
-        "nth": 20,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "ValidatorToReEnable",
-      {
-        "nth": 21,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "HookStateData",
-      {
-        "nth": 22,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "HookReturnString",
-      {
-        "nth": 23,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "HookParameterName",
-      {
-        "nth": 24,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "HookParameterValue",
-      {
-        "nth": 25,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "Blob",
-      {
-        "nth": 26,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Blob"
-      }
-    ],
-    [
-      "Account",
-      {
-        "nth": 1,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "AccountID"
-      }
-    ],
-    [
-      "Owner",
-      {
-        "nth": 2,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "AccountID"
-      }
-    ],
-    [
-      "Destination",
-      {
-        "nth": 3,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "AccountID"
-      }
-    ],
-    [
-      "Issuer",
-      {
-        "nth": 4,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "AccountID"
-      }
-    ],
-    [
-      "Authorize",
-      {
-        "nth": 5,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "AccountID"
-      }
-    ],
-    [
-      "Unauthorize",
-      {
-        "nth": 6,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "AccountID"
-      }
-    ],
-    [
-      "RegularKey",
-      {
-        "nth": 8,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "AccountID"
-      }
-    ],
-    [
-      "NFTokenMinter",
-      {
-        "nth": 9,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "AccountID"
-      }
-    ],
-    [
-      "EmitCallback",
-      {
-        "nth": 10,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "AccountID"
-      }
-    ],
-    [
-      "HookAccount",
-      {
-        "nth": 16,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "AccountID"
-      }
-    ],
-    [
-      "Indexes",
-      {
-        "nth": 1,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Vector256"
-      }
-    ],
-    [
-      "Hashes",
-      {
-        "nth": 2,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Vector256"
-      }
-    ],
-    [
-      "Amendments",
-      {
-        "nth": 3,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Vector256"
-      }
-    ],
-    [
-      "NFTokenOffers",
-      {
-        "nth": 4,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Vector256"
-      }
-    ],
-    [
-      "HookNamespaces",
-      {
-        "nth": 5,
-        "isVLEncoded": true,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "Vector256"
-      }
-    ],
-    [
-      "Paths",
-      {
-        "nth": 1,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "PathSet"
-      }
-    ],
-    [
-      "TransactionMetaData",
-      {
-        "nth": 2,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "CreatedNode",
-      {
-        "nth": 3,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "DeletedNode",
-      {
-        "nth": 4,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "ModifiedNode",
-      {
-        "nth": 5,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "PreviousFields",
-      {
-        "nth": 6,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "FinalFields",
-      {
-        "nth": 7,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "NewFields",
-      {
-        "nth": 8,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "TemplateEntry",
-      {
-        "nth": 9,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "Memo",
-      {
-        "nth": 10,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "SignerEntry",
-      {
-        "nth": 11,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "NFToken",
-      {
-        "nth": 12,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "EmitDetails",
-      {
-        "nth": 13,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "Hook",
-      {
-        "nth": 14,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "Signer",
-      {
-        "nth": 16,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "Majority",
-      {
-        "nth": 18,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "DisabledValidator",
-      {
-        "nth": 19,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "EmittedTxn",
-      {
-        "nth": 20,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "HookExecution",
-      {
-        "nth": 21,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "HookDefinition",
-      {
-        "nth": 22,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "HookParameter",
-      {
-        "nth": 23,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "HookGrant",
-      {
-        "nth": 24,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STObject"
-      }
-    ],
-    [
-      "Signers",
-      {
-        "nth": 3,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": false,
-        "type": "STArray"
-      }
-    ],
-    [
-      "SignerEntries",
-      {
-        "nth": 4,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "Template",
-      {
-        "nth": 5,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "Necessary",
-      {
-        "nth": 6,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "Sufficient",
-      {
-        "nth": 7,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "AffectedNodes",
-      {
-        "nth": 8,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "Memos",
-      {
-        "nth": 9,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "NFTokens",
-      {
-        "nth": 10,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "Hooks",
-      {
-        "nth": 11,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "Majorities",
-      {
-        "nth": 16,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "DisabledValidators",
-      {
-        "nth": 17,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "HookExecutions",
-      {
-        "nth": 18,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "HookParameters",
-      {
-        "nth": 19,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ],
-    [
-      "HookGrants",
-      {
-        "nth": 20,
-        "isVLEncoded": false,
-        "isSerialized": true,
-        "isSigningField": true,
-        "type": "STArray"
-      }
-    ]
-  ],
-  "TRANSACTION_RESULTS": {
-    "telLOCAL_ERROR": -399,
-    "telBAD_DOMAIN": -398,
-    "telBAD_PATH_COUNT": -397,
-    "telBAD_PUBLIC_KEY": -396,
-    "telFAILED_PROCESSING": -395,
-    "telINSUF_FEE_P": -394,
-    "telNO_DST_PARTIAL": -393,
-    "telCAN_NOT_QUEUE": -392,
-    "telCAN_NOT_QUEUE_BALANCE": -391,
-    "telCAN_NOT_QUEUE_BLOCKS": -390,
-    "telCAN_NOT_QUEUE_BLOCKED": -389,
-    "telCAN_NOT_QUEUE_FEE": -388,
-    "telCAN_NOT_QUEUE_FULL": -387,
-    "telWRONG_NETWORK": -386,
-    "telREQUIRES_NETWORK_ID": -385,
-    "telNETWORK_ID_MAKES_TX_NON_CANONICAL": -384,
-
-    "temMALFORMED": -299,
-    "temBAD_AMOUNT": -298,
-    "temBAD_CURRENCY": -297,
-    "temBAD_EXPIRATION": -296,
-    "temBAD_FEE": -295,
-    "temBAD_ISSUER": -294,
-    "temBAD_LIMIT": -293,
-    "temBAD_OFFER": -292,
-    "temBAD_PATH": -291,
-    "temBAD_PATH_LOOP": -290,
-    "temBAD_REGKEY": -289,
-    "temBAD_SEND_XRP_LIMIT": -288,
-    "temBAD_SEND_XRP_MAX": -287,
-    "temBAD_SEND_XRP_NO_DIRECT": -286,
-    "temBAD_SEND_XRP_PARTIAL": -285,
-    "temBAD_SEND_XRP_PATHS": -284,
-    "temBAD_SEQUENCE": -283,
-    "temBAD_SIGNATURE": -282,
-    "temBAD_SRC_ACCOUNT": -281,
-    "temBAD_TRANSFER_RATE": -280,
-    "temDST_IS_SRC": -279,
-    "temDST_NEEDED": -278,
-    "temINVALID": -277,
-    "temINVALID_FLAG": -276,
-    "temREDUNDANT": -275,
-    "temRIPPLE_EMPTY": -274,
-    "temDISABLED": -273,
-    "temBAD_SIGNER": -272,
-    "temBAD_QUORUM": -271,
-    "temBAD_WEIGHT": -270,
-    "temBAD_TICK_SIZE": -269,
-    "temINVALID_ACCOUNT_ID": -268,
-    "temCANNOT_PREAUTH_SELF": -267,
-    "temINVALID_COUNT": -266,
-    "temHOOK_DATA_TOO_LARGE": -265,
-    "temHOOK_REJECTED": -264,
-    "temUNCERTAIN": -263,
-    "temUNKNOWN": -262,
-    "temSEQ_AND_TICKET": -261,
-    "temBAD_NFTOKEN_TRANSFER_FEE": -260,
-
-    "tefFAILURE": -199,
-    "tefALREADY": -198,
-    "tefBAD_ADD_AUTH": -197,
-    "tefBAD_AUTH": -196,
-    "tefBAD_LEDGER": -195,
-    "tefCREATED": -194,
-    "tefEXCEPTION": -193,
-    "tefINTERNAL": -192,
-    "tefNO_AUTH_REQUIRED": -191,
-    "tefPAST_SEQ": -190,
-    "tefWRONG_PRIOR": -189,
-    "tefMASTER_DISABLED": -188,
-    "tefMAX_LEDGER": -187,
-    "tefBAD_SIGNATURE": -186,
-    "tefBAD_QUORUM": -185,
-    "tefNOT_MULTI_SIGNING": -184,
-    "tefBAD_AUTH_MASTER": -183,
-    "tefINVARIANT_FAILED": -182,
-    "tefTOO_BIG": -181,
-    "tefNO_TICKET": -180,
-    "tefNFTOKEN_IS_NOT_TRANSFERABLE": -179,
-
-    "terRETRY": -99,
-    "terFUNDS_SPENT": -98,
-    "terINSUF_FEE_B": -97,
-    "terNO_ACCOUNT": -96,
-    "terNO_AUTH": -95,
-    "terNO_LINE": -94,
-    "terOWNERS": -93,
-    "terPRE_SEQ": -92,
-    "terLAST": -91,
-    "terNO_RIPPLE": -90,
-    "terQUEUED": -89,
-    "terPRE_TICKET": -88,
-    "terNO_HOOK": -87,
-
-    "tesSUCCESS": 0,
-
-    "tecCLAIM": 100,
-    "tecPATH_PARTIAL": 101,
-    "tecUNFUNDED_ADD": 102,
-    "tecUNFUNDED_OFFER": 103,
-    "tecUNFUNDED_PAYMENT": 104,
-    "tecFAILED_PROCESSING": 105,
-    "tecDIR_FULL": 121,
-    "tecINSUF_RESERVE_LINE": 122,
-    "tecINSUF_RESERVE_OFFER": 123,
-    "tecNO_DST": 124,
-    "tecNO_DST_INSUF_XRP": 125,
-    "tecNO_LINE_INSUF_RESERVE": 126,
-    "tecNO_LINE_REDUNDANT": 127,
-    "tecPATH_DRY": 128,
-    "tecUNFUNDED": 129,
-    "tecNO_ALTERNATIVE_KEY": 130,
-    "tecNO_REGULAR_KEY": 131,
-    "tecOWNERS": 132,
-    "tecNO_ISSUER": 133,
-    "tecNO_AUTH": 134,
-    "tecNO_LINE": 135,
-    "tecINSUFF_FEE": 136,
-    "tecFROZEN": 137,
-    "tecNO_TARGET": 138,
-    "tecNO_PERMISSION": 139,
-    "tecNO_ENTRY": 140,
-    "tecINSUFFICIENT_RESERVE": 141,
-    "tecNEED_MASTER_KEY": 142,
-    "tecDST_TAG_NEEDED": 143,
-    "tecINTERNAL": 144,
-    "tecOVERSIZE": 145,
-    "tecCRYPTOCONDITION_ERROR": 146,
-    "tecINVARIANT_FAILED": 147,
-    "tecEXPIRED": 148,
-    "tecDUPLICATE": 149,
-    "tecKILLED": 150,
-    "tecHAS_OBLIGATIONS": 151,
-    "tecTOO_SOON": 152,
-    "tecHOOK_REJECTED": 153,
-    "tecMAX_SEQUENCE_REACHED": 154,
-    "tecNO_SUITABLE_NFTOKEN_PAGE": 155,
-    "tecNFTOKEN_BUY_SELL_MISMATCH": 156,
-    "tecNFTOKEN_OFFER_TYPE_MISMATCH": 157,
-    "tecCANT_ACCEPT_OWN_NFTOKEN_OFFER": 158,
-    "tecINSUFFICIENT_FUNDS": 159,
-    "tecOBJECT_NOT_FOUND": 160,
-    "tecINSUFFICIENT_PAYMENT": 161,
-    "tecREQUIRES_FLAG": 162,
-    "tecPRECISION_LOSS": 163
-  },
-  "TRANSACTION_TYPES": {
-    "Invalid": -1,
-    "Payment": 0,
-    "EscrowCreate": 1,
-    "EscrowFinish": 2,
-    "AccountSet": 3,
-    "EscrowCancel": 4,
-    "SetRegularKey": 5,
-    "NickNameSet": 6,
-    "OfferCreate": 7,
-    "OfferCancel": 8,
-    "Contract": 9,
-    "TicketCreate": 10,
-    "TicketCancel": 11,
-    "SignerListSet": 12,
-    "PaymentChannelCreate": 13,
-    "PaymentChannelFund": 14,
-    "PaymentChannelClaim": 15,
-    "CheckCreate": 16,
-    "CheckCash": 17,
-    "CheckCancel": 18,
-    "DepositPreauth": 19,
-    "TrustSet": 20,
-    "AccountDelete": 21,
-    "SetHook": 22,
-    "NFTokenMint": 25,
-    "NFTokenBurn": 26,
-    "NFTokenCreateOffer": 27,
-    "NFTokenCancelOffer": 28,
-    "NFTokenAcceptOffer": 29,
-    "ClaimReward": 98,
-    "Invoke": 99,
-    "EnableAmendment": 100,
-    "SetFee": 101,
-    "UNLModify": 102,
-    "EmitFailure": 103
-  }
-}
diff --git a/hook/govern.c b/hook/genesis/govern.c
similarity index 100%
rename from hook/govern.c
rename to hook/genesis/govern.c
diff --git a/hook/makefile b/hook/genesis/makefile
similarity index 100%
rename from hook/makefile
rename to hook/genesis/makefile
diff --git a/hook/mint.c b/hook/genesis/mint.c
similarity index 100%
rename from hook/mint.c
rename to hook/genesis/mint.c
diff --git a/hook/nftoken.c b/hook/genesis/nftoken.c
similarity index 100%
rename from hook/nftoken.c
rename to hook/genesis/nftoken.c
diff --git a/hook/rekey-validator.js b/hook/genesis/rekey-validator.js
similarity index 100%
rename from hook/rekey-validator.js
rename to hook/genesis/rekey-validator.js
diff --git a/hook/reward.c b/hook/genesis/reward.c
similarity index 100%
rename from hook/reward.c
rename to hook/genesis/reward.c
diff --git a/hook/utils-tests.js b/hook/utils-tests.js
deleted file mode 100644
index 376161aa4..000000000
--- a/hook/utils-tests.js
+++ /dev/null
@@ -1,637 +0,0 @@
-const fs = require('fs')
-const xrpljs = require('xrpl-hooks');
-const kp = require('ripple-keypairs');
-const crypto = require('crypto')
-
-const rbc = require('xrpl-binary-codec')
-const rac = require('ripple-address-codec');
-
-const err = (x) =>
-{
-    console.log(x); process.exit(1);
-}
-// Fails via process.exit
-module.exports = {
-    TestRig: (endpoint)=>
-    {
-        return new Promise((resolve, reject)=>
-        {
-                const api = new xrpljs.Client(endpoint);
-
-                const nftid = (acc, flags, fee, taxon, mintseq) =>
-                {
-                    if (typeof(acc.classicAddress) != "undefined")
-                        acc = acc.classicAddress;
-
-                    acc = rac.decodeAccountID(acc);
-                    const ts = mintseq;
-                    const tax =(taxon  ^ ((384160001 * ts) + 2459));
-                    const id = Buffer.from([
-                        (flags >> 8) & 0xFF,
-                        flags & 0xFF,
-                        (fee >> 8) & 0xFF,
-                        fee & 0xFF,
-                        acc[0],
-                        acc[1],
-                        acc[2],
-                        acc[3],
-                        acc[4],
-                        acc[5],
-                        acc[6],
-                        acc[7],
-                        acc[8],
-                        acc[9],
-                        acc[10],
-                        acc[11],
-                        acc[12],
-                        acc[13],
-                        acc[14],
-                        acc[15],
-                        acc[16],
-                        acc[17],
-                        acc[18],
-                        acc[19],
-                        (tax >> 24) & 0xFF,
-                        (tax >> 16) & 0xFF,
-                        (tax >> 8) & 0xFF,
-                        tax & 0xFF,
-                        (ts >> 24) & 0xFF,
-                        (ts >> 16) & 0xFF,
-                        (ts >> 8) & 0xFF,
-                        ts & 0xFF
-                    ], 'binary').toString('hex').toUpperCase()
-                    return id;
-
-                };
-
-
-
-                const fee = (tx_blob) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        let req = {command: 'fee'};
-                        if (tx_blob)
-                            req['tx_blob'] = tx_blob;
-
-                        console.log(req);
-
-                        api.request(req).then(resp =>
-                        {
-                            resolve(resp.result.drops);
-                        }).catch(e =>
-                        {
-                            reject(e);
-                        });
-                    });
-                };
-
-                const ledgerAccept = (n) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        const la = (remaining) =>
-                        {
-                            let req = {command: 'ledger_accept'};
-                            api.request(req).then(resp =>
-                            {
-                                if (remaining <= 0)
-                                    resolve(resp);
-                                la(remaining - 1);
-                            }).catch(e=>reject(e));
-                        };
-
-                        la(typeof(n) == 'undefined' ? 1 : n);
-                    });
-                };
-
-                const assertTxnSuccess = x =>
-                {
-                    if (!x || !x.result || x.result.engine_result_code != 0)
-                    {
-                        console.log("Transaction failed:", x)
-                        process.exit(1);
-                    }
-                };
-
-                const assert = (x, m) =>
-                {
-                    if (!(x))
-                    {
-                        console.log("Assertion failed: ", m);
-                        console.log(new Error().stack);
-                        process.exit(1);
-                    }
-                };
-
-                const fetchMeta = (hash) =>
-                {
-                    if (typeof(hash) != 'string')
-                        hash = hash.result.tx_json.hash
-
-                    return new Promise((resolve, reject) =>
-                    {
-                        api.request(
-                        {
-                            command:"tx",
-                            transaction: hash
-                        }).then(e=>{
-                            resolve(e.result.meta)
-                        }).catch(e=>reject(e));
-                    });
-                };
-
-
-                const fetchMetaHookExecutions = (hash, hookhash) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        fetchMeta(hash).then(m=>
-                        {
-                            if (typeof(m) == 'undefined' ||
-                                typeof(m.HookExecutions) == 'undefined' ||
-                                typeof(m.HookExecutions.length) == 'undefined')
-                                {
-                                    return resolve([])
-                                }
-
-                            let ret = [];
-
-                            for (let i = 0; i < m.HookExecutions.length; ++i)
-                            {
-                                if (typeof(hookhash) == 'undefined' ||
-                                    m.HookExecutions[i].HookExecution.HookHash == hookhash)
-                                m.HookExecutions[i].HookExecution.HookReturnCode =
-                                    parseInt(m.HookExecutions[i].HookExecution.HookReturnCode, 16);
-                                m.HookExecutions[i].HookExecution.HookInstructionCount =
-                                    parseInt(m.HookExecutions[i].HookExecution.HookInstructionCount, 16);
-
-                                let s = m.HookExecutions[i].HookExecution.HookReturnString;
-                                if (s != '')
-                                    m.HookExecutions[i].HookExecution.HookReturnString =
-                                        Buffer.from(s, 'hex').toString('utf-8')
-
-                                ret.push(m.HookExecutions[i].HookExecution);
-                            }
-
-                            resolve(ret);
-                        }).catch(e=>reject(e));
-                    });
-                };
-
-
-
-                const assertTxnFailure = x =>
-                {
-                    if (!x || !x.result || x.result.engine_result_code == 0)
-                    {
-                        console.log("Transaction failed:", x)
-                        process.exit(1);
-                    }
-                };
-
-
-                const wasm = (x) =>
-                {
-                    console.log('wasm(' + x + ')');
-                    try
-                    {
-                        return fs.readFileSync(x).toString('hex').toUpperCase();
-                    }
-                    catch (e) {}
-
-                    try
-                    {
-                        return fs.readFileSync('' + x).toString('hex').toUpperCase();
-                    }
-                    catch (e) {}
-
-                    console.log("Could not find " + x)
-                    process.exit(1);
-                };
-
-
-                const wasmHash = (x)=>
-                {
-                    const blob = wasm(x);
-                    return crypto.createHash('SHA512').
-                        update(Buffer.from(blob, 'hex')).
-                        digest().slice(0,32).toString('hex').toUpperCase();
-                }
-
-                const feeCompute = (account_seed, txn_org) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        txn_to_send = { ... txn_org };
-                        txn_to_send['SigningPubKey'] = '';
-
-                        let wal = xrpljs.Wallet.fromSeed(account_seed);
-                        api.prepareTransaction(txn_to_send, {wallet: wal}).then(txn =>
-                        {
-                            let ser = rbc.encode(txn);
-                            fee(ser).then(fees =>
-                            {
-                                let base_drops = fees.base_fee
-
-                                delete txn_to_send['SigningPubKey']
-                                if (txn_to_send['Fee'] === undefined)
-                                    txn_to_send['Fee'] = base_drops + '';
-
-
-                                api.request(
-                                {
-                                    command: "account_info",
-                                    account: txn.Account
-                                }).then(y=>
-                                {
-                                    let seq = (y.result.account_data.Sequence);
-                                    txn_to_send.Sequence = seq;
-                                    api.prepareTransaction(txn_to_send, {wallet: wal}).then(txn =>
-                                    {
-                                        resolve(txn);
-                                    }).catch(e=>{reject(e);});
-                                }).catch(e=>{reject(e);});
-                            }).catch(e=>{reject(e);});
-                        }).catch(e=>{reject(e);});
-                    });
-                }
-
-                const feeSubmitAccept = (seed, txn) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        feeSubmit(seed, txn).then(x=>
-                        {
-                            ledgerAccept().then(()=>
-                            {
-                                resolve(x);
-                            }).catch(e=>
-                            {
-                                reject(e);
-                            });
-                        }).catch(e =>
-                        {
-                            reject(e);
-                        });
-                    });
-                }
-
-                const submit = (seed, txn) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        api.submit(txn,
-                            {wallet: xrpljs.Wallet.fromSeed(seed)}).then(s=>
-                        {
-                            resolve(s);
-                        }).catch(e=>{reject(e);});
-                    });
-                }
-
-                const feeSubmit = (seed, txn) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        feeCompute(seed, txn).then(txn=>
-                        {
-                            api.submit(txn,
-                                {wallet: xrpljs.Wallet.fromSeed(seed)}).then(s=>
-                            {
-                                resolve(s);
-                            }).catch(e=>{reject(e);});
-                        }).catch(e=>{reject(e);});
-                    });
-                }
-
-                const genesisseed = 'snoPBrXtMeMyMHUVTgbuqAfg1SUTb';
-                const genesisaddr = 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh';
-
-
-                const genesis =  xrpljs.Wallet.fromSeed(genesisseed);
-
-                const randomAccount = ()=>
-                {
-                    const acc = xrpljs.Wallet.fromSeed(kp.generateSeed());
-                    return acc
-                };
-                
-                const fromSeed = (x)=>
-                {
-                    const acc = xrpljs.Wallet.fromSeed(x);
-                    return acc
-                };
-
-                const pay_mock = (seed, amt, dest) =>
-                {
-                    if (dest.classicAddress != undefined)
-                        dest = dest.classicAddress;
-
-                    return new Promise((resolve, reject) =>
-                    {
-
-                        let wal = xrpljs.Wallet.fromSeed(seed);
-                        api.prepareTransaction({
-                            Account: wal.classicAddress,
-                            TransactionType: "Payment",
-                            Amount: ''+amt,
-                            Destination: dest,
-                            SigningPubKey: ''
-                        }, {wallet: wal}).then(txn =>
-                        {
-                            resolve(rbc.encode(txn));
-                        }).catch(e=>
-                        {
-                            reject(e);
-                        });
-                    });
-
-                }
-
-                const pay = (seed, amt, dest) =>
-                {
-                    if (dest.classicAddress != undefined)
-                        dest = dest.classicAddress;
-
-                    return new Promise((resolve, reject) =>
-                    {
-                        let wal = xrpljs.Wallet.fromSeed(seed);
-
-                        feeSubmit(seed, {
-                            Account: wal.classicAddress,
-                            TransactionType: "Payment",
-                            Amount: ''+amt,
-                            Destination: dest
-                        }).then(x=>
-                        {
-                            assertTxnSuccess(x);
-                            resolve(x);
-                        }).catch(err);
-                    });
-                };
-
-                const hookHash = fn =>
-                {
-                    let b = fs.readFileSync('' + fn);
-                    return crypto.createHash('SHA512').update(b).digest().slice(0,32).toString('hex').toUpperCase()
-                }
-
-                const fundFromGenesis = (acc) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        const ffg = (acc, after) =>
-                        {
-                            if (typeof(acc) != 'string')
-                                acc = acc.classicAddress;
-
-                            console.log('ffg: ' + acc);
-                            feeSubmitAccept(genesis.seed, {
-                                Account: genesis.classicAddress,        // fund account from genesis
-                                TransactionType: "Payment",
-                                Amount: "100000000000",
-                                Destination: acc,
-                            }).then(x=>
-                            {
-                                assertTxnSuccess(x);
-                                if (after)
-                                    return after();
-                                else
-                                    resolve();
-                            }).catch(err);
-                        };
-
-                        const doFfg = (acc) =>
-                        {
-
-                            if (typeof(acc.length) == 'undefined')
-                                return ffg(acc);
-                            else if (acc.length == 1)
-                                return ffg(acc[0]);
-                            else
-                            {
-                                return ffg(acc[0],
-                                    ((acc)=>{
-                                        return ()=>{
-                                            acc.shift();
-                                            return doFfg(acc);
-                                        };
-                                    })(acc));
-                            }
-                        }
-
-                        return doFfg(acc);
-
-                    });
-                };
-
-
-
-                const trustSet = (issuer, currency, limit, holders) =>
-                {
-                    if (typeof(issuer.classicAddress) != 'undefined')
-                        issuer = issuer.classicAddress;
-
-                    return new Promise((resolve, reject)=>
-                    {
-                        const doTs = (holder) =>
-                        {
-                            if (holder.length == 0)
-                                return resolve();
-                            let h = holder.shift();
-                            feeSubmitAccept(h.seed,
-                            {
-                                Account: h.classicAddress,
-                                TransactionType: "TrustSet",
-                                LimitAmount: {
-                                    "currency": currency + "",
-                                    "issuer": issuer,
-                                    "value": limit + ""
-                                }
-                            }).then(x=>
-                            {
-                                console.log(x)
-                                assertTxnSuccess(x);
-                                return doTs(holder);
-                            }).catch(e=>reject(e));
-                        };
-
-                        doTs(holders);
-                    });
-                };
-
-                const issueTokens = (issuer, currency, toWhom) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        const itf = (issuer, currency, toWhom) =>
-                        {
-                            let c = 0;
-                            for (let next in toWhom)
-                            {
-                                c++;
-
-                                let addr = next;
-                                let amt = toWhom[addr];
-                                delete toWhom[addr];
-                                let txn =
-                                {
-                                    Account: issuer.classicAddress,
-                                    TransactionType: "Payment",
-                                    Amount: {
-                                        "currency": currency,
-                                        "value": amt + "",
-                                        "issuer": issuer.classicAddress
-                                    },
-                                    Destination: addr
-                                };
-
-                                feeSubmitAccept(issuer.seed, txn).then(x=>
-                                {
-                                    console.log(x);
-                                    assertTxnSuccess(x);
-                                    return itf(issuer, currency, toWhom);
-                                }).catch(e=>reject(e));
-                                break;
-                            }
-                            if (c == 0)
-                                resolve();
-                        };
-                        return itf(issuer, currency, toWhom);
-                    });
-                };
-
-                const setTshCollect = (accounts) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        const stc = (accounts) =>
-                        {
-                            if (accounts.length == 0)
-                                return resolve();
-                            let acc = accounts.shift();
-
-                            feeSubmitAccept(acc.seed,
-                            {
-                                Account: acc.classicAddress,
-                                TransactionType: "AccountSet",
-                                SetFlag: 11
-                            }).then(x=>
-                            {
-                                console.log(x);
-                                assertTxnSuccess(x);
-                                return stc(accounts);
-                            }).catch(e=>reject(e));
-                        };
-                        stc(accounts);
-                    });
-                }
-
-                const feeSubmitAcceptMultiple = (txn, accounts) =>
-                {
-                    return new Promise((resolve, reject) =>
-                    {
-                        const stc = (accounts) =>
-                        {
-                            if (accounts.length == 0)
-                                return resolve();
-                            let acc = accounts.shift();
-
-                            let txn_to_submit = { ... txn };
-
-                            txn_to_submit['Account'] = acc.classicAddress;
-                            feeSubmitAccept(acc.seed, txn_to_submit).then(x=>
-                            {
-                                console.log(x);
-                                assertTxnSuccess(x);
-                                return stc(accounts);
-                            }).catch(e=>reject(e));
-                        };
-                        stc(accounts);
-                    });
-                }
-
-                const log = m =>
-                {
-//                    console.log(JSON.stringify(m, null, 4));
-                      console.dir(m, {depth:null});
-                }
-
-
-                const hex_memos = (x) =>
-                {
-                    if (!("Memos" in x))
-                        return;
-
-                    for (y in x["Memos"])
-                    {
-                        for (a in x["Memos"][y])
-                        {
-                            let Fields = ["MemoFormat", "MemoType", "MemoData"];
-                            for (z in Fields)
-                            {
-                                if (Fields[z] in x["Memos"][y][a])
-                                {
-                                    let u = x["Memos"][y][a][Fields[z]].toUpperCase()
-                                    if (u.match(/^[0-9A-F]+$/))
-                                    {
-                                        x["Memos"][y][a][Fields[z]] = u;
-                                        continue;
-                                    }
-
-                                    x["Memos"][y][a][Fields[z]] =
-                                            ""+Buffer.from(x["Memos"][y][a][Fields[z]]).toString('hex').toUpperCase();
-                                }
-                            }
-                        }
-                    }
-                }
-
-                api.connect().then(()=>
-                {
-                    resolve({
-                        hex_memos: hex_memos,
-                        rbc: rbc,
-                        rac: rac,
-                        api: api,
-                        xrpljs: xrpljs,
-                        assertTxnSuccess: assertTxnSuccess,
-                        assertTxnFailure: assertTxnFailure,
-                        wasm: wasm,
-                        kp: kp,
-                        genesis: genesis,
-                        randomAccount: randomAccount,
-                        fromSeed: fromSeed,
-                        fundFromGenesis: fundFromGenesis,
-                        err: err,
-                        hsfOVERRIDE: 1,
-                        hsfNSDELETE: 2,
-                        hsfCOLLECT: 4,
-                        asfTshCollect: 11,
-                        hookHash: hookHash,
-                        pay: pay,
-                        pay_mock: pay_mock,
-                        fee: fee,
-                        genesisseed: genesisseed,
-                        genesisaddr: genesisaddr,
-                        feeCompute: feeCompute,
-                        feeSubmit: feeSubmit,
-                        feeSubmitAccept: feeSubmitAccept,
-                        ledgerAccept: ledgerAccept,
-                        fetchMeta: fetchMeta,
-                        fetchMetaHookExecutions: fetchMetaHookExecutions,
-                        wasmHash: wasmHash,
-                        assert: assert,
-                        trustSet: trustSet,
-                        issueTokens: issueTokens,
-                        log: log,
-                        submit: submit,
-                        setTshCollect: setTshCollect,
-                        feeSubmitAcceptMultiple: feeSubmitAcceptMultiple,
-                        nftid: nftid
-
-                    });
-                }).catch(err);
-        });
-    }
-};
diff --git a/hook/v3l1s b/hook/v3l1s
deleted file mode 100644
index 503b8c69d..000000000
--- a/hook/v3l1s
+++ /dev/null
@@ -1,13 +0,0 @@
-nHDs6fHVnhb4ZbSFWF2dTTPHoZ6Rr39i2UfLotzgf8FKUi7iZdxx #tn4
-nHUvgFxT8EGrXqQZ1rqMw67UtduefbaCHiVtVahk9RXDJP1g1mB4 #tn5
-nHU7Vn6co7xEFMBesV7qw7FXE8ucKrhVWQiYZB5oGyMhvqrnZrnJ #tn6 table with 2 members
-nHBoJCE3wPgkTcrNPMHyTJFQ2t77EyCAqcBRspFCpL6JhwCm94VZ #tn7 table with 3 members
-nHUVv4g47bFMySAZFUKVaXUYEmfiUExSoY4FzwXULNwJRzju4XnQ #tn8 table with 5 members
-nHBvr8avSFTz4TFxZvvi4rEJZZtyqE3J6KAAcVWVtifsE7edPM7q #tn9 tbale with 20 members
-nHUH3Z8TRU57zetHbEPr1ynyrJhxQCwrJvNjr4j1SMjYADyW1WWe #tn10
-nHBdSXv3DhYJVXUppMLpCwJWDFVQyFdZrbMxeh8CFiBEvfTCy3Uh #tn11
-
-+sn4KDPD8Voo8izLJmZ3YouPWupc6t - rh93ixpFBnSmgnytLe2qkC5YNCqykvcKH5 # tn4
-+snggivhEEkSuTWuznHCiezKkd16aF - rHWQWs7Ci1N6USkJRcCQgmDQB4RjrKg9MT # tn5
-+sn6QG65zVPwBdAiWiMV9gYUpTSs12 - rswfmipoy321YysJo145LaD3q3b7aqiC3t # tn10
-+shvun4yCBEkvDmTjKek6jERHjrHDM - rwLnvKrNK1YvdnbrLoeFLUfZwSTWg3iuLx # tn11
diff --git a/hook/v3l2s b/hook/v3l2s
deleted file mode 100644
index df729e3a5..000000000
--- a/hook/v3l2s
+++ /dev/null
@@ -1,150 +0,0 @@
-V6 Seat 0:
-{
-  r: 'rN4HyiKpUuQM68pohiMPinQnyPdY8tBcWa',
-  s: 'shF4XCzYHfe3pSvKY6Yc526wFZmMA'
-}
-V6 Seat 1:
-{
-  r: 'rnsVxwrctvfiPX6wtX5tjYLPMidXFTqWc8',
-  s: 'sssxXie97ndAUgkrEsWasPYm7SgZx'
-}
-V7 Seat 0:
-{
-  r: 'rLjrBkBUSRtZJnFfZAbX4SFQSpkk2HcpX8',
-  s: 'shhohoyMjYKrJWDE4vUpS2SRNwB6G'
-}
-V7 Seat 1:
-{
-  r: 'rKUt5c9zqGCsZTVhUqrTUNnAbHHo19vtoM',
-  s: 'ssbEWV1w8gVdL8nFZncCmXVjnM8BW'
-}
-V7 Seat 2:
-{
-  r: 'rJY9NAbesWDGupunxyTvvtL3yWUDrbuCRF',
-  s: 'sp8vt3bSN28JWKhYB9ifcXcy76jT8'
-}
-V8 Seat 0:
-{
-  r: 'r38XdJQ2TKdLRAENjLoft8eDvFsUee5wbr',
-  s: 'saE4QyTBzDSNsmADzGE7hfywxxVzR'
-}
-V8 Seat 1:
-{
-  r: 'rnVtyAEp4TGyje7ccS1SjWHVwxqqQBeft3',
-  s: 'shngKoSPmo2vTq5ZEQ48MSsEkpRX7'
-}
-V8 Seat 2:
-{
-  r: 'rpzQniG7qsVi6qaS5X2QuscfpWY31j5bks',
-  s: 'spqgANJeTx9B8KdkWde46tNMtpD7Q'
-}
-V8 Seat 3:
-{
-  r: 'rsb7Y9qE7uvftjHZPW1qBVbLdCxjGe5G8X',
-  s: 'shaTpXZPvxiZu4LFTYHm6iqJNAQ4b'
-}
-V8 Seat 4:
-{
-  r: 'rJeoxs1fZW78sMeamwJ27CVcXZNpQZR3t',
-  s: 'sh2St95HdoahwkTKvQZPia3yquzER'
-}
-V9 Seat 0:
-{
-  r: 'rh8svn1EFs3TKEYVBZfWpNdqNr2R1uyM7y',
-  s: 'snPYNXdWmiXCKYai56JVqmHxCpQoU'
-}
-V9 Seat 1:
-{
-  r: 'rMn7PRAiT22hytTpLBHHZ34jPe2RCi2svT',
-  s: 'ssGAQLWSp3f48NgA1MuesSZePu4xH'
-}
-V9 Seat 2:
-{
-  r: 'rLSCctV2Q5rsduFuZk7N65mbSrf3BFknPc',
-  s: 'sn4kAb9ZvbExZSGgkrnefSxM9NdWo'
-}
-V9 Seat 3:
-{
-  r: 'rn8b9tjZbKmPSoMKV8QwgcU8wq2DVCdfQN',
-  s: 'snoo6dLLZkrriMrRqojLtcdjzSMH7'
-}
-V9 Seat 4:
-{
-  r: 'rEAeU9EDmdcJ3xPLW714bm9fygDV8XoNrj',
-  s: 'shw3jMCqPKSTQStGXG1vneYPDdG2e'
-}
-V9 Seat 5:
-{
-  r: 'rpCLrZYhfaN7ysBNpfAZuNj49Fyg4SHFGv',
-  s: 'snLsZJ32spovJXHq46DUZbiYU7DVn'
-}
-V9 Seat 6:
-{
-  r: 'rafa8E9RPa5vxZ4rN8s571aWUdbE4oqv7i',
-  s: 'sp8AzxGA9wreHT6GtjEKRxKqqXHbe'
-}
-V9 Seat 7:
-{
-  r: 'r37Qu8nTfdJFkE14ERAB3LH3wSdz1LbyzU',
-  s: 'shb2eB57gjwJQghkywSJB7dFzs2Gf'
-}
-V9 Seat 8:
-{
-  r: 'rnqXJXh1mGf9BGt3aB74RscNsJiDMV1YPK',
-  s: 'spxN1dbqj2eCR9GavBkZdk5vbHwgV'
-}
-V9 Seat 9:
-{
-  r: 'rLhHTgwBbq7aVsrSPp2CDeyX3RRuXihGVv',
-  s: 'shFpc2PJJKoacdPjgp4wio7rpZnyq'
-}
-V9 Seat 10:
-{
-  r: 'rJt6kBV8EjhN9v7U4KnYtydfFu3DaQVCvF',
-  s: 'shBVfbpYYrUKjBzwgzCDz1ZkeGEeU'
-}
-V9 Seat 11:
-{
-  r: 'r4YGLYBzvWTZGLKwbByfVxc8ZjCgBUUwBn',
-  s: 'sseqqUjoNptmRiNNpduE67vAiKKur'
-}
-V9 Seat 12:
-{
-  r: 'rEw7zrMdCXFs3CzxtC3NFpjmM2HBLTigVL',
-  s: 'shXdBhi8GyyPtmrRXwoVSRNDxpabc'
-}
-V9 Seat 13:
-{
-  r: 'rwrqQBN88MeT3QDipCfJSeJ9sZMZA54xkD',
-  s: 'sahvves9GbwcMPto383CBio5mq1uy'
-}
-V9 Seat 14:
-{
-  r: 'rpmAcuJAWVgS1zL3R1ob8F5ZSJ9d4jEAoj',
-  s: 'ssnPapLdvquGWyMqNj7nSgsRosjKa'
-}
-V9 Seat 15:
-{
-  r: 'rwGMc2FXtvPitSppNwJaSxqSfEfrLVRtMm',
-  s: 'shzZZot7Q4kEuWqW4j6SmCe9ptuc1'
-}
-V9 Seat 16:
-{
-  r: 'rUrAvfQTv16EETc3Q2sgCTAoKS9C49crx2',
-  s: 'snxdfhfLTkHM5ZYUTrTCPicxRt5Xh'
-}
-V9 Seat 17:
-{
-  r: 'rBDsW6p9Xak9b2ye2eAgh9JjpubTzeV1ti',
-  s: 'snb9xGH9M4WrtK4M15u4ZppzP84Rb'
-}
-V9 Seat 18:
-{
-  r: 'rhGbC5n1qK3Cq3sBbdtKGV5AR3kboXi41K',
-  s: 'shM2R6t3bM1cWrLKb7cMALgkL6mHS'
-}
-V9 Seat 19:
-{
-  r: 'rNu4sdVz6d2H37okUeH47ASMwckhzSx3k5',
-  s: 'ssBG6mp4rSAfK7FuwxCa4biY8Lqsu'
-}