Travis CI improvements:

FIXES: #2527

* define a custom Docker image for Travis Linux builds, based on the
  package build image
* add macOS builds
* add Windows builds (currently allowed to fail)
* improve build and shell scripts as required for the CI environments
* add an asio timer latency workaround
* omit several manual tests from Travis CI that cause memory exhaustion
Author: Mike Ellery
Date: 2019-05-20 10:53:24 -07:00
Committed by: Nik Bougalis
Parent: 87e9ee5ce9
Commit: 13a4fefe34
18 changed files with 1300 additions and 591 deletions


@@ -1,62 +1,309 @@
sudo: false
language: cpp
dist: xenial
services:
- docker
env:
global:
# Maintenance note: to move to a new version
# of boost, update both BOOST_ROOT and BOOST_URL.
# Note that for simplicity, BOOST_ROOT's final
# namepart must match the folder name internal
# to boost's .tar.gz.
- LCOV_ROOT=$HOME/lcov
- GDB_ROOT=$HOME/gdb
- BOOST_ROOT=$HOME/boost_1_70_0
- BOOST_URL='http://sourceforge.net/projects/boost/files/boost/1.70.0/boost_1_70_0.tar.gz'
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-xenial-7
packages:
- gcc-7
- g++-7
- gcc-8
- g++-8
- python-software-properties
- protobuf-compiler
- libprotobuf-dev
- libssl-dev
- libstdc++6
- binutils-gold
- cmake
- lcov
- llvm-7
- clang-7
matrix:
include:
- compiler: gcc
env: GCC_VER=7 BUILD_TYPE=Debug
- compiler: clang
env: GCC_VER=7 BUILD_TYPE=Debug
cache:
directories:
- $BOOST_ROOT
- .nih_c
- DOCKER_IMAGE="mellery451/rippled-ci-builder:2019-07-18"
- CMAKE_EXTRA_ARGS="-Dwerr=ON"
- NINJA_BUILD=true
# change this if we get more VM capacity
- MAX_TIME_MIN=80
- CACHE_DIR=${TRAVIS_HOME}/_cache
- NIH_CACHE_ROOT=${CACHE_DIR}/nih_c
- PARALLEL_TESTS=true
# this is NOT used by linux container based builds (which already have boost installed)
- BOOST_URL='https://dl.bintray.com/boostorg/release/1.70.0/source/boost_1_70_0.tar.bz2'
- VCPKG_DIR=${CACHE_DIR}/vcpkg
- USE_CCACHE=true
- CCACHE_BASEDIR=${TRAVIS_HOME}
- CCACHE_NOHASHDIR=true
- CCACHE_DIR=${CACHE_DIR}/ccache
before_install:
- bin/ci/ubuntu/install-dependencies.sh
- if [ "$(uname)" = "Darwin" ] ; then export NUM_PROCESSORS=$(sysctl -n hw.physicalcpu); else export NUM_PROCESSORS=$(nproc); fi
- echo "NUM PROC is ${NUM_PROCESSORS}"
- if [ "$(uname)" = "Linux" ] ; then docker pull ${DOCKER_IMAGE}; fi
- if [ "${MATRIX_EVAL}" != "" ] ; then eval "${MATRIX_EVAL}"; fi
- if [ "${CMAKE_ADD}" != "" ] ; then export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} ${CMAKE_ADD}"; fi
- bin/ci/ubuntu/travis-cache-start.sh
matrix:
fast_finish: true
allow_failures:
# TODO these all need more investigation
#
# current tsan failure might be related to:
# https://github.com/google/sanitizers/issues/1104
- name: tsan, clang-8
# there are a number of UBs caught currently that need triage
- name: ubsan, clang-8
# all of the windows builds are unreliable
# because the travis VMs are currently slow and unreliable
- name: windows, debug
- name: windows, release
- name: windows, visual studio, debug
# these builds are ordered approximately by
# descending build time so that the longest
# builds start first
include:
# windows
- &windows
os: windows
name: windows, debug
env:
# caching is pretty flakey on windows...
# put NIH in a non-cached location until
# we come up with a way to stabilize that
# cache on windows (minimize incremental changes)
- NIH_CACHE_ROOT=${TRAVIS_BUILD_DIR}/nih_c
- VCPKG_DEFAULT_TRIPLET="x64-windows-static"
- BOOST_ROOT=${CACHE_DIR}/boost_1_70_0
- MATRIX_EVAL="CC=cl.exe && CXX=cl.exe"
- >-
CMAKE_ADD="
-DBOOST_ROOT=${BOOST_ROOT}/_INSTALLED_
-DCMAKE_VERBOSE_MAKEFILE=ON
-DCMAKE_TOOLCHAIN_FILE=${VCPKG_DIR}/scripts/buildsystems/vcpkg.cmake
-DVCPKG_TARGET_TRIPLET=x64-windows-static"
install:
- choco upgrade cmake.install
- choco install ninja visualstudio2017-workload-vctools -y
- travis_wait 30 bin/sh/install-vcpkg.sh
- travis_wait ${MAX_TIME_MIN} Builds/containers/shared/install_boost.sh
before_script:
- export BLD_CONFIG=Debug
script:
- . ./bin/sh/setup-msvc.sh
- mkdir -p build.ms && cd build.ms
- cmake -G Ninja ${CMAKE_EXTRA_ARGS} -DCMAKE_BUILD_TYPE=${BLD_CONFIG} ..
- travis_wait ${MAX_TIME_MIN} cmake --build . --parallel ${NUM_PROCESSORS} --verbose
- ./rippled.exe --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS}
- <<: *windows
name: windows, release
before_script:
- export BLD_CONFIG=Release
- <<: *windows
name: windows, visual studio, debug
script:
- mkdir -p build.ms && cd build.ms
- cmake -G "Visual Studio 15 2017 Win64" ${CMAKE_EXTRA_ARGS} ..
- export DESTDIR=${PWD}/_installed_
- travis_wait ${MAX_TIME_MIN} cmake --build . --parallel ${NUM_PROCESSORS} --verbose --config ${BLD_CONFIG} --target install
- >-
"./_installed_/Program Files/rippled/bin/rippled.exe" --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS}
# asan
- compiler: clang-8
name: asan, clang-8
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dsan=address"
- ASAN_OPTIONS="print_stats=true:atexit=true"
#- LSAN_OPTIONS="verbosity=1:log_threads=1"
- PARALLEL_TESTS=false
# ubsan
- compiler: clang-8
name: ubsan, clang-8
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dsan=undefined"
# once we can run clean under ubsan, add halt_on_error=1 to options below
- UBSAN_OPTIONS="print_stacktrace=1:report_error_type=1"
- PARALLEL_TESTS=false
# tsan
- compiler: clang-8
name: tsan, clang-8
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dsan=thread"
- TSAN_OPTIONS="history_size=3 external_symbolizer_path=/usr/bin/llvm-symbolizer verbosity=1"
- PARALLEL_TESTS=false
# coverage builds
- compiler: gcc-8
name: coverage, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dcoverage=ON"
- TARGET=coverage_report
- SKIP_TESTS=true
- compiler: clang-8
name: coverage, clang-8
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dcoverage=ON"
- TARGET=coverage_report
- SKIP_TESTS=true
# nounity
- compiler: gcc-8
name: non-unity, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dunity=OFF"
- compiler: clang-8
name: non-unity, clang-8
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dunity=OFF"
# manual tests
- compiler: gcc-8
name: manual tests, gcc-8, debug
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- MANUAL_TESTS=true
# manual tests
- compiler: gcc-8
name: manual tests, gcc-8, release
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dassert=ON"
- MANUAL_TESTS=true
# release builds
- compiler: gcc-8
name: gcc-8, release
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dassert=ON"
- compiler: clang-8
name: clang-8, release
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Release
- CMAKE_ADD="-Dassert=ON"
# debug builds
- compiler: gcc-8
name: gcc-8, debug
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- compiler: clang-8
name: clang-8, debug
env:
- MATRIX_EVAL="CC=clang-8 && CXX=clang++-8"
- BUILD_TYPE=Debug
# dynamic lib builds
- compiler: gcc-8
name: non-static, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dstatic=OFF"
- compiler: gcc-8
name: non-static + BUILD_SHARED_LIBS, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- CMAKE_ADD="-Dstatic=OFF -DBUILD_SHARED_LIBS=ON"
# makefile
- compiler: gcc-8
name: makefile generator, gcc-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- BUILD_TYPE=Debug
- NINJA_BUILD=false
# misc alternative compilers
- compiler: gcc-6
name: gcc-6
env:
- MATRIX_EVAL="CC=gcc-6 && CXX=g++-6"
- BUILD_TYPE=Debug
- compiler: gcc-7
name: gcc-7
env:
- MATRIX_EVAL="CC=gcc-7 && CXX=g++-7"
- BUILD_TYPE=Debug
- compiler: gcc-9
name: gcc-9
env:
- MATRIX_EVAL="CC=gcc-9 && CXX=g++-9"
- BUILD_TYPE=Debug
- compiler: clang-6.0
name: clang-6
env:
- MATRIX_EVAL="CC=clang-6.0 && CXX=clang++-6.0"
- BUILD_TYPE=Debug
- compiler: clang-7
name: clang-7
env:
- MATRIX_EVAL="CC=clang-7 && CXX=clang++-7"
- BUILD_TYPE=Debug
- compiler: clang-9
name: clang-9
env:
- MATRIX_EVAL="CC=clang-9 && CXX=clang++-9"
- BUILD_TYPE=Debug
# macos
- &macos
os: osx
osx_image: xcode10.2
name: xcode10, debug
env:
- BLD_CONFIG=Debug
- BOOST_ROOT=${CACHE_DIR}/boost_1_70_0
- >-
CMAKE_ADD="
-DBOOST_ROOT=${BOOST_ROOT}/_INSTALLED_
-DBoost_ARCHITECTURE=-x64
-DBoost_NO_SYSTEM_PATHS=ON
-DCMAKE_VERBOSE_MAKEFILE=ON"
addons:
homebrew:
packages:
- bash
- ninja
- cmake
- openssl@1.1
update: true
install:
- export OPENSSL_ROOT=$(brew --prefix openssl@1.1)
- travis_wait ${MAX_TIME_MIN} Builds/containers/shared/install_boost.sh
script:
- mkdir -p build.macos && cd build.macos
- cmake -G Ninja ${CMAKE_EXTRA_ARGS} -DCMAKE_BUILD_TYPE=${BLD_CONFIG} ..
- travis_wait ${MAX_TIME_MIN} cmake --build . --parallel ${NUM_PROCESSORS} --verbose
- ./rippled --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS}
- <<: *macos
name: xcode10, release
before_script:
- export BLD_CONFIG=Release
- <<: *macos
osx_image: xcode9.4
name: xcode9, debug
before_script:
# turn off warnings as err for this build
- export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} -Dwerr=OFF"
- <<: *macos
osx_image: xcode11
name: xcode11, debug
cache:
timeout: 900
directories:
- $CACHE_DIR
before_cache:
- if [ $(uname) = "Linux" ] ; then SUDO="sudo"; else SUDO=""; fi
- cd ${TRAVIS_HOME}
- if [ -f cache_ignore.tar ] ; then $SUDO tar xvf cache_ignore.tar; fi
- cd ${TRAVIS_BUILD_DIR}
script:
- travis_wait 35 bin/ci/ubuntu/build-and-test.sh
- sudo chmod -R a+rw ${CACHE_DIR}
- ccache -s
- travis_wait ${MAX_TIME_MIN} bin/ci/ubuntu/build-in-docker.sh
- ccache -s
notifications:
email:
false
irc:
channels:
- "chat.freenode.net#ripple-dev"
email: false
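
Note on the matrix mechanics above: each job supplies its toolchain via MATRIX_EVAL and its extra CMake flags via CMAKE_ADD, and before_install evaluates the former and folds the latter into CMAKE_EXTRA_ARGS. A minimal sketch of that flow, using the values from the gcc-8 release job (illustrative, not part of the CI scripts):

  MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
  CMAKE_ADD="-Dassert=ON"
  CMAKE_EXTRA_ARGS="-Dwerr=ON"                               # global default from env
  eval "${MATRIX_EVAL}"                                      # sets CC and CXX for this job
  export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} ${CMAKE_ADD}" # merged as in before_install
  echo "$CC $CXX $CMAKE_EXTRA_ARGS"                          # gcc-8 g++-8 -Dwerr=ON -Dassert=ON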

File diff suppressed because it is too large.


@@ -5,11 +5,13 @@
IN_FILE variable.
#]=========================================================]
file (READ ${IN_FILE} contents)
## only print files that actually have some text in them
if (contents MATCHES "[a-z0-9A-Z]+")
execute_process(
COMMAND
${CMAKE_COMMAND} -E echo "${contents}")
if (EXISTS ${IN_FILE})
file (READ ${IN_FILE} contents)
## only print files that actually have some text in them
if (contents MATCHES "[a-z0-9A-Z]+")
execute_process(
COMMAND
${CMAKE_COMMAND} -E echo "${contents}")
endif ()
endif ()


@@ -8,20 +8,22 @@ COPY centos-builder/centos_setup.sh /tmp/
COPY shared/build_deps.sh /tmp/
COPY shared/install_cmake.sh /tmp/
COPY centos-builder/extras.sh /tmp/
COPY shared/install_boost.sh /tmp/
RUN chmod +x /tmp/centos_setup.sh && \
chmod +x /tmp/build_deps.sh && \
chmod +x /tmp/install_boost.sh && \
chmod +x /tmp/install_cmake.sh && \
chmod +x /tmp/extras.sh
RUN /tmp/centos_setup.sh
RUN /tmp/install_cmake.sh
ENV PATH="/opt/local/cmake/bin:$PATH"
RUN source scl_source enable devtoolset-6 python27 && \
RUN source scl_source enable devtoolset-7 python27 && \
/tmp/build_deps.sh
ENV BOOST_ROOT="/opt/local/boost/_INSTALLED_"
ENV PLANTUML_JAR="/opt/plantuml/plantuml.jar"
ENV BOOST_ROOT="/opt/local/boost"
ENV OPENSSL_ROOT="/opt/local/openssl"
ENV GDB_ROOT="/opt/local/gdb"
RUN source scl_source enable devtoolset-6 python27 && \
RUN source scl_source enable devtoolset-7 python27 && \
/tmp/extras.sh
# prep files for package building


@@ -9,14 +9,8 @@ yum -y install epel-release centos-release-scl
yum -y install \
wget curl time gcc-c++ time yum-utils \
libstdc++-static rpm-build gnupg which make cmake \
devtoolset-4 devtoolset-4-gdb devtoolset-4-libasan-devel devtoolset-4-libtsan-devel devtoolset-4-libubsan-devel \
devtoolset-6 devtoolset-6-gdb devtoolset-6-libasan-devel devtoolset-6-libtsan-devel devtoolset-6-libubsan-devel \
devtoolset-7 devtoolset-7-gdb devtoolset-7-libasan-devel devtoolset-7-libtsan-devel devtoolset-7-libubsan-devel \
llvm-toolset-7 llvm-toolset-7-runtime llvm-toolset-7-build llvm-toolset-7-clang \
llvm-toolset-7-clang-analyzer llvm-toolset-7-clang-devel llvm-toolset-7-clang-libs \
llvm-toolset-7-clang-tools-extra llvm-toolset-7-compiler-rt llvm-toolset-7-lldb \
llvm-toolset-7-lldb-devel llvm-toolset-7-python-lldb \
flex flex-devel bison bison-devel \
flex flex-devel bison bison-devel parallel \
ncurses ncurses-devel ncurses-libs graphviz graphviz-devel \
lzip p7zip bzip2 bzip2-devel lzma-sdk lzma-sdk-devel xz-devel \
zlib zlib-devel zlib-static texinfo openssl openssl-static \
@@ -28,8 +22,17 @@ yum -y install \
ninja-build git svn \
protobuf protobuf-static protobuf-c-devel \
protobuf-compiler protobuf-devel \
swig ccache perl-Digest-MD5 python2-pip
swig perl-Digest-MD5 python2-pip
# TODO need permanent link
yum -y install ftp://ftp.pbone.net/mirror/archive.fedoraproject.org/fedora-secondary/updates/26/i386/Packages/p/python2-six-1.10.0-9.fc26.noarch.rpm
if [ "${CI_USE}" = true ] ; then
# TODO need permanent link
yum -y install ftp://ftp.pbone.net/mirror/archive.fedoraproject.org/fedora-secondary/updates/26/i386/Packages/p/python2-six-1.10.0-9.fc26.noarch.rpm
yum -y install \
devtoolset-4 devtoolset-4-gdb devtoolset-4-libasan-devel devtoolset-4-libtsan-devel devtoolset-4-libubsan-devel \
devtoolset-6 devtoolset-6-gdb devtoolset-6-libasan-devel devtoolset-6-libtsan-devel devtoolset-6-libubsan-devel \
llvm-toolset-7 llvm-toolset-7-runtime llvm-toolset-7-build llvm-toolset-7-clang \
llvm-toolset-7-clang-analyzer llvm-toolset-7-clang-devel llvm-toolset-7-clang-libs \
llvm-toolset-7-clang-tools-extra llvm-toolset-7-compiler-rt llvm-toolset-7-lldb \
llvm-toolset-7-lldb-devel llvm-toolset-7-python-lldb
fi


@@ -6,20 +6,15 @@ function build_boost()
local boost_ver=$1
local do_link=$2
local boost_path=$(echo "${boost_ver}" | sed -e 's!\.!_!g')
cd /tmp
wget https://dl.bintray.com/boostorg/release/${boost_ver}/source/boost_${boost_path}.tar.bz2
mkdir -p /opt/local
cd /opt/local
tar xf /tmp/boost_${boost_path}.tar.bz2
BOOST_ROOT=/opt/local/boost_${boost_path}
BOOST_URL="https://dl.bintray.com/boostorg/release/${boost_ver}/source/boost_${boost_path}.tar.bz2"
BOOST_BUILD_ALL=true
. /tmp/install_boost.sh
if [ "$do_link" = true ] ; then
ln -s ./boost_${boost_path} boost
fi
cd boost_${boost_path}
./bootstrap.sh
./b2 -j$(nproc)
./b2 stage
cd ..
rm -f /tmp/boost_${boost_path}.tar.bz2
}
build_boost "1.70.0" true
@@ -39,7 +34,7 @@ make install
cd ..
rm -f openssl-${OPENSSL_VER}.tar.gz
rm -rf openssl-${OPENSSL_VER}
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/opt/local/openssl/lib /opt/local/openssl/bin/openssl version -a
LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-}:/opt/local/openssl/lib /opt/local/openssl/bin/openssl version -a
if [ "${CI_USE}" = true ] ; then
cd /tmp
@@ -59,20 +54,25 @@ if [ "${CI_USE}" = true ] ; then
wget -O /opt/plantuml/plantuml.jar https://downloads.sourceforge.net/project/plantuml/plantuml.jar
cd /tmp
wget https://github.com/linux-test-project/lcov/releases/download/v1.13/lcov-1.13.tar.gz
tar xfz lcov-1.13.tar.gz
cd lcov-1.13
wget https://github.com/linux-test-project/lcov/releases/download/v1.14/lcov-1.14.tar.gz
tar xfz lcov-1.14.tar.gz
cd lcov-1.14
make install PREFIX=/usr/local
cd ..
rm -r lcov-1.13 lcov-1.13.tar.gz
rm -r lcov-1.14 lcov-1.14.tar.gz
cd /tmp
wget https://github.com/ccache/ccache/releases/download/v3.7.1/ccache-3.7.1.tar.gz
tar xf ccache-3.7.1.tar.gz
cd ccache-3.7.1
./configure --prefix=/usr/local
make
make install
cd ..
rm -f ccache-3.7.1.tar.gz
rm -rf ccache-3.7.1
pip install requests
pip install https://github.com/codecov/codecov-python/archive/master.zip
set +e
mkdir -p /opt/local/nih_cache
mkdir -p /opt/jenkins
set -e
fi


@@ -0,0 +1,80 @@
#!/usr/bin/env bash
# Assumptions:
# 1) BOOST_ROOT and BOOST_URL are already defined,
# and contain valid values.
# 2) The last namepart of BOOST_ROOT matches the
# folder name internal to boost's .tar.gz
# When testing you can force a boost build by clearing travis caches:
# https://travis-ci.org/ripple/rippled/caches
set -exu
odir=$(pwd)
if [[ -d "$BOOST_ROOT/lib" || -d "${BOOST_ROOT}/stage/lib" ]] ; then
echo "Using cached boost at $BOOST_ROOT"
exit
fi
#fetch/unpack:
fn=$(basename -- "$BOOST_URL")
ext="${fn##*.}"
wget --quiet $BOOST_URL -O /tmp/boost.tar.${ext}
cd $(dirname $BOOST_ROOT)
rm -fr ${BOOST_ROOT}
tar xf /tmp/boost.tar.${ext}
cd $BOOST_ROOT
BLDARGS=()
if [[ ${BOOST_BUILD_ALL:-false} == "true" ]]; then
# we never need boost-python...so even for ALL
# option we can skip it
BLDARGS+=(--without-python)
else
BLDARGS+=(--with-chrono)
BLDARGS+=(--with-context)
BLDARGS+=(--with-coroutine)
BLDARGS+=(--with-date_time)
BLDARGS+=(--with-filesystem)
BLDARGS+=(--with-program_options)
BLDARGS+=(--with-regex)
BLDARGS+=(--with-serialization)
BLDARGS+=(--with-system)
BLDARGS+=(--with-atomic)
BLDARGS+=(--with-thread)
fi
BLDARGS+=(-j$((2*${NUM_PROCESSORS:-2})))
BLDARGS+=(--prefix=${BOOST_ROOT}/_INSTALLED_)
BLDARGS+=(-d0) # suppress messages/output
if [[ -z ${COMSPEC:-} ]]; then
if [[ "$(uname)" == "Darwin" ]] ; then
BLDARGS+=(cxxflags="-std=c++14 -fvisibility=default")
else
BLDARGS+=(cxxflags="-std=c++14")
BLDARGS+=(runtime-link="static,shared")
fi
BLDARGS+=(--layout=tagged)
./bootstrap.sh
./b2 "${BLDARGS[@]}" stage
./b2 "${BLDARGS[@]}" install
else
BLDARGS+=(runtime-link="static,shared")
BLDARGS+=(--layout=versioned)
BLDARGS+=(--toolset="msvc-14.1")
BLDARGS+=(address-model=64)
BLDARGS+=(architecture=x86)
BLDARGS+=(link=static)
BLDARGS+=(threading=multi)
cmd /E:ON /D /S /C"bootstrap.bat"
./b2.exe "${BLDARGS[@]}" stage
./b2.exe "${BLDARGS[@]}" install
fi
if [[ ${CI:-false} == "true" ]]; then
# save some disk space...these are mostly
# obj files and don't need to be kept in CI contexts
rm -rf bin.v2
fi
cd $odir
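
Usage sketch (hedged): the CI jobs call this script with BOOST_ROOT and BOOST_URL already exported; the macOS and Windows jobs above do roughly the following (travis_wait is a Travis helper and can be omitted locally):

  export CACHE_DIR=${TRAVIS_HOME}/_cache
  export BOOST_ROOT=${CACHE_DIR}/boost_1_70_0
  export BOOST_URL='https://dl.bintray.com/boostorg/release/1.70.0/source/boost_1_70_0.tar.bz2'
  travis_wait ${MAX_TIME_MIN} Builds/containers/shared/install_boost.sh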


@@ -1,22 +1,24 @@
ARG DIST_TAG=16.04
ARG GIT_COMMIT=unknown
FROM ubuntu:$DIST_TAG
ARG GIT_COMMIT=unknown
ARG CI_USE=false
LABEL git-commit=$GIT_COMMIT
# install/setup prerequisites:
COPY ubuntu-builder/ubuntu_setup.sh /tmp/
COPY shared/build_deps.sh /tmp/
COPY shared/install_cmake.sh /tmp/
COPY shared/install_boost.sh /tmp/
RUN chmod +x /tmp/ubuntu_setup.sh && \
chmod +x /tmp/build_deps.sh && \
chmod +x /tmp/install_boost.sh && \
chmod +x /tmp/install_cmake.sh
RUN /tmp/ubuntu_setup.sh
RUN /tmp/install_cmake.sh
ENV PATH="/opt/local/cmake/bin:$PATH"
RUN /tmp/build_deps.sh
ENV PLANTUML_JAR="/opt/plantuml/plantuml.jar"
ENV BOOST_ROOT="/opt/local/boost"
ENV BOOST_ROOT="/opt/local/boost/_INSTALLED_"
ENV OPENSSL_ROOT="/opt/local/openssl"
# prep files for package building


@@ -24,29 +24,27 @@ apt-get -y upgrade
if [[ ${VERSION_ID} =~ ^18\. ]] ; then
apt-add-repository -y multiverse
apt-add-repository -y universe
elif [[ ${VERSION_ID} =~ ^16\. ]] ; then
add-apt-repository -y ppa:ubuntu-toolchain-r/test
fi
add-apt-repository -y ppa:ubuntu-toolchain-r/test
apt-get -y clean
apt-get -y update
apt-get -y --fix-missing install \
make cmake ninja-build ccache \
apt-get -y --fix-missing install \
make cmake ninja-build \
protobuf-compiler libprotobuf-dev openssl libssl-dev \
liblzma-dev libbz2-dev zlib1g-dev \
libjemalloc-dev \
python-pip \
gdb gdbserver \
libstdc++6 \
flex bison \
flex bison parallel \
libicu-dev texinfo \
java-common javacc \
gcc-7 g++-7 \
gcc-8 g++-8 \
dpkg-dev debhelper devscripts fakeroot \
debmake git-buildpackage dh-make gitpkg debsums gnupg \
dh-buildinfo dh-make dh-systemd
apt-get -y install gcc-7 g++-7
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-7 40 \
--slave /usr/bin/g++ g++ /usr/bin/g++-7 \
@@ -56,6 +54,8 @@ update-alternatives --install \
--slave /usr/bin/gcov gcov /usr/bin/gcov-7 \
--slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-dump-7 \
--slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-tool-7
apt-get -y install gcc-8 g++-8
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-8 20 \
--slave /usr/bin/g++ g++ /usr/bin/g++-8 \
@@ -71,6 +71,30 @@ update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-7 40
update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-8 20
update-alternatives --auto cpp
if [ "${CI_USE}" = true ] ; then
apt-get -y install gcc-6 g++-6
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-6 10 \
--slave /usr/bin/g++ g++ /usr/bin/g++-6 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-6 \
--slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-6 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-6 \
--slave /usr/bin/gcov gcov /usr/bin/gcov-6 \
--slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-dump-6 \
--slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-tool-6
apt-get -y install gcc-9 g++-9
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-9 15 \
--slave /usr/bin/g++ g++ /usr/bin/g++-9 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-9 \
--slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-9 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-9 \
--slave /usr/bin/gcov gcov /usr/bin/gcov-9 \
--slave /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-dump-9 \
--slave /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-tool-9
fi
if [[ ${VERSION_ID} =~ ^18\. ]] ; then
apt-get -y install binutils
elif [[ ${VERSION_ID} =~ ^16\. ]] ; then
@@ -82,39 +106,109 @@ if [[ ${VERSION_ID} =~ ^18\. ]] ; then
cat << EOF > /etc/apt/sources.list.d/llvm.list
deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic main
deb-src http://apt.llvm.org/bionic/ llvm-toolchain-bionic main
deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-6.0 main
deb-src http://apt.llvm.org/bionic/ llvm-toolchain-bionic-6.0 main
deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-7 main
deb-src http://apt.llvm.org/bionic/ llvm-toolchain-bionic-7 main
deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main
deb-src http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main
EOF
elif [[ ${VERSION_ID} =~ ^16\. ]] ; then
cat << EOF > /etc/apt/sources.list.d/llvm.list
deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial main
deb-src http://apt.llvm.org/xenial/ llvm-toolchain-xenial main
deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-6.0 main
deb-src http://apt.llvm.org/xenial/ llvm-toolchain-xenial-6.0 main
deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-7 main
deb-src http://apt.llvm.org/xenial/ llvm-toolchain-xenial-7 main
deb http://apt.llvm.org/xenial/ llvm-toolchain-xenial-8 main
deb-src http://apt.llvm.org/xenial/ llvm-toolchain-xenial-8 main
EOF
fi
apt-get -y update
apt-get -y install \
clang-7 libclang-common-7-dev libclang-7-dev libllvm7 lldb-7 llvm-7 \
llvm-7-dev llvm-7-runtime clang-format-7 python-clang-7 python-lldb-7 \
liblldb-7-dev lld-7 libfuzzer-7-dev libc++-7-dev
apt-get -y install \
clang-7 libclang-common-7-dev libclang-7-dev libllvm7 llvm-7 \
llvm-7-dev llvm-7-runtime clang-format-7 python-clang-7 \
lld-7 libfuzzer-7-dev libc++-7-dev
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-7 40 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-7 \
--slave /usr/bin/llvm-profdata llvm-profdata /usr/bin/llvm-profdata-7 \
--slave /usr/bin/asan-symbolize asan-symbolize /usr/bin/asan_symbolize-7 \
--slave /usr/bin/clang-format clang-format /usr/bin/clang-format-7 \
--slave /usr/bin/lldb lldb /usr/bin/lldb-7 \
--slave /usr/bin/lldb-server lldb-server /usr/bin/lldb-server-7 \
--slave /usr/bin/llvm-ar llvm-ar /usr/bin/llvm-ar-7 \
--slave /usr/bin/llvm-cov llvm-cov /usr/bin/llvm-cov-7 \
--slave /usr/bin/llvm-nm llvm-nm /usr/bin/llvm-nm-7
/usr/bin/clang clang /usr/bin/clang-7 40 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-7 \
--slave /usr/bin/llvm-profdata llvm-profdata /usr/bin/llvm-profdata-7 \
--slave /usr/bin/asan-symbolize asan-symbolize /usr/bin/asan_symbolize-7 \
--slave /usr/bin/llvm-symbolizer llvm-symbolizer /usr/bin/llvm-symbolizer-7 \
--slave /usr/bin/clang-format clang-format /usr/bin/clang-format-7 \
--slave /usr/bin/llvm-ar llvm-ar /usr/bin/llvm-ar-7 \
--slave /usr/bin/llvm-cov llvm-cov /usr/bin/llvm-cov-7 \
--slave /usr/bin/llvm-nm llvm-nm /usr/bin/llvm-nm-7
apt-get -y install \
clang-8 libclang-common-8-dev libclang-8-dev libllvm8 llvm-8 \
llvm-8-dev llvm-8-runtime clang-format-8 python-clang-8 \
lld-8 libfuzzer-8-dev libc++-8-dev
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-8 20 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-8 \
--slave /usr/bin/llvm-profdata llvm-profdata /usr/bin/llvm-profdata-8 \
--slave /usr/bin/asan-symbolize asan-symbolize /usr/bin/asan_symbolize-8 \
--slave /usr/bin/llvm-symbolizer llvm-symbolizer /usr/bin/llvm-symbolizer-8 \
--slave /usr/bin/clang-format clang-format /usr/bin/clang-format-8 \
--slave /usr/bin/llvm-ar llvm-ar /usr/bin/llvm-ar-8 \
--slave /usr/bin/llvm-cov llvm-cov /usr/bin/llvm-cov-8 \
--slave /usr/bin/llvm-nm llvm-nm /usr/bin/llvm-nm-8
update-alternatives --auto clang
if [ "${CI_USE}" = true ] ; then
apt-get -y install \
clang-5.0 libclang-common-5.0-dev libclang-5.0-dev libllvm5.0 llvm-5.0 \
llvm-5.0-dev llvm-5.0-runtime clang-format-5.0 python-clang-5.0 \
lld-5.0 libfuzzer-5.0-dev
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-5.0 10 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-5.0 \
--slave /usr/bin/llvm-profdata llvm-profdata /usr/bin/llvm-profdata-5.0 \
--slave /usr/bin/asan-symbolize asan-symbolize /usr/bin/asan_symbolize-5.0 \
--slave /usr/bin/llvm-symbolizer llvm-symbolizer /usr/bin/llvm-symbolizer-5.0 \
--slave /usr/bin/clang-format clang-format /usr/bin/clang-format-5.0 \
--slave /usr/bin/llvm-ar llvm-ar /usr/bin/llvm-ar-5.0 \
--slave /usr/bin/llvm-cov llvm-cov /usr/bin/llvm-cov-5.0 \
--slave /usr/bin/llvm-nm llvm-nm /usr/bin/llvm-nm-5.0
apt-get -y install \
clang-6.0 libclang-common-6.0-dev libclang-6.0-dev libllvm6.0 llvm-6.0 \
llvm-6.0-dev llvm-6.0-runtime clang-format-6.0 python-clang-6.0 \
lld-6.0 libfuzzer-6.0-dev
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-6.0 12 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-6.0 \
--slave /usr/bin/llvm-profdata llvm-profdata /usr/bin/llvm-profdata-6.0 \
--slave /usr/bin/asan-symbolize asan-symbolize /usr/bin/asan_symbolize-6.0 \
--slave /usr/bin/llvm-symbolizer llvm-symbolizer /usr/bin/llvm-symbolizer-6.0 \
--slave /usr/bin/clang-format clang-format /usr/bin/clang-format-6.0 \
--slave /usr/bin/llvm-ar llvm-ar /usr/bin/llvm-ar-6.0 \
--slave /usr/bin/llvm-cov llvm-cov /usr/bin/llvm-cov-6.0 \
--slave /usr/bin/llvm-nm llvm-nm /usr/bin/llvm-nm-6.0
apt-get -y install \
clang-9 libclang-common-9-dev libclang-9-dev libllvm9 llvm-9 \
llvm-9-dev llvm-9-runtime clang-format-9 python-clang-9 \
lld-9 libfuzzer-9-dev libc++-9-dev
update-alternatives --install \
/usr/bin/clang clang /usr/bin/clang-9 20 \
--slave /usr/bin/clang++ clang++ /usr/bin/clang++-9 \
--slave /usr/bin/llvm-profdata llvm-profdata /usr/bin/llvm-profdata-9 \
--slave /usr/bin/asan-symbolize asan-symbolize /usr/bin/asan_symbolize-9 \
--slave /usr/bin/llvm-symbolizer llvm-symbolizer /usr/bin/llvm-symbolizer-9 \
--slave /usr/bin/clang-format clang-format /usr/bin/clang-format-9 \
--slave /usr/bin/llvm-ar llvm-ar /usr/bin/llvm-ar-9 \
--slave /usr/bin/llvm-cov llvm-cov /usr/bin/llvm-cov-9 \
--slave /usr/bin/llvm-nm llvm-nm /usr/bin/llvm-nm-9
# only install latest lldb
apt-get -y install lldb-9 python-lldb-9 liblldb-9-dev
update-alternatives --install \
/usr/bin/lldb lldb /usr/bin/lldb-9 50 \
--slave /usr/bin/lldb-server lldb-server /usr/bin/lldb-server-9 \
--slave /usr/bin/lldb-argdumper lldb-argdumper /usr/bin/lldb-argdumper-9 \
--slave /usr/bin/lldb-instr lldb-instr /usr/bin/lldb-instr-9 \
--slave /usr/bin/lldb-mi lldb-mi /usr/bin/lldb-mi-9
update-alternatives --auto clang
fi
apt-get -y autoremove
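
For context, the priorities registered above only choose the image's default compiler; a CI job that needs a specific version pins it explicitly, which is what build-in-docker.sh does inside the container. A small sketch (assumes the alternatives registered above are present):

  update-alternatives --auto gcc && gcc --version    # auto mode resolves to the highest priority, gcc-7 (40)
  sudo update-alternatives --set gcc /usr/bin/gcc-8  # pin gcc-8 for a particular job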


@@ -77,6 +77,12 @@ else ()
set (is_linux FALSE)
endif ()
if ("$ENV{CI}" STREQUAL "true" OR "$ENV{CONTINUOUS_INTEGRATION}" STREQUAL "true")
set (is_ci TRUE)
else ()
set (is_ci FALSE)
endif ()
# check for in-source build and fail
if ("${CMAKE_CURRENT_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
message (FATAL_ERROR "Builds (in-source) are not allowed in "
@@ -151,6 +157,11 @@ else ()
set (perf OFF CACHE BOOL "perf flags, linux only" FORCE)
set (use_gold OFF CACHE BOOL "gold linker, linux only" FORCE)
endif ()
if (is_clang)
option (use_lld "enables detection of lld linker" ON)
else ()
set (use_lld OFF CACHE BOOL "try lld linker, clang only" FORCE)
endif ()
option (jemalloc "Enables jemalloc for heap profiling" OFF)
option (werr "treat warnings as errors" OFF)
option (local_protobuf
@@ -396,6 +407,45 @@ if (is_root_project)
if (NOT have_package_container)
add_dependencies(dpkg dpkg_container)
endif ()
#[===================================================================[
ci container
#]===================================================================]
# now use the same ubuntu image for our travis-ci docker images,
# but we use a newer distro (18.04 vs 16.04).
#
# steps for publishing a new CI image when you make changes:
#
# mkdir bld.ci && cd bld.ci && cmake -Dpackages_only=ON -Dcontainer_label=CI_LATEST
# cmake --build . --target ci_container --verbose
# docker tag rippled-ci-builder:CI_LATEST <DOCKERHUB_USER>/rippled-ci-builder:YYYY-MM-DD
# (change YYYY-MM-DD to match current date..or use a different
# tag/label scheme if you prefer)
# docker push <DOCKERHUB_USER>/rippled-ci-builder:YYYY-MM-DD
#
# ...then change the DOCKER_IMAGE line in .travis.yml :
# - DOCKER_IMAGE="<DOCKERHUB_USER>/rippled-ci-builder:YYYY-MM-DD"
add_custom_target (ci_container
docker build
--pull
--build-arg DIST_TAG=18.04
--build-arg GIT_COMMIT=${commit_hash}
--build-arg CI_USE=true
-t rippled-ci-builder:${container_label}
$<$<BOOL:${ci_cache_from}>:--cache-from=${ci_cache_from}>
-f ubuntu-builder/Dockerfile .
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/Builds/containers
VERBATIM
USES_TERMINAL
COMMAND_EXPAND_LISTS
SOURCES
Builds/containers/ubuntu-builder/Dockerfile
Builds/containers/ubuntu-builder/ubuntu_setup.sh
Builds/containers/shared/build_deps.sh
Builds/containers/shared/rippled.service
Builds/containers/shared/update_sources.sh
Builds/containers/shared/update-rippled.sh
)
exclude_from_default (ci_container)
else ()
message (STATUS "docker NOT found -- won't be able to build containers for packaging")
endif ()
@@ -515,7 +565,11 @@ else ()
target_link_libraries (common
INTERFACE
-rdynamic
$<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>>:-static-libstdc++>)
# link to static libc/c++ iff:
# * static option set and
# * NOT APPLE (AppleClang does not support static libc/c++) and
# * NOT san (sanitizers typically don't work with static libc/c++)
$<$<AND:$<BOOL:${static}>,$<NOT:$<BOOL:${APPLE}>>,$<NOT:$<BOOL:${san}>>>:-static-libstdc++>)
endif ()
if (use_gold AND is_gcc)
@@ -552,7 +606,7 @@ if (use_gold AND is_gcc)
unset (LD_VERSION)
endif ()
if (is_clang)
if (use_lld)
# use lld linker if available
execute_process (
COMMAND ${CMAKE_CXX_COMPILER} -fuse-ld=lld -Wl,--version
@@ -627,6 +681,8 @@ endif ()
if (san)
target_compile_options (opts
INTERFACE
# sanitizers recommend minimum of -O1 for reasonable performance
$<$<CONFIG:Debug>:-O1>
${SAN_FLAG}
-fno-omit-frame-pointer)
target_compile_definitions (opts
@@ -748,6 +804,26 @@ target_link_libraries (ripple_boost
Boost::serialization
Boost::system
Boost::thread)
if (san)
if (NOT Boost_INCLUDE_DIRS AND TARGET Boost::headers)
get_target_property (Boost_INCLUDE_DIRS Boost::headers INTERFACE_INCLUDE_DIRECTORIES)
endif ()
message(STATUS "Adding [${Boost_INCLUDE_DIRS}] to sanitizer blacklist")
file (WRITE ${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt "src:${Boost_INCLUDE_DIRS}/*")
target_compile_options (opts
INTERFACE
# ignore boost headers for sanitizing
-fsanitize-blacklist=${CMAKE_CURRENT_BINARY_DIR}/san_bl.txt)
endif ()
# workaround for xcode 10.2 and boost < 1.69
# once we require Boost 1.69 or higher, this can be removed
# see: https://github.com/boostorg/asio/commit/43874d5
if (CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" AND
CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 10.0.1.10010043 AND
Boost_VERSION LESS 106900)
target_compile_definitions (opts INTERFACE BOOST_ASIO_HAS_STD_STRING_VIEW)
endif ()
#[===================================================================[
NIH dep: openssl
@@ -773,12 +849,7 @@ if (static)
set (OPENSSL_USE_STATIC_LIBS ON)
endif ()
set (OPENSSL_MSVC_STATIC_RT ON)
set (_ssl_min_ver 1.0.2)
# HACK for travis
if ("$ENV{CI}" STREQUAL "true" AND "$ENV{TRAVIS}" STREQUAL "true")
set (_ssl_min_ver 1.0.1)
endif ()
find_package (OpenSSL ${_ssl_min_ver} REQUIRED)
find_package (OpenSSL 1.0.2 REQUIRED)
target_link_libraries (ripple_libs
INTERFACE
OpenSSL::SSL
@@ -1239,7 +1310,7 @@ ExternalProject_Add (snappy
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DSNAPPY_BUILD_TESTS=OFF
$<$<BOOL:${MSVC}>:
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -MP"
"-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP"
"-DCMAKE_CXX_FLAGS_DEBUG=-MTd"
"-DCMAKE_CXX_FLAGS_RELEASE=-MT"
>
@@ -2581,6 +2652,12 @@ target_link_libraries (rippled
Ripple::libs
Ripple::xrpl_core)
exclude_if_included (rippled)
# define a macro for tests that might need to
# be excluded or run differently in CI environment
if (is_ci)
target_compile_definitions(rippled PRIVATE RIPPLED_RUNNING_IN_CI)
endif ()
#[===================================================================[
install stuff
@@ -2675,6 +2752,11 @@ if (coverage)
COMMAND rippled --unittest$<$<BOOL:${coverage_test}>:=${coverage_test}> --quiet --unittest-log
COMMAND ${LLVM_PROFDATA}
merge -sparse default.profraw -o rip.profdata
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LLVM_COV}
report -instr-profile=rip.profdata
$<TARGET_FILE:rippled> ${extract_pattern}
# generate html report
COMMAND ${LLVM_COV}
show -format=html -output-dir=${CMAKE_BINARY_DIR}/coverage
-instr-profile=rip.profdata
@@ -2718,6 +2800,8 @@ if (coverage)
# extract our files
COMMAND ${LCOV}
-e lcov-all.info "${extract_pattern}" -o lcov.info
COMMAND ${CMAKE_COMMAND} -E echo "Summary of coverage:"
COMMAND ${LCOV} --summary lcov.info
# generate HTML report
COMMAND ${GENHTML}
-o ${CMAKE_BINARY_DIR}/coverage lcov.info
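
A hedged sketch of exercising that coverage flow locally, using the same -Dcoverage=ON option and coverage_report target that the CI coverage jobs pass in (directory name is illustrative):

  mkdir -p build.coverage && cd build.coverage
  cmake -DCMAKE_BUILD_TYPE=Debug -Dcoverage=ON ..
  cmake --build . --target coverage_report   # prints the coverage summary, then writes the HTML report under ./coverage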


@@ -1,18 +1,16 @@
#!/bin/bash -u
# We use set -e and bash with -u to bail on first non zero exit code of any
# processes launched or upon any unbound variable.
# We use set -x to print commands before running them to help with
# debugging.
#!/usr/bin/env bash
set -ex
__dirname=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
echo "using CC: ${CC}"
"${CC}" --version
export CC
COMPNAME=$(basename $CC)
echo "using CXX: ${CXX:-notset}"
if [[ $CXX ]]; then
"${CXX}" --version
export CXX
"${CXX}" --version
export CXX
fi
: ${BUILD_TYPE:=Debug}
echo "BUILD TYPE: ${BUILD_TYPE}"
@@ -26,34 +24,33 @@ echo "using APP: ${APP}"
JOBS=${NUM_PROCESSORS:-2}
if [[ ${TRAVIS:-false} != "true" ]]; then
JOBS=$((JOBS+1))
JOBS=$((JOBS+1))
fi
if [ -x /usr/bin/time ] ; then
: ${TIME:="Duration: %E"}
export TIME
time=/usr/bin/time
: ${TIME:="Duration: %E"}
export TIME
time=/usr/bin/time
else
time=
time=
fi
if [[ -z "${MAX_TIME:-}" ]] ; then
timeout_cmd=""
timeout_cmd=""
else
timeout_cmd="timeout ${MAX_TIME}"
timeout_cmd="timeout ${MAX_TIME}"
fi
echo "cmake building ${APP}"
: ${CMAKE_EXTRA_ARGS:=""}
if [[ ${NINJA_BUILD:-} == true ]]; then
CMAKE_EXTRA_ARGS+=" -G Ninja"
CMAKE_EXTRA_ARGS+=" -G Ninja"
fi
coverage=false
if [[ "${TARGET}" == "coverage_report" ]] ; then
echo "coverage option detected."
coverage=true
export PATH=$PATH:${LCOV_ROOT}/usr/bin
fi
#
@@ -61,24 +58,24 @@ fi
# dir, otherwise default to the compiler.build_type
#
: "${BUILD_DIR:=${COMPNAME}.${BUILD_TYPE}}"
BUILDARGS=" -j${JOBS}"
BUILDARGS=""
if [[ ${VERBOSE_BUILD:-} == true ]]; then
CMAKE_EXTRA_ARGS+=" -DCMAKE_VERBOSE_MAKEFILE=ON"
CMAKE_EXTRA_ARGS+=" -DCMAKE_VERBOSE_MAKEFILE=ON"
# TODO: if we use a different generator, this
# option to build verbose would need to change:
if [[ ${NINJA_BUILD:-} == true ]]; then
BUILDARGS+=" -v"
else
BUILDARGS+=" verbose=1"
fi
# TODO: if we use a different generator, this
# option to build verbose would need to change:
if [[ ${NINJA_BUILD:-} == true ]]; then
BUILDARGS+=" -v"
else
BUILDARGS+=" verbose=1"
fi
fi
if [[ ${USE_CCACHE:-} == true ]]; then
echo "using ccache with basedir [${CCACHE_BASEDIR:-}]"
CMAKE_EXTRA_ARGS+=" -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
echo "using ccache with basedir [${CCACHE_BASEDIR:-}]"
CMAKE_EXTRA_ARGS+=" -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
fi
if [ -d "build/${BUILD_DIR}" ]; then
rm -rf "build/${BUILD_DIR}"
rm -rf "build/${BUILD_DIR}"
fi
mkdir -p "build/${BUILD_DIR}"
@@ -87,16 +84,16 @@ pushd "build/${BUILD_DIR}"
${time} cmake ../.. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_EXTRA_ARGS}
# build
export DESTDIR=$(pwd)/_INSTALLED_
time ${timeout_cmd} cmake --build . --target ${TARGET} -- $BUILDARGS
time ${timeout_cmd} cmake --build . --target ${TARGET} --parallel -- $BUILDARGS
if [[ ${TARGET} == "docs" ]]; then
## mimic the standard test output for docs build
## to make controlling processes like jenkins happy
if [ -f html_doc/index.html ]; then
echo "1 case, 1 test total, 0 failures"
else
echo "1 case, 1 test total, 1 failures"
fi
exit
## mimic the standard test output for docs build
## to make controlling processes like jenkins happy
if [ -f html_doc/index.html ]; then
echo "1 case, 1 test total, 0 failures"
else
echo "1 case, 1 test total, 1 failures"
fi
exit
fi
popd
export APP_PATH="$PWD/build/${BUILD_DIR}/${APP}"
@@ -109,61 +106,116 @@ function join_by { local IFS="$1"; shift; echo "$*"; }
# This is a list of manual tests
# in rippled that we want to run
# ORDER matters here...sorted in approximately
# descending execution time (longest running tests at top)
declare -a manual_tests=(
"beast.chrono.abstract_clock"
"beast.unit_test.print"
"ripple.NodeStore.Timing"
"ripple.app.Flow_manual"
"ripple.app.NoRippleCheckLimits"
"ripple.app.PayStrandAllPairs"
"ripple.consensus.ByzantineFailureSim"
"ripple.consensus.DistributedValidators"
"ripple.consensus.ScaleFreeSim"
"ripple.ripple_data.digest"
"ripple.tx.CrossingLimits"
"ripple.tx.FindOversizeCross"
"ripple.tx.Offer_manual"
"ripple.tx.OversizeMeta"
"ripple.tx.PlumpBook"
'ripple.ripple_data.digest'
'ripple.tx.Offer_manual'
'ripple.app.PayStrandAllPairs'
'ripple.tx.CrossingLimits'
'ripple.tx.PlumpBook'
'ripple.app.Flow_manual'
'ripple.tx.OversizeMeta'
'ripple.consensus.DistributedValidators'
'ripple.app.NoRippleCheckLimits'
'ripple.NodeStore.Timing'
'ripple.consensus.ByzantineFailureSim'
'beast.chrono.abstract_clock'
'beast.unit_test.print'
)
if [[ ${TRAVIS:-false} != "true" ]]; then
# these two tests cause travis CI to run out of memory.
# TODO: investigate possible workarounds.
manual_tests=(
'ripple.consensus.ScaleFreeSim'
'ripple.tx.FindOversizeCross'
"${manual_tests[@]}"
)
fi
: ${APP_ARGS:=}
if [[ ${APP} == "rippled" ]]; then
if [[ ${MANUAL_TESTS:-} == true ]]; then
APP_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
else
APP_ARGS+=" --unittest --quiet --unittest-log"
fi
if [[ ${coverage} == false && ${PARALLEL_TESTS:-} == true ]]; then
APP_ARGS+=" --unittest-jobs ${JOBS}"
fi
if [[ ${MANUAL_TESTS:-} == true ]]; then
APP_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
else
APP_ARGS+=" --unittest --quiet --unittest-log"
fi
if [[ ${coverage} == false && ${PARALLEL_TESTS:-} == true ]]; then
APP_ARGS+=" --unittest-jobs ${JOBS}"
fi
fi
if [[ ${coverage} == true ]]; then
# Push the results (lcov.info) to codecov
codecov -X gcov # don't even try and look for .gcov files ;)
find . -name "*.gcda" | xargs rm -f
if [[ ${coverage} == true && $CC =~ ^gcc ]]; then
# Push the results (lcov.info) to codecov
codecov -X gcov # don't even try and look for .gcov files ;)
find . -name "*.gcda" | xargs rm -f
fi
if [[ ${SKIP_TESTS:-} == true ]]; then
echo "skipping tests."
exit
echo "skipping tests."
exit
fi
if [[ ${DEBUGGER:-true} == "true" && -v GDB_ROOT && -x ${GDB_ROOT}/bin/gdb ]]; then
${GDB_ROOT}/bin/gdb -v
# Execute unit tests under gdb, printing a call stack
# if we get a crash.
export APP_ARGS
${timeout_cmd} ${GDB_ROOT}/bin/gdb -return-child-result -quiet -batch \
-ex "set env MALLOC_CHECK_=3" \
-ex "set print thread-events off" \
-ex run \
-ex "thread apply all backtrace full" \
-ex "quit" \
--args ${APP_PATH} ${APP_ARGS}
ulimit -a
corepat=$(cat /proc/sys/kernel/core_pattern)
if [[ ${corepat} =~ ^[[:space:]]*\| ]] ; then
echo "WARNING: core pattern is piping - can't search for core files"
look_core=false
else
${timeout_cmd} ${APP_PATH} ${APP_ARGS}
look_core=true
coredir=$(dirname ${corepat})
fi
if [[ ${look_core} == true ]]; then
before=$(ls -A1 ${coredir})
fi
set +e
echo "Running tests for ${APP_PATH}"
if [[ ${MANUAL_TESTS:-} == true && ${PARALLEL_TESTS:-} != true ]]; then
for t in "${manual_tests[@]}" ; do
${timeout_cmd} ${APP_PATH} --unittest=${t}
TEST_STAT=$?
if [[ $TEST_STAT -ne 0 ]] ; then
break
fi
done
else
${timeout_cmd} ${APP_PATH} ${APP_ARGS}
TEST_STAT=$?
fi
set -e
if [[ ${look_core} == true ]]; then
after=$(ls -A1 ${coredir})
oIFS="${IFS}"
IFS=$'\n\r'
found_core=false
for l in $(diff -w --suppress-common-lines <(echo "$before") <(echo "$after")) ; do
if [[ "$l" =~ ^[[:space:]]*\>[[:space:]]*(.+)$ ]] ; then
corefile="${BASH_REMATCH[1]}"
echo "FOUND core dump file at '${coredir}/${corefile}'"
gdb_output=$(/bin/mktemp /tmp/gdb_output_XXXXXXXXXX.txt)
found_core=true
gdb \
-ex "set height 0" \
-ex "set logging file ${gdb_output}" \
-ex "set logging on" \
-ex "print 'ripple::BuildInfo::versionString'" \
-ex "thread apply all backtrace full" \
-ex "info inferiors" \
-ex quit \
"$APP_PATH" \
"${coredir}/${corefile}" &> /dev/null
echo -e "CORE INFO: \n\n $(cat ${gdb_output}) \n\n)"
fi
done
IFS="${oIFS}"
fi
if [[ ${found_core} == true ]]; then
exit -1
else
exit $TEST_STAT
fi
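
A hedged example of driving this script outside of Travis (these are the variables the script reads; the values are illustrative):

  export CC=gcc-8 CXX=g++-8
  export BUILD_TYPE=Debug NINJA_BUILD=true
  export NUM_PROCESSORS=4 PARALLEL_TESTS=true
  bin/ci/ubuntu/build-and-test.sh   # configure, build, then run the unit tests with core-dump detection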


@@ -0,0 +1,36 @@
#!/usr/bin/env bash
# run our build script in a docker container
# using travis-ci hosts
set -eux
function join_by { local IFS="$1"; shift; echo "$*"; }
set +x
echo "VERBOSE_BUILD=true" > /tmp/co.env
matchers=(
'TRAVIS.*' 'CI' 'CC' 'CXX'
'BUILD_TYPE' 'TARGET' 'MAX_TIME'
'CODECOV.+' 'CMAKE.*' '.+_TESTS'
'.+_OPTIONS' 'NINJA.*' 'NUM_.+'
'NIH_.+' 'BOOST.*' '.*CCACHE.*')
matchstring=$(join_by '|' "${matchers[@]}")
echo "MATCHSTRING IS:: $matchstring"
env | grep -E "^(${matchstring})=" >> /tmp/co.env
set -x
# need to eliminate TRAVIS_CMD...don't want to pass it to the container
cat /tmp/co.env | grep -v TRAVIS_CMD > /tmp/co.env.2
mv /tmp/co.env.2 /tmp/co.env
cat /tmp/co.env
mkdir -p -m 0777 ${TRAVIS_BUILD_DIR}/cores
echo "${TRAVIS_BUILD_DIR}/cores/%e.%p" | sudo tee /proc/sys/kernel/core_pattern
docker run \
-t --env-file /tmp/co.env \
-v ${TRAVIS_HOME}:${TRAVIS_HOME} \
-w ${TRAVIS_BUILD_DIR} \
--cap-add SYS_PTRACE \
--ulimit "core=-1" \
$DOCKER_IMAGE \
/bin/bash -c 'if [[ $CC =~ ([[:alpha:]]+)-([[:digit:].]+) ]] ; then sudo update-alternatives --set ${BASH_REMATCH[1]} /usr/bin/$CC; fi; bin/ci/ubuntu/build-and-test.sh'
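
For reference, the matcher list above collapses into a single alternation, and only environment variables whose names match it are written to /tmp/co.env for the container. A small hedged illustration (SECRET_TOKEN is a made-up variable, shown only to demonstrate the filtering):

  export BUILD_TYPE=Release NINJA_BUILD=true SECRET_TOKEN=xyz
  matchstring='TRAVIS.*|CI|CC|CXX|BUILD_TYPE|TARGET|MAX_TIME|CODECOV.+|CMAKE.*|.+_TESTS|.+_OPTIONS|NINJA.*|NUM_.+|NIH_.+|BOOST.*|.*CCACHE.*'
  env | grep -E "^(${matchstring})="   # BUILD_TYPE and NINJA_BUILD pass the filter; SECRET_TOKEN does not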


@@ -1,29 +0,0 @@
#!/bin/bash -u
# Exit if anything fails. Echo commands to aid debugging.
set -ex
# Target working dir - defaults to current dir.
# Can be set from caller, or in the first parameter
TWD=$( cd ${TWD:-${1:-${PWD:-$( pwd )}}}; pwd )
echo "Target path is: $TWD"
# Override gcc version to $GCC_VER.
# Put an appropriate symlink at the front of the path.
mkdir -pv $HOME/bin
for g in gcc g++ gcov gcc-ar gcc-nm gcc-ranlib
do
test -x $( type -p ${g}-$GCC_VER )
ln -sv $(type -p ${g}-$GCC_VER) $HOME/bin/${g}
done
# What versions are we ACTUALLY running?
if [ -x $HOME/bin/g++ ]; then
$HOME/bin/g++ -v
else
g++ -v
fi
pip install --user requests==2.13.0
pip install --user https://github.com/codecov/codecov-python/archive/master.zip
bash bin/sh/install-boost.sh


@@ -0,0 +1,29 @@
#!/usr/bin/env bash
# some cached files create churn, so save them here for
# later restoration before packing the cache
set -eux
pushd ${TRAVIS_HOME}
if [ -f cache_ignore.tar ] ; then
rm -f cache_ignore.tar
fi
if [ -d _cache/nih_c ] ; then
find _cache/nih_c -name "build.ninja" | tar rf cache_ignore.tar --files-from -
find _cache/nih_c -name ".ninja_deps" | tar rf cache_ignore.tar --files-from -
find _cache/nih_c -name ".ninja_log" | tar rf cache_ignore.tar --files-from -
find _cache/nih_c -name "*.log" | tar rf cache_ignore.tar --files-from -
find _cache/nih_c -name "*.tlog" | tar rf cache_ignore.tar --files-from -
# show .a files in the cache, for sanity checking
find _cache/nih_c -name "*.a" -ls
fi
if [ -d _cache/ccache ] ; then
find _cache/ccache -name "stats" | tar rf cache_ignore.tar --files-from -
fi
if [ -f cache_ignore.tar ] ; then
tar -tf cache_ignore.tar
fi
popd
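
The restore half lives in the before_cache phase of .travis.yml: just before Travis packs the cache, the churn-prone files saved here are unpacked again so the cache contents stay stable between runs. Roughly:

  cd ${TRAVIS_HOME}
  if [ -f cache_ignore.tar ] ; then sudo tar xvf cache_ignore.tar; fi   # restore build.ninja, .ninja_log, ccache stats, etc.
  cd ${TRAVIS_BUILD_DIR}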


@@ -1,33 +0,0 @@
#!/bin/sh
# Assumptions:
# 1) BOOST_ROOT and BOOST_URL are already defined,
# and contain valid values.
# 2) The last namepart of BOOST_ROOT matches the
# folder name internal to boost's .tar.gz
# When testing you can force a boost build by clearing travis caches:
# https://travis-ci.org/ripple/rippled/caches
set -e
if [ -x /usr/bin/time ] ; then
: ${TIME:="Duration: %E"}
export TIME
time=/usr/bin/time
else
time=
fi
if [ ! -d "$BOOST_ROOT/lib" ]
then
wget $BOOST_URL -O /tmp/boost.tar.gz
cd `dirname $BOOST_ROOT`
rm -fr ${BOOST_ROOT}
tar xzf /tmp/boost.tar.gz
cd $BOOST_ROOT && \
$time ./bootstrap.sh --prefix=$BOOST_ROOT && \
$time ./b2 cxxflags="-std=c++14" -j$((2*${NUM_PROCESSORS:-2})) &&\
$time ./b2 install
else
echo "Using cached boost at $BOOST_ROOT"
fi

bin/sh/install-vcpkg.sh (new executable file, 33 lines)

@@ -0,0 +1,33 @@
#!/usr/bin/env bash
set -exu
if [[ -z ${COMSPEC:-} ]]; then
EXE="vcpkg"
else
EXE="vcpkg.exe"
fi
if [[ -d "${VCPKG_DIR}" && -x "${VCPKG_DIR}/${EXE}" ]] ; then
echo "Using cached vcpkg at ${VCPKG_DIR}"
${VCPKG_DIR}/${EXE} list
exit
fi
if [[ -d "${VCPKG_DIR}" ]] ; then
rm -rf "${VCPKG_DIR}"
fi
git clone --branch 2019.06 https://github.com/Microsoft/vcpkg.git ${VCPKG_DIR}
pushd ${VCPKG_DIR}
if [[ -z ${COMSPEC:-} ]]; then
chmod +x ./bootstrap-vcpkg.sh
./bootstrap-vcpkg.sh
else
./bootstrap-vcpkg.bat
fi
popd
# TODO -- can pin specific ports to a commit/version like this:
#git checkout <SOME COMMIT HASH> ports/boost
${VCPKG_DIR}/${EXE} install openssl
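
Usage sketch matching the Windows jobs above (VCPKG_DIR and the triplet come from .travis.yml; travis_wait is a Travis helper and can be dropped locally):

  export VCPKG_DIR=${CACHE_DIR}/vcpkg
  export VCPKG_DEFAULT_TRIPLET="x64-windows-static"
  travis_wait 30 bin/sh/install-vcpkg.sh   # bootstraps vcpkg (or reuses the cached copy) and installs openssl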

bin/sh/setup-msvc.sh (new executable file, 38 lines)

@@ -0,0 +1,38 @@
# NOTE: must be sourced from a shell so it can export vars
cat << BATCH > ./getenv.bat
CALL %*
ENV
BATCH
while read line ; do
IFS='"' read x path arg <<<"${line}"
if [ -f "${path}" ] ; then
echo "FOUND: $path"
export VCINSTALLDIR=$(./getenv.bat "${path}" ${arg} | grep "^VCINSTALLDIR=" | sed -E "s/^VCINSTALLDIR=//g")
if [ "${VCINSTALLDIR}" != "" ] ; then
echo "USING ${VCINSTALLDIR}"
export LIB=$(./getenv.bat "${path}" ${arg} | grep "^LIB=" | sed -E "s/^LIB=//g")
export LIBPATH=$(./getenv.bat "${path}" ${arg} | grep "^LIBPATH=" | sed -E "s/^LIBPATH=//g")
export INCLUDE=$(./getenv.bat "${path}" ${arg} | grep "^INCLUDE=" | sed -E "s/^INCLUDE=//g")
ADDPATH=$(./getenv.bat "${path}" ${arg} | grep "^PATH=" | sed -E "s/^PATH=//g")
export PATH="${ADDPATH}:${PATH}"
break
fi
fi
done <<EOL
"C:/Program Files (x86)/Microsoft Visual Studio/2017/BuildTools/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Auxiliary/Build/vcvarsall.bat" x86_amd64
"C:/Program Files (x86)/Microsoft Visual Studio 15.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 13.0/VC/vcvarsall.bat" amd64
"C:/Program Files (x86)/Microsoft Visual Studio 12.0/VC/vcvarsall.bat" amd64
EOL
# TODO: update the list above as needed to support newer versions of msvc tools
rm -f getenv.bat
if [ "${VCINSTALLDIR}" = "" ] ; then
echo "No compatible visual studio found!"
fi
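
Because the script only exports variables, it must be sourced rather than executed; the Windows job does exactly that before configuring with Ninja:

  . ./bin/sh/setup-msvc.sh    # imports LIB/LIBPATH/INCLUDE/PATH from vcvarsall.bat into the current shell
  cmake -G Ninja ${CMAKE_EXTRA_ARGS} -DCMAKE_BUILD_TYPE=${BLD_CONFIG} ..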


@@ -19,8 +19,17 @@
#include <ripple/beast/asio/io_latency_probe.h>
#include <ripple/beast/unit_test.h>
#include <beast/test/yield_to.hpp>
#include <boost/asio/basic_waitable_timer.hpp>
#include <boost/asio/deadline_timer.hpp>
#include <boost/asio/io_service.hpp>
#include <boost/optional.hpp>
#include <algorithm>
#include <chrono>
#include <mutex>
#include <numeric>
#include <thread>
#include <vector>
using namespace std::chrono_literals;
@@ -29,6 +38,81 @@ class io_latency_probe_test :
{
using MyTimer = boost::asio::basic_waitable_timer<std::chrono::steady_clock>;
#ifdef RIPPLED_RUNNING_IN_CI
/**
* @brief attempt to measure inaccuracy of asio waitable timers
*
* This class is needed in some VM/CI environments where
* timer inaccuracy impacts the io_probe tests below.
*
*/
template <class Clock, class MeasureClock = std::chrono::high_resolution_clock>
struct measure_asio_timers
{
using duration = typename Clock::duration;
using rep = typename MeasureClock::duration::rep;
std::vector<duration> elapsed_times_;
measure_asio_timers(duration interval = 100ms, size_t num_samples = 50)
{
using namespace std::chrono;
boost::asio::io_service ios;
boost::optional<boost::asio::io_service::work> work {ios};
std::thread worker { [&]{ ios.run(); } };
boost::asio::basic_waitable_timer<Clock> timer {ios};
elapsed_times_.reserve (num_samples);
std::mutex gate;
boost::system::error_code wait_err;
while (--num_samples)
{
auto const start {MeasureClock::now()};
timer.expires_after (interval);
gate.lock ();
timer.async_wait ( [&] (boost::system::error_code const& ec) {
if (ec)
wait_err = ec;
auto const end {MeasureClock::now()};
elapsed_times_.emplace_back (end-start);
gate.unlock ();
});
std::unique_lock <std::mutex> waithere {gate};
}
work = boost::none;
worker.join();
if (wait_err)
boost::asio::detail::throw_error(wait_err, "wait");
}
template <class D>
auto getMean()
{
double sum = {0};
for (auto const& v : elapsed_times_)
{
sum += static_cast<double>(
std::chrono::duration_cast<D>(v).count());
}
return sum / elapsed_times_.size();
}
template <class D>
auto getMax()
{
return std::chrono::duration_cast<D>(*std::max_element(
elapsed_times_.begin(),elapsed_times_.end())).count();
}
template <class D>
auto getMin()
{
return std::chrono::duration_cast<D>(*std::min_element(
elapsed_times_.begin(),elapsed_times_.end())).count();
}
};
#endif
struct test_sampler
{
beast::io_latency_probe <std::chrono::steady_clock> probe_;
@@ -79,14 +163,38 @@ class io_latency_probe_test :
{
testcase << "sample ongoing";
boost::system::error_code ec;
test_sampler io_probe {99ms, get_io_service()};
using namespace std::chrono;
auto interval = 99ms;
auto probe_duration = 1s;
size_t expected_probe_count_max = (probe_duration/interval);
size_t expected_probe_count_min = expected_probe_count_max;
#ifdef RIPPLED_RUNNING_IN_CI
// adjust min expected based on measurements
// if running in CI/VM environment
measure_asio_timers<steady_clock> tt {interval};
log << "measured mean for timers: "
<< tt.getMean<milliseconds>() << "ms\n";
log << "measured max for timers: "
<< tt.getMax<milliseconds>() << "ms\n";
expected_probe_count_min =
static_cast<size_t>(
duration_cast<milliseconds>(probe_duration).count())
/ static_cast<size_t>(tt.getMean<milliseconds>());
#endif
log << "expected_probe_count_min: " << expected_probe_count_min << "\n";
log << "expected_probe_count_max: " << expected_probe_count_max << "\n";
test_sampler io_probe {interval, get_io_service()};
io_probe.start();
MyTimer timer {get_io_service(), 1s};
MyTimer timer {get_io_service(), probe_duration};
timer.async_wait(yield[ec]);
if(! BEAST_EXPECTS(! ec, ec.message()))
return;
auto probes_seen = io_probe.durations_.size();
BEAST_EXPECTS(probes_seen >=9 && probes_seen <= 11,
BEAST_EXPECTS(
probes_seen >= (expected_probe_count_min - 1) &&
probes_seen <= (expected_probe_count_max + 1),
std::string("probe count is ") + std::to_string(probes_seen));
io_probe.probe_.cancel_async();
// wait again in order to flush the remaining