Mirror of https://github.com/XRPLF/rippled.git
Travis CI improvements:

FIXES: #2527

* define custom docker image for travis-linux builds based on package build image
* add macos builds
* add windows builds (currently allowed to fail)
* improve build and shell scripts as required for the CI envs
* add asio timer latency workaround
* omit several manual tests from TravisCI which cause memory exhaustion

Committed by: Nik Bougalis
Parent: 87e9ee5ce9
Commit: 13a4fefe34
@@ -1,18 +1,16 @@
#!/bin/bash -u
# We use set -e and bash with -u to bail on first non zero exit code of any
# processes launched or upon any unbound variable.
# We use set -x to print commands before running them to help with
# debugging.
#!/usr/bin/env bash
set -ex

__dirname=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
echo "using CC: ${CC}"
"${CC}" --version
export CC

COMPNAME=$(basename $CC)
echo "using CXX: ${CXX:-notset}"
if [[ $CXX ]]; then
    "${CXX}" --version
    export CXX
fi
: ${BUILD_TYPE:=Debug}
echo "BUILD TYPE: ${BUILD_TYPE}"
@@ -26,34 +24,33 @@ echo "using APP: ${APP}"

JOBS=${NUM_PROCESSORS:-2}
if [[ ${TRAVIS:-false} != "true" ]]; then
    JOBS=$((JOBS+1))
fi

if [ -x /usr/bin/time ] ; then
    : ${TIME:="Duration: %E"}
    export TIME
    time=/usr/bin/time
else
    time=
fi

if [[ -z "${MAX_TIME:-}" ]] ; then
    timeout_cmd=""
else
    timeout_cmd="timeout ${MAX_TIME}"
fi

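A reference note, not part of the diff: TIME above is the GNU time(1) format string (%E is elapsed wall-clock time), and timeout(1) kills the command once MAX_TIME has passed. A minimal illustration, assuming GNU coreutils and GNU time are installed:

    # Illustrative only - not from the commit.
    TIME="Duration: %E" /usr/bin/time sleep 2   # prints something like "Duration: 0:02.00" to stderr
    timeout 5 sleep 10                          # killed after 5 seconds; exits with status 124
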
echo "cmake building ${APP}"
|
||||
: ${CMAKE_EXTRA_ARGS:=""}
|
||||
if [[ ${NINJA_BUILD:-} == true ]]; then
|
||||
CMAKE_EXTRA_ARGS+=" -G Ninja"
|
||||
CMAKE_EXTRA_ARGS+=" -G Ninja"
|
||||
fi
|
||||
|
||||
coverage=false
|
||||
if [[ "${TARGET}" == "coverage_report" ]] ; then
|
||||
echo "coverage option detected."
|
||||
coverage=true
|
||||
export PATH=$PATH:${LCOV_ROOT}/usr/bin
|
||||
fi
|
||||
|
||||
#
|
||||
@@ -61,24 +58,24 @@ fi
# dir, otherwise default to the compiler.build_type
#
: "${BUILD_DIR:=${COMPNAME}.${BUILD_TYPE}}"
BUILDARGS=" -j${JOBS}"
BUILDARGS=""
if [[ ${VERBOSE_BUILD:-} == true ]]; then
    CMAKE_EXTRA_ARGS+=" -DCMAKE_VERBOSE_MAKEFILE=ON"

    # TODO: if we use a different generator, this
    # option to build verbose would need to change:
    if [[ ${NINJA_BUILD:-} == true ]]; then
        BUILDARGS+=" -v"
    else
        BUILDARGS+=" verbose=1"
    fi
fi
if [[ ${USE_CCACHE:-} == true ]]; then
    echo "using ccache with basedir [${CCACHE_BASEDIR:-}]"
    CMAKE_EXTRA_ARGS+=" -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
fi
if [ -d "build/${BUILD_DIR}" ]; then
    rm -rf "build/${BUILD_DIR}"
fi

mkdir -p "build/${BUILD_DIR}"
@@ -87,16 +84,16 @@ pushd "build/${BUILD_DIR}"
${time} cmake ../.. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_EXTRA_ARGS}
# build
export DESTDIR=$(pwd)/_INSTALLED_
time ${timeout_cmd} cmake --build . --target ${TARGET} -- $BUILDARGS
time ${timeout_cmd} cmake --build . --target ${TARGET} --parallel -- $BUILDARGS
if [[ ${TARGET} == "docs" ]]; then
    ## mimic the standard test output for docs build
    ## to make controlling processes like jenkins happy
    if [ -f html_doc/index.html ]; then
        echo "1 case, 1 test total, 0 failures"
    else
        echo "1 case, 1 test total, 1 failures"
    fi
    exit
fi
popd
export APP_PATH="$PWD/build/${BUILD_DIR}/${APP}"
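To make the configure and build steps above concrete, here is roughly what they expand to when NINJA_BUILD=true and USE_CCACHE=true; the target name is an example, not taken from the commit:

    # Example expansion only - the real flags are assembled by the script above.
    cmake ../.. -DCMAKE_BUILD_TYPE=Debug \
        -G Ninja \
        -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache
    cmake --build . --target rippled --parallel -- -v
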
@@ -109,61 +106,116 @@ function join_by { local IFS="$1"; shift; echo "$*"; }

# This is a list of manual tests
# in rippled that we want to run
# ORDER matters here...sorted in approximately
# descending execution time (longest running tests at top)
declare -a manual_tests=(
    "beast.chrono.abstract_clock"
    "beast.unit_test.print"
    "ripple.NodeStore.Timing"
    "ripple.app.Flow_manual"
    "ripple.app.NoRippleCheckLimits"
    "ripple.app.PayStrandAllPairs"
    "ripple.consensus.ByzantineFailureSim"
    "ripple.consensus.DistributedValidators"
    "ripple.consensus.ScaleFreeSim"
    "ripple.ripple_data.digest"
    "ripple.tx.CrossingLimits"
    "ripple.tx.FindOversizeCross"
    "ripple.tx.Offer_manual"
    "ripple.tx.OversizeMeta"
    "ripple.tx.PlumpBook"
    'ripple.ripple_data.digest'
    'ripple.tx.Offer_manual'
    'ripple.app.PayStrandAllPairs'
    'ripple.tx.CrossingLimits'
    'ripple.tx.PlumpBook'
    'ripple.app.Flow_manual'
    'ripple.tx.OversizeMeta'
    'ripple.consensus.DistributedValidators'
    'ripple.app.NoRippleCheckLimits'
    'ripple.NodeStore.Timing'
    'ripple.consensus.ByzantineFailureSim'
    'beast.chrono.abstract_clock'
    'beast.unit_test.print'
)
if [[ ${TRAVIS:-false} != "true" ]]; then
    # these two tests cause travis CI to run out of memory.
    # TODO: investigate possible workarounds.
    manual_tests=(
        'ripple.consensus.ScaleFreeSim'
        'ripple.tx.FindOversizeCross'
        "${manual_tests[@]}"
    )
fi

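As a side note (not part of the commit), this is roughly how the join_by helper from the hunk header above turns the manual_tests array into the comma-separated --unittest argument used below; the test list here is abbreviated:

    # Sketch with a shortened test list; join_by is the same one-liner as in this script.
    function join_by { local IFS="$1"; shift; echo "$*"; }
    manual_tests=( 'ripple.tx.Offer_manual' 'ripple.NodeStore.Timing' )
    echo " --unittest=$(join_by , "${manual_tests[@]}")"
    # -> " --unittest=ripple.tx.Offer_manual,ripple.NodeStore.Timing"
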
: ${APP_ARGS:=}
if [[ ${APP} == "rippled" ]]; then
    if [[ ${MANUAL_TESTS:-} == true ]]; then
        APP_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
    else
        APP_ARGS+=" --unittest --quiet --unittest-log"
    fi
    if [[ ${coverage} == false && ${PARALLEL_TESTS:-} == true ]]; then
        APP_ARGS+=" --unittest-jobs ${JOBS}"
    fi
fi

if [[ ${coverage} == true ]]; then
    # Push the results (lcov.info) to codecov
    codecov -X gcov # don't even try and look for .gcov files ;)
    find . -name "*.gcda" | xargs rm -f
if [[ ${coverage} == true && $CC =~ ^gcc ]]; then
    # Push the results (lcov.info) to codecov
    codecov -X gcov # don't even try and look for .gcov files ;)
    find . -name "*.gcda" | xargs rm -f
fi

if [[ ${SKIP_TESTS:-} == true ]]; then
    echo "skipping tests."
    exit
fi

if [[ ${DEBUGGER:-true} == "true" && -v GDB_ROOT && -x ${GDB_ROOT}/bin/gdb ]]; then
    ${GDB_ROOT}/bin/gdb -v
    # Execute unit tests under gdb, printing a call stack
    # if we get a crash.
    export APP_ARGS
    ${timeout_cmd} ${GDB_ROOT}/bin/gdb -return-child-result -quiet -batch \
        -ex "set env MALLOC_CHECK_=3" \
        -ex "set print thread-events off" \
        -ex run \
        -ex "thread apply all backtrace full" \
        -ex "quit" \
        --args ${APP_PATH} ${APP_ARGS}
ulimit -a
corepat=$(cat /proc/sys/kernel/core_pattern)
if [[ ${corepat} =~ ^[:space:]*\| ]] ; then
    echo "WARNING: core pattern is piping - can't search for core files"
    look_core=false
else
    ${timeout_cmd} ${APP_PATH} ${APP_ARGS}
    look_core=true
    coredir=$(dirname ${corepat})
fi
if [[ ${look_core} == true ]]; then
    before=$(ls -A1 ${coredir})
fi

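A brief aside on the core_pattern check above: when /proc/sys/kernel/core_pattern begins with '|', the kernel pipes core dumps to a handler process instead of writing a file, so there is no directory to watch. The values below are illustrative, not from the commit:

    # Example values this check distinguishes (illustrative):
    #   /var/crash/core.%e.%p          -> cores land in /var/crash, so look_core=true
    #   |/usr/share/apport/apport %p   -> cores are piped to a handler, so look_core=false
    cat /proc/sys/kernel/core_pattern
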
set +e
echo "Running tests for ${APP_PATH}"
if [[ ${MANUAL_TESTS:-} == true && ${PARALLEL_TESTS:-} != true ]]; then
    for t in "${manual_tests[@]}" ; do
        ${timeout_cmd} ${APP_PATH} --unittest=${t}
        TEST_STAT=$?
        if [[ $TEST_STAT -ne 0 ]] ; then
            break
        fi
    done
else
    ${timeout_cmd} ${APP_PATH} ${APP_ARGS}
    TEST_STAT=$?
fi
set -e

if [[ ${look_core} == true ]]; then
    after=$(ls -A1 ${coredir})
    oIFS="${IFS}"
    IFS=$'\n\r'
    found_core=false
    for l in $(diff -w --suppress-common-lines <(echo "$before") <(echo "$after")) ; do
        if [[ "$l" =~ ^[[:space:]]*\>[[:space:]]*(.+)$ ]] ; then
            corefile="${BASH_REMATCH[1]}"
            echo "FOUND core dump file at '${coredir}/${corefile}'"
            gdb_output=$(/bin/mktemp /tmp/gdb_output_XXXXXXXXXX.txt)
            found_core=true
            gdb \
                -ex "set height 0" \
                -ex "set logging file ${gdb_output}" \
                -ex "set logging on" \
                -ex "print 'ripple::BuildInfo::versionString'" \
                -ex "thread apply all backtrace full" \
                -ex "info inferiors" \
                -ex quit \
                "$APP_PATH" \
                "${coredir}/${corefile}" &> /dev/null

            echo -e "CORE INFO: \n\n $(cat ${gdb_output}) \n\n)"
        fi
    done
    IFS="${oIFS}"
fi

if [[ ${found_core} == true ]]; then
    exit -1
else
    exit $TEST_STAT
fi

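For orientation, a hypothetical local invocation of this build-and-test script; the variable names are the ones the script reads above, but the compiler and target values shown are examples only:

    # Hypothetical example, not part of the commit.
    CC=gcc-8 CXX=g++-8 BUILD_TYPE=Debug TARGET=rippled APP=rippled \
        NINJA_BUILD=true PARALLEL_TESTS=true \
        bin/ci/ubuntu/build-and-test.sh
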
bin/ci/ubuntu/build-in-docker.sh (new executable file, 36 lines)
@@ -0,0 +1,36 @@
#!/usr/bin/env bash
# run our build script in a docker container
# using travis-ci hosts
set -eux

function join_by { local IFS="$1"; shift; echo "$*"; }

set +x
echo "VERBOSE_BUILD=true" > /tmp/co.env
matchers=(
    'TRAVIS.*' 'CI' 'CC' 'CXX'
    'BUILD_TYPE' 'TARGET' 'MAX_TIME'
    'CODECOV.+' 'CMAKE.*' '.+_TESTS'
    '.+_OPTIONS' 'NINJA.*' 'NUM_.+'
    'NIH_.+' 'BOOST.*' '.*CCACHE.*')

matchstring=$(join_by '|' "${matchers[@]}")
echo "MATCHSTRING IS:: $matchstring"
env | grep -E "^(${matchstring})=" >> /tmp/co.env
set -x
# need to eliminate TRAVIS_CMD...don't want to pass it to the container
cat /tmp/co.env | grep -v TRAVIS_CMD > /tmp/co.env.2
mv /tmp/co.env.2 /tmp/co.env
cat /tmp/co.env
mkdir -p -m 0777 ${TRAVIS_BUILD_DIR}/cores
echo "${TRAVIS_BUILD_DIR}/cores/%e.%p" | sudo tee /proc/sys/kernel/core_pattern
docker run \
    -t --env-file /tmp/co.env \
    -v ${TRAVIS_HOME}:${TRAVIS_HOME} \
    -w ${TRAVIS_BUILD_DIR} \
    --cap-add SYS_PTRACE \
    --ulimit "core=-1" \
    $DOCKER_IMAGE \
    /bin/bash -c 'if [[ $CC =~ ([[:alpha:]]+)-([[:digit:].]+) ]] ; then sudo update-alternatives --set ${BASH_REMATCH[1]} /usr/bin/$CC; fi; bin/ci/ubuntu/build-and-test.sh'

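To illustrate the environment filtering above (a sketch with a trimmed matcher list): join_by builds an alternation regex, and grep -E copies only the matching NAME=value pairs into the env-file handed to docker run:

    # Sketch only; the real script uses the full matcher list above.
    function join_by { local IFS="$1"; shift; echo "$*"; }
    matchers=( 'TRAVIS.*' 'CC' 'CXX' 'BUILD_TYPE' )
    matchstring=$(join_by '|' "${matchers[@]}")      # -> TRAVIS.*|CC|CXX|BUILD_TYPE
    env | grep -E "^(${matchstring})=" >> /tmp/co.env
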
(deleted file, 29 lines)
@@ -1,29 +0,0 @@
#!/bin/bash -u
# Exit if anything fails. Echo commands to aid debugging.
set -ex

# Target working dir - defaults to current dir.
# Can be set from caller, or in the first parameter
TWD=$( cd ${TWD:-${1:-${PWD:-$( pwd )}}}; pwd )
echo "Target path is: $TWD"
# Override gcc version to $GCC_VER.
# Put an appropriate symlink at the front of the path.
mkdir -pv $HOME/bin
for g in gcc g++ gcov gcc-ar gcc-nm gcc-ranlib
do
    test -x $( type -p ${g}-$GCC_VER )
    ln -sv $(type -p ${g}-$GCC_VER) $HOME/bin/${g}
done

# What versions are we ACTUALLY running?
if [ -x $HOME/bin/g++ ]; then
    $HOME/bin/g++ -v
else
    g++ -v
fi

pip install --user requests==2.13.0
pip install --user https://github.com/codecov/codecov-python/archive/master.zip

bash bin/sh/install-boost.sh

bin/ci/ubuntu/travis-cache-start.sh (new executable file, 29 lines)
@@ -0,0 +1,29 @@
#!/usr/bin/env bash
# some cached files create churn, so save them here for
# later restoration before packing the cache
set -eux
pushd ${TRAVIS_HOME}
if [ -f cache_ignore.tar ] ; then
    rm -f cache_ignore.tar
fi

if [ -d _cache/nih_c ] ; then
    find _cache/nih_c -name "build.ninja" | tar rf cache_ignore.tar --files-from -
    find _cache/nih_c -name ".ninja_deps" | tar rf cache_ignore.tar --files-from -
    find _cache/nih_c -name ".ninja_log" | tar rf cache_ignore.tar --files-from -
    find _cache/nih_c -name "*.log" | tar rf cache_ignore.tar --files-from -
    find _cache/nih_c -name "*.tlog" | tar rf cache_ignore.tar --files-from -
    # show .a files in the cache, for sanity checking
    find _cache/nih_c -name "*.a" -ls
fi

if [ -d _cache/ccache ] ; then
    find _cache/ccache -name "stats" | tar rf cache_ignore.tar --files-from -
fi

if [ -f cache_ignore.tar ] ; then
    tar -tf cache_ignore.tar
fi
popd

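The comment at the top of this script says the files are saved "for later restoration before packing the cache"; the restore half is not part of this diff, but it would presumably look something like the following sketch (an assumption, not code from the repository):

    #!/usr/bin/env bash
    # Hypothetical counterpart step: unpack the churn-prone files saved above
    # so the cache that Travis re-packs looks unchanged.
    set -eux
    pushd ${TRAVIS_HOME}
    if [ -f cache_ignore.tar ] ; then
        tar xf cache_ignore.tar
    fi
    popd
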