Add jenkins multiconfig pipeline support:

Fixes: RIPD-1514

Create a Jenkinsfile to build on rippled slaves. Modify
build_and_test.sh to support a few additional ENV settings.
This commit is contained in:
Mike Ellery
2017-08-09 14:49:06 -07:00
committed by seelabs
parent 1853c0d678
commit b9fc9f6334
4 changed files with 362 additions and 37 deletions

296
Jenkinsfile vendored Normal file
View File

@@ -0,0 +1,296 @@
#!/usr/bin/env groovy
import groovy.json.JsonOutput
import java.text.*
// Jenkins scripted pipeline: runs a collaborator check, builds/tests all
// configured variants in parallel on rippled slaves, and finally posts a
// summary table back to GitHub as a PR/commit comment.
//
// all_status maps variant name -> [passed(bool), label, detail-text];
// it is filled in by the parallel builds and read in 'Final Status'.
def all_status = [:]
def commit_id = ''
// this is not the actual token, but an ID/key into the jenkins
// credential store which httpRequest can access.
def github_cred = '6bd3f3b9-9a35-493e-8aef-975403c82d3e'
try {
stage ('Startup Checks') {
// here we check the commit author against collaborators
// we need a node to do this because readJSON requires
// a filesystem (although we just pass it text)
node {
echo "CHANGE AUTHOR ---> $CHANGE_AUTHOR"
echo "CHANGE TARGET ---> $CHANGE_TARGET"
echo "CHANGE ID ---> $CHANGE_ID"
checkout scm
// capture the exact commit being built so the final status
// comment can link to it
commit_id = sh(script: 'git rev-parse HEAD', returnStdout: true)
commit_id = commit_id.trim()
echo "commit ID is ${commit_id}"
def response = httpRequest(
timeout: 10,
authentication: github_cred,
url: 'https://api.github.com/repos/ripple/rippled/collaborators')
def collab_data = readJSON(text: response.content)
collab_found = false;
for (collaborator in collab_data) {
if (collaborator['login'] == "$CHANGE_AUTHOR") {
echo "$CHANGE_AUTHOR is a collaborator!"
collab_found = true;
break;
}
}
// abort (via error) when the author is not a known collaborator;
// the badge and all_status entry record the reason first
if (! collab_found) {
manager.addShortText(
"Author of this change is not a collaborator!",
"Crimson",
"white",
"0px",
"white")
all_status['startup'] =
[false, 'Author Check', "$CHANGE_AUTHOR is not a collaborator!"]
error "$CHANGE_AUTHOR does not appear to be a collaborator...bailing on this build"
}
}
}
stage ('Parallel Build') {
// the compiler/build-type/unity variants to build; each entry
// becomes one parallel branch below
def variants = [
'coverage',
'clang.debug.unity',
'clang.debug.nounity',
'gcc.debug.unity',
'gcc.debug.nounity',
'clang.release.unity',
'gcc.release.unity'] as String[]
// create a map of all builds
// that we want to run. The map
// is string keys and node{} object values
def builds = [:]
for (int index = 0; index < variants.size(); index++) {
// bind the variant to a fresh local so each closure captures its
// own value rather than the shared loop variable
def bldtype = variants[index]
builds[bldtype] = {
node('rippled-dev') {
checkout scm
// start from a clean build directory
dir ('build') {
deleteDir()
}
def cdir = upDir(pwd())
echo "BASEDIR: ${cdir}"
def compiler = getCompiler(bldtype)
def target = getTarget(bldtype)
// the 'coverage' variant has no compiler prefix; it builds with gcc
if (compiler == "coverage") {
compiler = 'gcc'
}
echo "COMPILER: ${compiler}"
echo "TARGET: ${target}"
def clang_cc =
(compiler == "clang") ? "${LLVM_ROOT}/bin/clang" : ''
def clang_cxx =
(compiler == "clang") ? "${LLVM_ROOT}/bin/clang++" : ''
// ccache is only enabled for nounity targets.
// NOTE(review): the echo label says USE_CC but the value feeds
// USE_CCACHE below -- confirm the label is just a typo.
def ucc = isNoUnity(target) ? 'true' : 'false'
echo "USE_CC: ${ucc}"
withEnv(["CCACHE_BASEDIR=${cdir}",
"CCACHE_NOHASHDIR=true",
'LCOV_ROOT=""',
"TARGET=${target}",
"CC=${compiler}",
'BUILD=cmake',
'VERBOSE_BUILD=true',
"CLANG_CC=${clang_cc}",
"CLANG_CXX=${clang_cxx}",
"USE_CCACHE=${ucc}"])
{
myStage(bldtype)
try {
// capture ccache stats before/after the build and tee all
// build/test output into a per-variant log file
sh "ccache -s > ${bldtype}.txt"
// the devtoolset from SCL gives us a recent gcc. It's
// not strictly needed when we are building with clang,
// but it doesn't seem to interfere either
sh "source /opt/rh/devtoolset-6/enable && " +
"(/usr/bin/time -p ./bin/ci/ubuntu/build-and-test.sh 2>&1) 2>&1 " +
">> ${bldtype}.txt"
sh "ccache -s >> ${bldtype}.txt"
}
finally {
// always parse the log for results/time/failures so a badge
// and status entry get recorded even when the build failed
def outstr = readFile("${bldtype}.txt")
def st = getResults(outstr)
def time = getTime(outstr)
def fail_count = getFailures(outstr)
outstr = null
def txtcolor =
fail_count == 0 ? "DarkGreen" : "Crimson"
// abbreviate the variant name to keep the badge short
def shortbld = bldtype
shortbld = shortbld.replace('debug', 'dbg')
shortbld = shortbld.replace('release', 'rel')
shortbld = shortbld.replace('unity', 'un')
manager.addShortText(
"${shortbld}: ${st}, t: ${time}",
txtcolor,
"white",
"0px",
"white")
archive("${bldtype}.txt")
// serialize updates to the shared map across parallel branches
lock('rippled_dev_status') {
all_status[bldtype] =
[fail_count == 0, bldtype, "${st}, t: ${time}"]
}
}
}
}
}
}
parallel builds
}
}
finally {
// anything here should run always...
stage ('Final Status') {
node {
def start_time = new Date()
def sdf = new SimpleDateFormat("yyyyMMdd - HH:mm:ss")
def datestamp = sdf.format(start_time)
// build a markdown summary table from all_status
def results = """
## Jenkins Build Summary
Built from [this commit](https://github.com/ripple/rippled/commit/${commit_id})
Built at __${datestamp}__
### Test Results
Build Type | Result | Status
---------- | ------ | ------
"""
for ( e in all_status) {
results += e.value[1] + " | " + e.value[2] + " | " +
(e.value[0] ? "PASS :white_check_mark: " : "FAIL :red_circle: ") + "\n"
}
results += "\n"
echo "FINAL BUILD RESULTS"
echo results
try {
def url_comment = ""
if ("$CHANGE_ID" ==~ /\d+/) {
// CHANGE_ID indicates we are building a PR
// find PR comments
def resp = httpRequest(
timeout: 10,
authentication: github_cred,
url: "https://api.github.com/repos/ripple/rippled/pulls/$CHANGE_ID")
def result = readJSON(text: resp.content)
// follow issue comments link
url_comment = result['_links']['issue']['href'] + '/comments'
}
else {
// if not a PR, just search comments for our commit ID
url_comment =
'https://api.github.com/repos/ripple/rippled/commits/' +
"${commit_id}/comments"
}
def response = httpRequest(
timeout: 10,
authentication: github_cred,
url: url_comment)
def data = readJSON(text: response.content)
def comment_id = 0
def mode = 'POST'
// see if we can find an existing comment here with
// a heading that matches ours...
for (comment in data) {
if (comment['body'] =~ /(?m)^##\s+Jenkins Build/) {
comment_id = comment['id']
echo "existing status comment ${comment_id} found"
url_comment = comment['url']
mode = 'PATCH'
break;
}
}
if (comment_id == 0) {
echo "no existing status comment found"
}
def body = JsonOutput.toJson([
body: results
])
// POST a new comment, or PATCH the existing one found above
response = httpRequest(
timeout: 10,
authentication: github_cred,
url: url_comment,
contentType: 'APPLICATION_JSON',
httpMode: mode,
requestBody: body)
}
catch (any) {
// status reporting is best-effort; never fail the build over it
echo "had a problem interacting with github...status is probably not updated"
}
}
}
}
// ---------------
// util functions
// ---------------
// Print a loud banner so each variant's section is easy to locate in
// the interleaved parallel build output.
def myStage(name) {
    def banner = """
+++++++++++++++++++++++++++++++++++++++++
>> building ${name}
+++++++++++++++++++++++++++++++++++++++++
"""
    echo banner
}
@NonCPS
def getResults(text) {
    // Summarize a unit-test report line such as:
    //   194.5s, 154 suites, 948 cases, 360485 tests total, 0 failures
    // into "<cases> cases, <failures> failed". Returns a placeholder
    // string when no such line is present (e.g. the build itself failed).
    def m = text =~ /(\d+) cases, (\d+) tests total, (\d+) (failure(s?))/
    if (!m) {
        return "no test results"
    }
    return m[0][1] + " cases, " + m[0][3] + " failed"
}
// Extract the unit-test failure count from the build log.
// @NonCPS is required here (and was missing): java.util.regex.Matcher is
// not Serializable, so holding one inside a CPS-transformed method can
// break pipeline resume/serialization -- all sibling regex helpers in
// this file already carry the annotation.
@NonCPS
def getFailures(text) {
    // example:
    /// 194.5s, 154 suites, 948 cases, 360485 tests total, 0 failures
    def matcher = text =~ /(\d+) tests total, (\d+) (failure(s?))/
    // if we didn't match, then return 1 since something is
    // probably wrong, e.g. maybe the build failed...
    matcher ? matcher[0][2] as Integer : 1i
}
@NonCPS
def getCompiler(bld) {
    // A variant name looks like "<compiler>.<rest>"; return the piece
    // before the first dot, or the whole name when there is no dot
    // (e.g. "coverage").
    def parts = bld =~ /^(.+?)\.(.+)$/
    if (!parts) {
        return bld
    }
    return parts[0][1]
}
@NonCPS
def isNoUnity(bld) {
    // True when the variant/target name ends in ".nounity" (optionally
    // followed by trailing whitespace).
    if (bld =~ /\.nounity\s*$/) {
        return true
    }
    return false
}
@NonCPS
def getTarget(bld) {
    // A variant name looks like "<compiler>.<target>"; return everything
    // after the first dot, or the whole name when there is no dot
    // (e.g. "coverage").
    def parts = bld =~ /^(.+?)\.(.+)$/
    if (!parts) {
        return bld
    }
    return parts[0][2]
}
// because I can't seem to find path manipulation
// functions in groovy....
@NonCPS
def upDir(path) {
    // Return the parent portion of `path` (everything before the last
    // '/' the regex settles on), or `path` unchanged when it contains
    // no '/' at all.
    def m = path =~ /^(.+)\/(.+?)/
    if (m) {
        return m[0][1]
    }
    return path
}
@NonCPS
def getTime(text) {
    // Pull the wallclock time from a `time -p` "real" line, dropping
    // the fractional seconds, and suffix it with "s". Returns "n/a"
    // when no such line is found.
    def m = text =~ /(?m)^real\s+(.+)\.(\d+?)[s]?/
    if (m) {
        return m[0][1] + "s"
    }
    return "n/a"
}

View File

@@ -10,26 +10,45 @@ echo "using TARGET: $TARGET"
# Ensure APP defaults to rippled if it's not set.
: ${APP:=rippled}
echo "using APP: $APP"
JOBS=${NUM_PROCESSORS:-2}
if [[ ${TARGET} == *.nounity ]]; then
JOBS=$((2*${JOBS}))
fi
JOBS=$((JOBS+1))
if [[ ${BUILD:-scons} == "cmake" ]]; then
echo "cmake building ${APP}"
CMAKE_EXTRA_ARGS=" -DCMAKE_VERBOSE_MAKEFILE=ON"
CMAKE_TARGET=$CC.$TARGET
BUILDARGS=" -j${JOBS}"
if [[ ${VERBOSE_BUILD:-} == true ]]; then
# TODO: if we use a different generator, this
# option to build verbose would need to change:
BUILDARGS+=" verbose=1"
fi
if [[ ${CI:-} == true ]]; then
CMAKE_TARGET=$CMAKE_TARGET.ci
fi
if [[ ${USE_CCACHE:-} == true ]]; then
echo "using ccache with basedir [${CCACHE_BASEDIR:-}]"
CMAKE_EXTRA_ARGS+=" -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
fi
if [ -d "build/${CMAKE_TARGET}" ]; then
rm -rf "build/${CMAKE_TARGET}"
fi
mkdir -p "build/${CMAKE_TARGET}"
pushd "build/${CMAKE_TARGET}"
cmake ../.. -Dtarget=$CMAKE_TARGET
cmake --build . -- -j${JOBS}
cmake ../.. -Dtarget=$CMAKE_TARGET ${CMAKE_EXTRA_ARGS}
cmake --build . -- $BUILDARGS
if [[ ${BUILD_BOTH:-} == true ]]; then
if [[ ${TARGET} == *.unity ]]; then
cmake --build . --target rippled_classic -- $BUILDARGS
else
cmake --build . --target rippled_unity -- $BUILDARGS
fi
fi
popd
export APP_PATH="$PWD/build/${CMAKE_TARGET}/${APP}"
echo "using APP_PATH: $APP_PATH"
else
export APP_PATH="$PWD/build/$CC.$TARGET/${APP}"
echo "using APP_PATH: $APP_PATH"
@@ -65,26 +84,31 @@ if [[ $TARGET == "coverage" ]]; then
export PATH=$PATH:$LCOV_ROOT/usr/bin
# Create baseline coverage data file
lcov --no-external -c -i -d . -o baseline.info
lcov --no-external -c -i -d . -o baseline.info | grep -v "ignoring data for external file"
fi
if [[ ${TARGET} == debug ]]; then
# Execute unit tests under gdb, printing a call stack
# if we get a crash.
$GDB_ROOT/bin/gdb -return-child-result -quiet -batch \
-ex "set env MALLOC_CHECK_=3" \
-ex "set print thread-events off" \
-ex run \
-ex "thread apply all backtrace full" \
-ex "quit" \
--args $APP_PATH $APP_ARGS
if [[ ${SKIP_TESTS:-} == true ]]; then
echo "skipping tests for ${TARGET}"
exit
fi
if [[ $TARGET == debug* ]]; then
# Execute unit tests under gdb, printing a call stack
# if we get a crash.
$GDB_ROOT/bin/gdb -return-child-result -quiet -batch \
-ex "set env MALLOC_CHECK_=3" \
-ex "set print thread-events off" \
-ex run \
-ex "thread apply all backtrace full" \
-ex "quit" \
--args $APP_PATH $APP_ARGS
else
$APP_PATH $APP_ARGS
$APP_PATH $APP_ARGS
fi
if [[ $TARGET == "coverage" ]]; then
# Create test coverage data file
lcov --no-external -c -d . -o tests.info
lcov --no-external -c -d . -o tests.info | grep -v "ignoring data for external file"
# Combine baseline and test coverage data
lcov -a baseline.info -a tests.info -o lcov-all.info

View File

@@ -67,8 +67,7 @@ tar xfvz lcov-1.12.tar.gz -C $HOME
mkdir -p $LCOV_ROOT
cd $HOME/lcov-1.12 && make install PREFIX=$LCOV_ROOT
if [[ ${TARGET} == debug && ! -x ${GDB_ROOT}/bin/gdb ]]; then
if [[ ${TARGET} == debug* && ! -x ${GDB_ROOT}/bin/gdb ]]; then
pushd $HOME
#install gdb
wget https://ftp.gnu.org/gnu/gdb/gdb-8.0.tar.xz

View File

@@ -1,13 +1,13 @@
//------------------------------------------------------------------------------
/*
This file is part of rippled: https://github.com/ripple/rippled
Copyright 2014 Ripple Labs Inc.
Copyright 2014 Ripple Labs Inc.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
@@ -64,13 +64,6 @@ private:
std::thread thread_;
std::shared_ptr<boost::asio::ssl::context> context_;
static
endpoint_type
endpoint()
{
return endpoint_type(address_type::from_string("127.0.0.1"), 9000);
}
template <class Streambuf>
static
void
@@ -175,6 +168,7 @@ private:
{
private:
short_read_test& test_;
endpoint_type endpoint_;
struct Acceptor
: Child, std::enable_shared_from_this<Acceptor>
@@ -189,11 +183,15 @@ private:
: Child(server)
, server_(server)
, test_(server_.test_)
, acceptor_(test_.io_service_, endpoint())
, acceptor_(test_.io_service_,
endpoint_type(address_type::from_string("127.0.0.1"), 0))
, socket_(test_.io_service_)
, strand_(socket_.get_io_service())
{
acceptor_.listen();
server_.endpoint_ = acceptor_.local_endpoint();
test_.log << "[server] up on port: " <<
server_.endpoint_.port() << std::endl;
}
void
@@ -382,11 +380,17 @@ private:
close();
wait();
}
endpoint_type const&
endpoint () const
{
return endpoint_;
}
};
//--------------------------------------------------------------------------
class Client : public Base
{
private:
short_read_test& test_;
@@ -401,8 +405,9 @@ private:
strand_type strand_;
timer_type timer_;
boost::asio::streambuf buf_;
endpoint_type const& ep_;
Connection (Client& client)
Connection (Client& client, endpoint_type const& ep)
: Child(client)
, client_(client)
, test_(client_.test_)
@@ -410,6 +415,7 @@ private:
, stream_(socket_, *test_.context_)
, strand_(socket_.get_io_service())
, timer_(socket_.get_io_service())
, ep_(ep)
{
}
@@ -432,7 +438,7 @@ private:
timer_.expires_from_now(std::chrono::seconds(3));
timer_.async_wait(strand_.wrap(std::bind(&Connection::on_timer,
shared_from_this(), std::placeholders::_1)));
socket_.async_connect(endpoint(), strand_.wrap(std::bind(
socket_.async_connect(ep_, strand_.wrap(std::bind(
&Connection::on_connect, shared_from_this(),
std::placeholders::_1)));
}
@@ -531,10 +537,10 @@ private:
};
public:
Client(short_read_test& test)
Client(short_read_test& test, endpoint_type const& ep)
: test_(test)
{
auto const p = std::make_shared<Connection>(*this);
auto const p = std::make_shared<Connection>(*this, ep);
add(p);
p->run();
}
@@ -567,7 +573,7 @@ public:
void run() override
{
Server s(*this);
Client c(*this);
Client c(*this, s.endpoint());
c.wait();
pass();
}