Mirror of https://github.com/XRPLF/rippled.git, synced 2025-11-20 02:55:50 +00:00
Add omitted unit tests, cleanup old files
@@ -1,6 +1,5 @@

 codecov:
   ci:
-    - ci.ops.ripple.com # add custom jenkins server
     - !appveyor
-    - !travis
+    - travis
@@ -12,7 +12,7 @@ if (static OR MSVC)
 else ()
   set (Boost_USE_STATIC_RUNTIME OFF)
 endif ()
-find_dependency (Boost 1.67
+find_dependency (Boost 1.70
   COMPONENTS
     chrono
     context
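For context, the hunk above raises the minimum Boost version accepted by the exported CMake package config. Below is a minimal sketch of how a downstream project would consume such a config; the package name Ripple, the target Ripple::xrpl_core, and the project name consumer are illustrative assumptions, not taken from this commit.

# Hypothetical downstream project; names are assumptions for illustration only.
cmake_minimum_required (VERSION 3.9)
project (consumer CXX)

# find_package() loads the installed package config edited in the hunk above,
# which in turn calls find_dependency (Boost 1.70 COMPONENTS chrono context ...)
# and aborts the configure step if only an older Boost is available.
find_package (Ripple CONFIG REQUIRED)

add_executable (consumer main.cpp)
target_link_libraries (consumer PRIVATE Ripple::xrpl_core)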
@@ -392,6 +392,7 @@ if (unity)
   src/test/unity/crypto_test_unity.cpp
   src/test/unity/json_test_unity.cpp
   src/test/unity/ledger_test_unity.cpp
+  src/test/unity/net_test_unity.cpp
   src/test/unity/nodestore_test_unity.cpp
   src/test/unity/overlay_test_unity.cpp
   src/test/unity/peerfinder_test_unity.cpp
@@ -897,6 +898,11 @@ else ()
   src/test/ledger/PendingSaves_test.cpp
   src/test/ledger/SkipList_test.cpp
   src/test/ledger/View_test.cpp
+  #[===============================[
+  nounity, test sources:
+  subdir: net
+  #]===============================]
+  src/test/net/SSLHTTPDownloader_test.cpp
   #[===============================[
   nounity, test sources:
   subdir: nodestore
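As the if (unity) and else () hunk contexts above suggest, each test module is wired into the build twice: the unity branch compiles one aggregate translation unit per module, while the nounity branch lists every individual test source. A simplified sketch of that pattern follows; the target name my_tests and the use of target_sources here are assumptions for illustration, not verbatim from rippled's CMakeLists.txt.

# Simplified sketch of the unity/nounity split; target name is hypothetical.
if (unity)
  # one aggregate translation unit per test module
  target_sources (my_tests PRIVATE
    src/test/unity/net_test_unity.cpp)
else ()
  # every individual test translation unit, e.g. the new net subdir above
  target_sources (my_tests PRIVATE
    src/test/net/SSLHTTPDownloader_test.cpp)
endif ()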
@@ -974,6 +980,7 @@ else ()
   src/test/rpc/NoRipple_test.cpp
   src/test/rpc/OwnerInfo_test.cpp
   src/test/rpc/Peers_test.cpp
+  src/test/rpc/Roles_test.cpp
   src/test/rpc/RPCCall_test.cpp
   src/test/rpc/RPCOverload_test.cpp
   src/test/rpc/RobustTransaction_test.cpp
Jenkinsfile (vendored): 694 lines deleted
@@ -1,694 +0,0 @@
#!/usr/bin/env groovy

import groovy.json.JsonOutput
import java.text.*

all_status = [:]
commit_id = ''
git_fork = 'ripple'
git_repo = 'rippled'
//
// this is not the actual token, but an ID/key into the jenkins
// credential store which httpRequest can access.
//
github_cred = '6bd3f3b9-9a35-493e-8aef-975403c82d3e'
//
// root API url for our repo (default, overriden below)
//
github_api = 'https://api.github.com/repos/ripple/rippled'

try {
  stage ('Startup Checks') {
    // here we check the commit author against collaborators
    // we need a node to do this because readJSON requires
    // a filesystem (although we just pass it text)
    node {
      checkout scm
      commit_id = getCommitID()
      //
      // NOTE this getUserRemoteConfigs call requires a one-time
      // In-process Script Approval (configure jenkins). We need this
      // to detect the remote repo to interact with via the github API.
      //
      def remote_url = scm.getUserRemoteConfigs()[0].getUrl()
      if (remote_url) {
        echo "GIT URL scm: $remote_url"
        git_fork = remote_url.tokenize('/')[2]
        git_repo = remote_url.tokenize('/')[3].split('\\.')[0]
        echo "GIT FORK: $git_fork"
        echo "GIT REPO: $git_repo"
        github_api = "https://api.github.com/repos/${git_fork}/${git_repo}"
        echo "API URL REPO: $github_api"
      }

      if (env.CHANGE_AUTHOR) {
        def collab_found = false;
        //
        // this means we have some sort of PR , so verify the author
        //
        echo "CHANGE AUTHOR ---> $CHANGE_AUTHOR"
        echo "CHANGE TARGET ---> $CHANGE_TARGET"
        echo "CHANGE ID ---> $CHANGE_ID"
        //
        // check the commit author against collaborators
        // we need a node to do this because readJSON requires
        // a filesystem (although we just pass it text)
        //
        def response = httpRequest(
          timeout: 10,
          authentication: github_cred,
          url: "${github_api}/collaborators")
        def collab_data = readJSON(
          text: response.content)
        for (collaborator in collab_data) {
          if (collaborator['login'] == "$CHANGE_AUTHOR") {
            echo "$CHANGE_AUTHOR is a collaborator!"
            collab_found = true;
            break;
          }
        }

        if (! collab_found) {
          echo "$CHANGE_AUTHOR is not a collaborator - waiting for manual approval."
          sendToSlack("A <${env.BUILD_URL}|jenkins job (PR)> is waiting for approval - please review.")

          try {
            httpRequest(
              timeout: 10,
              authentication: github_cred,
              url: getCommentURL(),
              contentType: 'APPLICATION_JSON',
              httpMode: 'POST',
              requestBody: JsonOutput.toJson([
                body: """
**Thank you** for your submission. It will be reviewed soon and submitted for processing in CI.
"""
              ])
            )
          }
          catch (e) {
            echo 'had a problem interacting with github...comments are probably not updated'
          }

          try {
            input (
              message: "User $CHANGE_AUTHOR has submitted PR #$CHANGE_ID. " +
                "**Please review** the changes for any CI/security concerns " +
                "and then decide whether to proceed with building.")
          }
          catch(e) {
            def user = e.getCauses()[0].getUser().toString()
            all_status['startup'] = [
              false,
              'Approval Check',
              "Build aborted by [${user}]",
              "[console](${env.BUILD_URL}/console)"]
            error "Aborted by: [${user}]"
          }
        }
      }
    }

    // UNCOMMENT the following if we want one message for every job...
    //node('rippled-dev') {
    // sendToSlack("<${env.BUILD_URL}|Job ${env.BUILD_TAG}> has started.")
    //}
  }

  stage ('Parallel Build') {
    String[][] variants = [
      ['gcc.Release' ,'-Dassert=ON' ,'MANUAL_TESTS=true' ],
      ['docs' ,'' ,'TARGET=docs' ],
      ['msvc.Debug' ],
      ['msvc.Debug' ,'' ,'NINJA_BUILD=true' ],
      ['msvc.Debug' ,'-Dunity=OFF' ],
      ['msvc.Release' ],
      ['gcc.Debug' ],
      ['gcc.Debug' ,'-Dunity=OFF' ],
      ['gcc.Release' ,'-Dassert=ON' ],
      ['gcc.Debug' ,'-Dstatic=OFF' ],
      ['gcc.Debug' ,'-Dstatic=OFF -DBUILD_SHARED_LIBS=ON' ],
      ['gcc.Debug' ,'' ,'NINJA_BUILD=true' ],
    ]

    // create a map of all builds
    // that we want to run. The map
    // is string keys and node{} object values
    def builds = [:]
    for (int index = 0; index < variants.size(); index++) {
      def bldtype = variants[index][0]
      def cmake_extra = variants[index].size() > 1 ? variants[index][1] : ''
      def bldlabel = bldtype + cmake_extra
      def extra_env = variants[index].size() > 2 ? variants[index][2..-1] : []
      for (int j = 0; j < extra_env.size(); j++) {
        bldlabel += "_" + extra_env[j]
      }
      bldlabel = bldlabel.replace('-', '_')
      bldlabel = bldlabel.replace(' ', '')
      bldlabel = bldlabel.replace('=', '_')

      def compiler = getFirstPart(bldtype)
      def config = getSecondPart(bldtype)
      def target = 'install' // currently ignored for windows builds
      if (compiler == 'docs') {
        compiler = 'gcc'
        config = 'Release'
        target = 'docs'
      }
      def cc = 'gcc'
      def cxx = 'g++'
      def ucc = isNoUnity(cmake_extra) ? 'true' : 'false'
      def node_type =
        (compiler == 'msvc') ? 'rippled-win' : 'rippled-dev'
      // the default disposition for parallel test..disabled
      // for coverage, enabled otherwise. Can still be overridden
      // by explicitly setting with extra env settings above.
      def pt = isCoverage(cmake_extra) ? 'false' : 'true'
      def max_minutes = 25

      def env_vars = [
        "TARGET=${target}",
        "BUILD_TYPE=${config}",
        "COMPILER=${compiler}",
        "PARALLEL_TESTS=${pt}",
        'BUILD=cmake',
        "MAX_TIME=${max_minutes}m",
        "BUILD_DIR=${bldlabel}",
        "CMAKE_EXTRA_ARGS=-Dwerr=ON ${cmake_extra}",
        'VERBOSE_BUILD=true']

      builds[bldlabel] = {
        node(node_type) {
          checkout scm
          dir ('build') {
            deleteDir()
          }
          def cdir = upDir(pwd())
          echo "BASEDIR: ${cdir}"
          echo "COMPILER: ${compiler}"
          echo "BUILD_TYPE: ${config}"
          echo "USE_CC: ${ucc}"
          env_vars.addAll([
            "NIH_CACHE_ROOT=${cdir}/.nih_c"])
          if (compiler == 'msvc') {
            env_vars.addAll([
              'BOOST_ROOT=c:\\lib\\boost_1_70',
              'PROJECT_NAME=rippled',
              'MSBUILDDISABLENODEREUSE=1', // this ENV setting is probably redundant since we also pass /nr:false to msbuild
              'OPENSSL_ROOT=c:\\OpenSSL-Win64'])
          }
          else {
            env_vars.addAll([
              'NINJA_BUILD=false',
              "CCACHE_BASEDIR=${cdir}",
              'PLANTUML_JAR=/opt/plantuml/plantuml.jar',
              'APP_ARGS=--unittest-ipv6',
              'CCACHE_NOHASHDIR=true',
              "CC=${cc}",
              "CXX=${cxx}",
              'LCOV_ROOT=""',
              'PATH+CMAKE_BIN=/opt/local/cmake',
              'GDB_ROOT=/opt/local/gdb',
              'BOOST_ROOT=/opt/local/boost_1_70_0',
              "USE_CCACHE=${ucc}"])
          }

          if (extra_env.size() > 0) {
            env_vars.addAll(extra_env)
          }

          // try to figure out codecov token to use. Look for
          // MY_CODECOV_TOKEN id first so users can set that
          // on job scope but then default to RIPPLED_CODECOV_TOKEN
          // which should be globally scoped
          def codecov_token = ''
          try {
            withCredentials( [string( credentialsId: 'MY_CODECOV_TOKEN', variable: 'CODECOV_TOKEN')]) {
              codecov_token = env.CODECOV_TOKEN
            }
          }
          catch (e) {
            // this might throw when MY_CODECOV_TOKEN doesn't exist
          }
          if (codecov_token == '') {
            withCredentials( [string( credentialsId: 'RIPPLED_CODECOV_TOKEN', variable: 'CODECOV_TOKEN')]) {
              codecov_token = env.CODECOV_TOKEN
            }
          }
          env_vars.addAll(["CODECOV_TOKEN=${codecov_token}"])

          withEnv(env_vars) {
            myStage(bldlabel)
            def thrown = '';
            try {
              timeout(
                time: max_minutes * 2,
                units: 'MINUTES')
              {
                if (compiler == 'msvc') {
                  powershell "Remove-Item -Path \"${bldlabel}.txt\" -Force -ErrorAction Ignore"
                  // we capture stdout to variable because I could
                  // not figure out how to make powershell redirect internally
                  output = powershell (
                    returnStdout: true,
                    script: windowsBuildCmd())
                  // if the powershell command fails (has nonzero exit)
                  // then the command above throws, we don't get our output,
                  // and we never create this output file.
                  // SEE https://issues.jenkins-ci.org/browse/JENKINS-44930
                  // Alternatively, figure out how to reliably redirect
                  // all output above to a file (Start/Stop transcript does not work)
                  writeFile(
                    file: "${bldlabel}.txt",
                    text: output)
                }
                else {
                  sh "rm -fv ${bldlabel}.txt"
                  // execute the bld command in a redirecting shell
                  // to capture output
                  sh redhatBuildCmd(bldlabel)
                }
              }
            }
            catch(e) {
              thrown = "${e}"
              throw e
            }
            finally {
              if (bldtype == 'docs') {
                publishHTML(
                  allowMissing: true,
                  alwaysLinkToLastBuild: true,
                  keepAll: true,
                  reportName: 'Doxygen',
                  reportDir: "build/${bldlabel}/html_doc",
                  reportFiles: 'index.html')
              }
              if (isCoverage(cmake_extra)) {
                publishHTML(
                  allowMissing: true,
                  alwaysLinkToLastBuild: false,
                  keepAll: true,
                  reportName: 'Coverage',
                  reportDir: "build/${bldlabel}/coverage",
                  reportFiles: 'index.html')
              }
              def envs = ''
              for (int j = 0; j < extra_env.size(); j++) {
                envs += ", <br/>" + extra_env[j]
              }
              def cmake_txt = cmake_extra
              if (cmake_txt != '') {
                cmake_txt = " <br/>" + cmake_txt
              }
              def st = reportStatus(bldlabel, bldtype + cmake_txt + envs, env.BUILD_URL, thrown)
              lock('rippled_dev_status') {
                all_status[bldlabel] = st
              }
              if (thrown == '') {
                assert st[0] : "Unit Test Failures"
              }
            } //try-catch-finally
          } //withEnv
        } //node
      } //builds item
    } //for variants

    // this actually executes all the builds we just defined
    // above, in parallel as slaves are available
    parallel builds
  }
}

finally {
  // anything here should run always...
  stage ('Final Status') {
    node {
      def results = makeResultText()
      try {
        def res = getCommentID() //get array return b/c jenkins does not allow multiple direct return/assign
        def comment_id = res[0]
        def url_comment = res[1]
        def mode = 'PATCH'
        if (comment_id == 0) {
          echo 'no existing status comment found'
          mode = 'POST'
        }

        def body = JsonOutput.toJson([
          body: results
        ])

        response = httpRequest(
          timeout: 10,
          authentication: github_cred,
          url: url_comment,
          contentType: 'APPLICATION_JSON',
          httpMode: mode,
          requestBody: body)
      }
      catch (e) {
        echo 'had a problem interacting with github...status is probably not updated'
      }
    }
  }
}

// ---------------
// util functions
// ---------------
def myStage(name) {
  echo """
+++++++++++++++++++++++++++++++++++++++++
>> building ${name}
+++++++++++++++++++++++++++++++++++++++++
"""
}

def printGitInfo(id, log) {
  echo """
+++++++++++++++++++++++++++++++++++++++++
>> Building commit ID ${id}
>>
${log}
+++++++++++++++++++++++++++++++++++++++++
"""
}

def makeResultText () {
  def start_time = new Date()
  def sdf = new SimpleDateFormat('yyyyMMdd - HH:mm:ss')
  def datestamp = sdf.format(start_time)

  def results = """
## Jenkins Build Summary

Built from [this commit](https://github.com/${git_fork}/${git_repo}/commit/${commit_id})

Built at __${datestamp}__

### Test Results

Build Type | Log | Result | Status
---------- | --- | ------ | ------
"""
  for ( e in all_status) {
    results += e.value[1] + ' | ' + e.value[3] + ' | ' + e.value[2] + ' | ' +
      (e.value[0] ? 'PASS :white_check_mark: ' : 'FAIL :red_circle: ') + '\n'
  }
  results += '\n'
  echo 'FINAL BUILD RESULTS'
  echo results
  results
}

def getCommentURL () {
  def url_c = ''
  if (env.CHANGE_ID && env.CHANGE_ID ==~ /\d+/) {
    //
    // CHANGE_ID indicates we are building a PR
    // find PR comments
    //
    def resp = httpRequest(
      timeout: 10,
      authentication: github_cred,
      url: "${github_api}/pulls/$CHANGE_ID")
    def result = readJSON(text: resp.content)
    //
    // follow issue comments link
    //
    url_c = result['_links']['issue']['href'] + '/comments'
  }
  else {
    //
    // if not a PR, just search comments for our commit ID
    //
    url_c =
      "${github_api}/commits/${commit_id}/comments"
  }
  url_c
}

def getCommentID () {
  def url_c = getCommentURL()
  def response = httpRequest(
    timeout: 10,
    authentication: github_cred,
    url: url_c)
  def data = readJSON(text: response.content)
  def comment_id = 0
  // see if we can find and existing comment here with
  // a heading that matches ours...
  for (comment in data) {
    if (comment['body'] =~ /(?m)^##\s+Jenkins Build/) {
      comment_id = comment['id']
      echo "existing status comment ${comment_id} found"
      url_c = comment['url']
      break;
    }
  }
  [comment_id, url_c]
}

def getCommitID () {
  def cid = sh (
    script: 'git rev-parse HEAD',
    returnStdout: true)
  cid = cid.trim()
  echo "commit ID is ${cid}"
  commit_log = sh (
    script: "git show --name-status ${cid}",
    returnStdout: true)
  printGitInfo (cid, commit_log)
  cid
}

@NonCPS
def getResults(text, label) {
  // example:
  /// 194.5s, 154 suites, 948 cases, 360485 tests total, 0 failures
  // or build log format:
  // [msvc.release] 71.3s, 162 suites, 995 cases, 318901 tests total, 1 failure
  def matcher =
    text == '' ?
      manager.getLogMatcher(/\[${label}\].+?(\d+) case[s]?, (\d+) test[s]? total, (\d+) (failure(s?))/) :
      text =~ /(\d+) case[s]?, (\d+) test[s]? total, (\d+) (failure(s?))/
  matcher ? matcher[0][1] + ' cases, ' + matcher[0][3] + ' failed' : 'no test results'
}

@NonCPS
def getFailures(text, label) {
  // [see above for format]
  def matcher =
    text == '' ?
      manager.getLogMatcher(/\[${label}\].+?(\d+) test[s]? total, (\d+) (failure(s?))/) :
      text =~ /(\d+) test[s]? total, (\d+) (failure(s?))/
  // if we didn't match, then return -1 since something is
  // probably wrong, e.g. maybe the build failed...
  matcher ? matcher[0][2] as Integer : -1i
}

@NonCPS
def getTime(text, label) {
  // look for text following a label 'real' for
  // wallclock time. Some `time`s report fractional
  // seconds and we can omit those in what we report
  def matcher =
    text == '' ?
      manager.getLogMatcher(/(?m)^\[${label}\]\s+real\s+(.+)\.(\d+?)[s]?/) :
      text =~ /(?m)^real\s+(.+)\.(\d+?)[s]?/
  if (matcher) {
    return matcher[0][1] + 's'
  }

  // alternatively, look for powershell elapsed time
  // format, e.g. :
  // TotalSeconds : 523.2140529
  def matcher2 =
    text == '' ?
      manager.getLogMatcher(/(?m)^\[${label}\]\s+TotalSeconds\s+:\s+(\d+)\.(\d+?)?/) :
      text =~ /(?m)^TotalSeconds\s+:\s+(\d+)\.(\d+?)?/
  matcher2 ? matcher2[0][1] + 's' : 'n/a'
}

@NonCPS
def getFirstPart(bld) {
  def matcher = bld =~ /^(.+?)\.(.+)$/
  matcher ? matcher[0][1] : bld
}

@NonCPS
def isNoUnity(bld) {
  def matcher = bld =~ /-Dunity=(off|OFF)/
  matcher ? true : false
}

@NonCPS
def isCoverage(bld) {
  def matcher = bld =~ /-Dcoverage=(on|ON)/
  matcher ? true : false
}

@NonCPS
def getSecondPart(bld) {
  def matcher = bld =~ /^(.+?)\.(.+)$/
  matcher ? matcher[0][2] : bld
}

// because I can't seem to find path manipulation
// functions in groovy....
@NonCPS
def upDir(path) {
  def matcher = path =~ /^(.+)[\/\\](.+?)/
  matcher ? matcher[0][1] : path
}

def sendToSlack(message) {
  try {
    withCredentials( [string( credentialsId: 'RIPPLED_SLACK_INCOMING_URL', variable: 'SLACK_URL')]) {
      // I was unable to make httpRequest method work with the
      // formdata required by slack API, so resorting
      // to curl commands...
      sh '''\
CONTENT=$(tr -d '[\n]' <<JSON
payload={
"channel": "#cpp-notifications",
"username": "JenkinsCI",
"text": "''' + message + '''",
"icon_emoji": ":jenkins:"}
JSON
)
curl ${SLACK_URL} --data-urlencode "${CONTENT}"
'''
    }
  }
  catch (e) {
    echo "had a problem posting to slack: ${e}"
  }
}

// the shell command used for building on redhat
def redhatBuildCmd(bldlabel) {
  '''\
#!/bin/bash
set -ex
log_file=''' + "${bldlabel}.txt" + '''
exec 3>&1 1>>${log_file} 2>&1
ccache -s
source /opt/rh/devtoolset-7/enable
/usr/bin/time -p ./bin/ci/ubuntu/build-and-test.sh 2>&1
ccache -s
'''
}

// the powershell command used for building
def windowsBuildCmd() {
  '''
# Enable streams 3-6
$WarningPreference = 'Continue'
$VerbosePreference = 'Continue'
$DebugPreference = 'Continue'
$InformationPreference = 'Continue'

Invoke-BatchFile "${env:ProgramFiles(x86)}\\Microsoft Visual Studio\\2017\\Community\\VC\\Auxiliary\\Build\\vcvarsall.bat" x86_amd64
Get-ChildItem env:* | Sort-Object name
cl
cmake --version
New-Item -ItemType Directory -Force -Path "build/$env:BUILD_DIR" -ErrorAction Stop
$sw = [Diagnostics.Stopwatch]::StartNew()
try {
Push-Location "build/$env:BUILD_DIR"
if ($env:NINJA_BUILD -eq "true") {
Invoke-Expression "& cmake -G`"Ninja`" -DCMAKE_BUILD_TYPE=$env:BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=ON $env:CMAKE_EXTRA_ARGS ../.."
}
else {
Invoke-Expression "& cmake -G`"Visual Studio 15 2017 Win64`" -DCMAKE_VERBOSE_MAKEFILE=ON $env:CMAKE_EXTRA_ARGS ../.."
}
if ($LastExitCode -ne 0) { throw "CMake failed" }

## as of 01/2018, DO NOT USE cmake to run the actual build step. for some
## reason, cmake spawning the build under jenkins causes MSBUILD/ninja to
## get stuck at the end of the build. Perhaps cmake is spawning
## incorrectly or failing to pass certain params

if ($env:NINJA_BUILD -eq "true") {
ninja -j $env:NUMBER_OF_PROCESSORS -v
}
else {
msbuild /fl /m /nr:false /p:Configuration="$env:BUILD_TYPE" /p:Platform=x64 /p:GenerateFullPaths=True /v:normal /nologo /clp:"ShowCommandLine;DisableConsoleColor" "$env:PROJECT_NAME.vcxproj"
}
if ($LastExitCode -ne 0) { throw "CMake build failed" }

$exe = "./$env:BUILD_TYPE/$env:PROJECT_NAME"
if ($env:NINJA_BUILD -eq "true") {
$exe = "./$env:PROJECT_NAME"
}
"Exe is at $exe"
$params = '--unittest', '--quiet', '--unittest-log'
if ($env:PARALLEL_TESTS -eq "true") {
$params = $params += "--unittest-jobs=$env:NUMBER_OF_PROCESSORS"
}
& $exe $params
if ($LastExitCode -ne 0) { throw "Unit tests failed" }
}
catch {
throw
}
finally {
$sw.Stop()
$sw.Elapsed
Pop-Location
}
'''
}

// post processing step after each build:
// * archives the log file
// * adds short description/status to build status
// * returns an array of result info to add to the all_build summary
def reportStatus(label, type, bldurl, errmsg) {
  def outstr = ''
  def loglink = "[console](${bldurl}/console)"
  def logfile = "${label}.txt"
  if ( fileExists(logfile) ) {
    archiveArtifacts( artifacts: logfile )
    outstr = readFile(logfile)
    loglink = "[logfile](${bldurl}/artifact/${logfile})"
  }
  def st = getResults(outstr, label)
  def time = getTime(outstr, label)
  def fail_count = getFailures(outstr, label)
  outstr = null
  def txtcolor =
    (fail_count == 0 && errmsg == '') ? 'DarkGreen' : 'Crimson'
  def shortbld = label
  // this is just an attempt to shorten the
  // summary text label to the point of absurdity..
  shortbld = shortbld.replace('Debug', 'dbg')
  shortbld = shortbld.replace('Release', 'rel')
  shortbld = shortbld.replace('true', 'Y')
  shortbld = shortbld.replace('false', 'N')
  shortbld = shortbld.replace('Dcoverage', 'cov')
  shortbld = shortbld.replace('Dassert', 'asrt')
  shortbld = shortbld.replace('Dunity', 'unty')
  shortbld = shortbld.replace('Dsan=address', 'asan')
  shortbld = shortbld.replace('Dsan=thread', 'tsan')
  shortbld = shortbld.replace('Dsan=undefined', 'ubsan')
  shortbld = shortbld.replace('PARALLEL_TEST', 'PL')
  shortbld = shortbld.replace('MANUAL_TESTS', 'MAN')
  shortbld = shortbld.replace('NINJA_BUILD', 'ninja')
  shortbld = shortbld.replace('DEBUGGER', 'gdb')
  shortbld = shortbld.replace('ON', 'Y')
  shortbld = shortbld.replace('OFF', 'N')
  def stattext = "${st}, t: ${time}"
  if (fail_count <= 0 && errmsg != '') {
    stattext += " [BAD EXIT]"
  }
  manager.addShortText(
    "${shortbld}: ${stattext}",
    txtcolor,
    'white',
    '0px',
    'white')
  [fail_count == 0 && errmsg == '', type, stattext, loglink]
}

@@ -21,6 +21,7 @@
 #include <ripple/protocol/ErrorCodes.h>
 #include <ripple/protocol/jss.h>
 #include <test/jtx.h>
+#include <test/jtx/WSClient.h>
 #include <string>
 #include <unordered_map>