cassandra/.jenkins/Jenkinsfile

#!/usr/bin/env groovy
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//
// Jenkins CI declaration.
//
// The declarative pipeline is presented first as a high level view.
//
// Build and Test Stages are dynamic; the full possible list is defined by the `tasks()` function.
// There is a choice of pipeline profiles, each a set of tasks to run, see `pipelineProfiles()`.
//
// All tasks use the dockerised CI-agnostic scripts found under `.build/docker/`
// Tasks of `type: test` always use `.build/docker/run-tests.sh`
//
//
// This Jenkinsfile is expected to work on any Jenkins infrastructure.
// The controller should have 4 cpu, 12GB ram (and be configured to use `-XX:+UseG1GC -Xmx8G`)
// It is required to have agents providing five labels, each of which can provide docker and the following capabilities:
// - cassandra-amd64-small : 1 cpu, 1GB ram
// - cassandra-small : 1 cpu, 1GB ram (alias for above but for any arch)
// - cassandra-amd64-medium : 3 cpu, 5GB ram
// - cassandra-medium : 3 cpu, 5GB ram (alias for above but for any arch)
// - cassandra-amd64-large : 7 cpu, 14GB ram
//
// When running builds parameterised to other architectures the corresponding labels are expected.
// For example 'arm64' requires the labels: cassandra-arm64-small, cassandra-arm64-medium, cassandra-arm64-large.
//
// The built-in node must have the "controller" label. There must be more than two agents for each label.
//
// Plugins required are:
// git, workflow-job, workflow-cps, junit, workflow-aggregator, ws-cleanup, pipeline-build-step, test-stability, copyartifact.
//
// Any functionality that depends upon ASF Infra (i.e. the canonical ci-cassandra.a.o)
// will be ignored when run on other environments.
// Note there are also differences when CI is being run pre- or post-commit.
//
// CAUTION! When running CI with changes in this file, ensure the "Pipeline script from SCM" scm details match
// the branches being tested. These details don't honour the per-build repository and branch parameterisation.
//
// Validate/lint this file using the following command
// `curl -X POST -F "jenkinsfile=<.jenkins/Jenkinsfile" https://ci-cassandra.apache.org/pipeline-model-converter/validate`
//
pipeline {
  agent { label 'cassandra-small' }
  options {
    // must have: avoids agents wasting idle time on a controller bottleneck
    durabilityHint('PERFORMANCE_OPTIMIZED')
  }
  parameters {
    string(name: 'repository', defaultValue: params.repository ?: scm.userRemoteConfigs[0].url, description: 'Cassandra Repository')
    string(name: 'branch', defaultValue: params.branch ?: scm.userRemoteConfigs[0].refspec, description: 'Branch')
    choice(name: 'profile', choices: pipelineProfileNames(params.profile ?: ''), description: 'Pick a pipeline profile.')
    string(name: 'profile_custom_regexp', defaultValue: params.profile_custom_regexp ?: '', description: 'Regexp for stages when using custom profile. See `testSteps` in Jenkinsfile for list of stages. Example: stress.*|jvm-dtest.*')
    choice(name: 'architecture', choices: archsSupported() + "all", description: 'Pick architecture. ARM64 is disabled by default at the moment.')
    string(name: 'jdk', defaultValue: params.jdk ?: '', description: 'Restrict JDK versions. (e.g. "11", "17", etc)')
    string(name: 'dtest_repository', defaultValue: params.dtest_repository ?: 'https://github.com/apache/cassandra-dtest', description: 'Cassandra DTest Repository')
    string(name: 'dtest_branch', defaultValue: params.dtest_branch ?: 'trunk', description: 'DTest Branch')
  }
  stages {
    stage('jar') {
      // the jar stage executes only the 'jar' build step, via the build(…) function
      // the results of these (per jdk, per arch) are then stashed and used for every other build and test step
      steps {
        script {
          parallel(getJarTasks())
        }
      }
    }
    stage('Tests') {
      // the Tests stage executes all other build and test steps.
      // build steps are sent to the build(…) function, test steps to the test(…) function
      // these steps are parameterised and split by the tasks() function
      when {
        expression { hasNonJarTasks() }
      }
      steps {
        script {
          parallel(tasks()['tests'])
        }
      }
    }
    stage('Summary') {
      // generate the ci_summary.html and results_details.tar.xz artefacts
      steps {
        generateTestReports()
      }
    }
  }
  post {
    failure {
      echo "ERROR pipeline failed, not all tests were run"
    }
    always {
      sendNotifications()
    }
  }
}
///////////////////////////
//// scripting support ////
///////////////////////////
def archsSupported() { return ["amd64", "arm64"] }
def pythonsSupported() { return ["3.8", "3.11"] }
def pythonDefault() { return "3.8" }
def pipelineProfiles() {
  return [
    'packaging': ['artifacts', 'lint', 'debian', 'redhat'],
    'skinny': ['lint', 'cqlsh-test', 'test', 'jvm-dtest', 'simulator-dtest', 'dtest'],
    'pre-commit': ['artifacts', 'lint', 'debian', 'redhat', 'fqltool-test', 'cqlsh-test', 'test', 'test-latest', 'stress-test', 'test-burn', 'jvm-dtest', 'simulator-dtest', 'dtest', 'dtest-latest'],
    'pre-commit w/ upgrades': ['artifacts', 'lint', 'debian', 'redhat', 'fqltool-test', 'cqlsh-test', 'test', 'test-latest', 'stress-test', 'test-burn', 'jvm-dtest', 'jvm-dtest-upgrade', 'simulator-dtest', 'dtest', 'dtest-novnode', 'dtest-latest', 'dtest-upgrade'],
    'post-commit': ['artifacts', 'lint', 'debian', 'redhat', 'fqltool-test', 'cqlsh-test', 'test-cdc', 'test', 'test-latest', 'test-compression', 'stress-test', 'test-burn', 'long-test', 'test-oa', 'test-system-keyspace-directory', 'jvm-dtest', 'jvm-dtest-upgrade', 'simulator-dtest', 'dtest', 'dtest-novnode', 'dtest-latest', 'dtest-large', 'dtest-large-novnode', 'dtest-large-latest', 'dtest-upgrade', 'dtest-upgrade-novnode', 'dtest-upgrade-large', 'dtest-upgrade-novnode-large'],
    'custom': []
  ]
}
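// Moves `putFirst` to the front of the profile names, so the Jenkins choice parameter
// defaults to the previously selected profile.
//  e.g. pipelineProfileNames('skinny')
//       => ['skinny', 'packaging', 'pre-commit', 'pre-commit w/ upgrades', 'post-commit', 'custom']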
def pipelineProfileNames(putFirst) {
  set = pipelineProfiles().keySet() as List
  set = set - putFirst
  set.add(0, putFirst)
  return set
}
def tasks() {
  // Steps config
  def buildSteps = [
    'jar': [script: 'build-jars.sh', toCopy: null],
    'artifacts': [script: 'build-artifacts.sh', toCopy: 'apache-cassandra-*.tar.gz,apache-cassandra-*.jar,apache-cassandra-*.pom'],
    'lint': [script: 'check-code.sh', toCopy: null],
    'debian': [script: 'build-debian.sh', toCopy: 'cassandra_*,cassandra-tools_*'],
    'redhat': [script: 'build-redhat.sh rpm', toCopy: '*.rpm'],
  ]
  buildSteps.each() {
    it.value.put('type', 'build')
    it.value.put('size', 'small')
    it.value.put('splits', 1)
  }
  def testSteps = [
    // Each step's number of splits needs to be high enough to avoid the one hour per split timeout,
    // and low enough that the test time in each split is a multiple of its setup+build time.
    // Splits can also be poorly balanced: splitting or renaming test classes is the best tactic.
    // On unsaturated systems 10 minutes per split is optimal, higher with saturation
    // (some buffer on the heaviest split under the 1h max is required, ref `timeout(…)` in `test(…)`)
    'cqlsh-test': [splits: 1],
    'fqltool-test': [splits: 1, size: 'small'],
    'test-cdc': [splits: 20],
    'test': [splits: 20],
    'test-latest': [splits: 20],
    'test-compression': [splits: 20],
    'stress-test': [splits: 1, size: 'small'],
    'test-burn': [splits: 2],
    'long-test': [splits: 4],
    'test-oa': [splits: 20],
    'test-system-keyspace-directory': [splits: 20],
    'jvm-dtest': [splits: 16],
    'jvm-dtest-upgrade': [splits: 6],
    'simulator-dtest': [splits: 1, size: 'large'],
    'dtest': [splits: 64, size: 'large'],
    'dtest-novnode': [splits: 64, size: 'large'],
    'dtest-latest': [splits: 64, size: 'large'],
    'dtest-large': [splits: 6, size: 'large'],
    'dtest-large-novnode': [splits: 6, size: 'large'],
    'dtest-large-latest': [splits: 6, size: 'large'],
    'dtest-upgrade': [splits: 160, size: 'large'],
    'dtest-upgrade-novnode': [splits: 160, size: 'large'],
    'dtest-upgrade-large': [splits: 40, size: 'large'],
    'dtest-upgrade-novnode-large': [splits: 40, size: 'large'],
  ]
  testSteps.each() {
    it.value.put('type', 'test')
    if (!it.value['size']) {
      it.value.put('size', 'medium')
    }
    if (it.key.startsWith('dtest')) {
      it.value.put('python-dtest', true)
    }
  }
  def stepsMap = buildSteps + testSteps
  // define matrix axes
  def Map matrix_axes = [
    arch: archsSupported(),
    jdk: javaVersionsSupported(),
    python: pythonsSupported(),
    cython: ['yes', 'no'],
    step: stepsMap.keySet(),
    split: (1..testSteps.values().splits.max()).toList()
  ]
  def javaVersionDefault = javaVersionDefault()
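  // Take the full cartesian product of the axes, then filter out the combinations that make no sense:
  //  e.g. a cell with a non-default python or with cython 'yes' survives only for the cqlsh-test step,
  //  and non-amd64 cells survive only for steps of type 'build'.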
  def List _axes = getMatrixAxes(matrix_axes).findAll { axis ->
    (isArchEnabled(axis['arch'])) && // skip disabled archs
    (isJdkEnabled(axis['jdk'])) && // skip disabled jdks
    (isStageEnabled(axis['step'])) && // skip disabled steps
    !(axis['python'] != pythonDefault() && 'cqlsh-test' != axis['step']) && // use only the default python for all steps except cqlsh-test
    !(axis['cython'] != 'no' && 'cqlsh-test' != axis['step']) && // cython only for cqlsh-test, disable for others
    !(axis['jdk'] != javaVersionDefault && ('cqlsh-test' == axis['step'] || 'simulator-dtest' == axis['step'] || axis['step'].contains('dtest-upgrade'))) && // run cqlsh-test, simulator-dtest, *dtest-upgrade only with the default jdk
    // disable splits beyond each step's configured number of splits
    !(axis['split'] > 1 && !stepsMap.findAll { entry -> entry.value.splits >= axis['split'] }.keySet().contains(axis['step'])) &&
    // run only the build types on non-amd64
    !(axis['arch'] != 'amd64' && !stepsMap.findAll { entry -> 'build' == entry.value.type }.keySet().contains(axis['step']))
  }
  def Map tasks = [
    jars: [failFast: true],
    tests: [failFast: true]
  ]
  for (def axis in _axes) {
    def cell = axis
    def name = getStepName(cell, stepsMap[cell.step])
    tasks[cell.step == "jar" ? "jars" : "tests"][name] = { ->
      "${stepsMap[cell.step].type}"(stepsMap[cell.step], cell)
    }
  }
  return tasks
}
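// Expands a map of axes into a list of cells, one per combination (the cartesian product).
//  e.g. getMatrixAxes([arch: ['amd64', 'arm64'], jdk: ['11', '17']]) returns the four cells
//       [arch: 'amd64', jdk: '11'], [arch: 'arm64', jdk: '11'], [arch: 'amd64', jdk: '17'], [arch: 'arm64', jdk: '17']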
@NonCPS
def List getMatrixAxes(Map matrix_axes) {
  def List axes = []
  matrix_axes.each { axis, values ->
    List axisList = []
    values.each { value ->
      axisList << [(axis): value]
    }
    axes << axisList
  }
  axes.combinations()*.sum()
}
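// Human-readable name for a cell, used as the parallel branch (stage) name.
//  e.g. for cell [step: 'test', arch: 'amd64', jdk: '11', python: '3.8', cython: 'no', split: 3]
//       with splits: 20, the name is "test jdk11 3/20"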
def getStepName(cell, command) {
  def arch = "amd64" == cell.arch ? "" : " ${cell.arch}"
  def python = "cqlsh-test" != cell.step ? "" : " python${cell.python}"
  def cython = "no" == cell.cython ? "" : " cython"
  def split = command.splits > 1 ? " ${cell.split}/${command.splits}" : ""
  return "${cell.step}${arch} jdk${cell.jdk}${python}${cython}${split}"
}
def getJarTasks() {
  Map jars = tasks()['jars']
  assertJarTasks(jars)
  return jars
}
def assertJarTasks(jars) {
  if (jars.size() < 2) {
    error("Nothing to build. Check parameters: jdk ${params.jdk} (${javaVersionsSupported()}), arch ${params.architecture} (${archsSupported()})")
  }
}
def hasNonJarTasks() {
  return tasks()['tests'].size() > 1
}
/**
 * Return the default JDK defined by build.xml
 **/
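//  e.g. a build.xml property `<property name="java.default" value="11"/>` yields "11"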
def javaVersionDefault() {
  sh (returnStdout: true, script: 'grep \'property\\s*name=\"java.default\"\' build.xml | sed -ne \'s/.*value=\"\\([^\"]*\\)\".*/\\1/p\'').trim()
}
/**
 * Return the supported JDKs defined by build.xml
 **/
def javaVersionsSupported() {
  sh (returnStdout: true, script: 'grep \'property\\s*name=\"java.supported\"\' build.xml | sed -ne \'s/.*value=\"\\([^\"]*\\)\".*/\\1/p\'').trim().split(',')
}
/**
 * Is this a post-commit build (or a pre-commit build)?
 **/
def isPostCommit() {
  // any build of a branch found on github.com/apache/cassandra is considered a post-commit (post-merge) CI run
  return params.repository && params.repository.contains("apache/cassandra") // no params exist on the first build
}
/**
 * Are we running on ci-cassandra.apache.org ?
 **/
def isCanonical() {
  return "${JENKINS_URL}".contains("ci-cassandra.apache.org")
}
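//  e.g. with profile 'custom' and profile_custom_regexp 'jvm-dtest.*', the stages jvm-dtest
//       and jvm-dtest-upgrade are enabled (the 'jar' stage is always enabled)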
def isStageEnabled(stage) {
  return "jar" == stage || pipelineProfiles()[params.profile]?.contains(stage) || ("custom" == params.profile && stage ==~ params.profile_custom_regexp)
}
def isArchEnabled(arch) {
  return params.architecture == arch || "all" == params.architecture
}
def isJdkEnabled(jdk) {
  return !params.jdk?.trim() || params.jdk.trim() == jdk
}
/**
 * Renders build script into pipeline steps
 **/
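// Failed attempts are retried once (maxAttempts = 2) on a different agent: appending
// "&&!${NODE_NAME}" to the node label excludes the agent the previous attempt ran on.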
def build(command, cell) {
  def build_script = ".build/docker/${command.script}"
  def maxAttempts = 2
  def attempt = 0
  def nodeExclusion = ""
  retry(maxAttempts) {
    attempt++
    node(getNodeLabel(command, cell) + nodeExclusion) {
      nodeExclusion = "&&!${NODE_NAME}"
      withEnv(cell.collect { k, v -> "${k}=${v}" }) {
        ws("workspace/${JOB_NAME}/${BUILD_NUMBER}/${cell.step}/${cell.arch}/jdk-${cell.jdk}") {
          fetchSource(cell.step, cell.arch, cell.jdk)
          sh """
            test -f .jenkins/Jenkinsfile || { echo "Invalid git fork/branch"; exit 1; }
            grep -q "Jenkins CI declaration" .jenkins/Jenkinsfile || { echo "Only Cassandra 5.0+ supported"; exit 1; }
          """
          fetchDockerImages("redhat" == cell.step ? ['almalinux-build'] : ['bullseye-build'])
          def cell_suffix = "_jdk${cell.jdk}_${cell.arch}"
          def logfile = "stage-logs/${JOB_NAME}_${BUILD_NUMBER}_${cell.step}${cell_suffix}_attempt${attempt}.log.xz"
          def script_vars = "#!/bin/bash \n set -o pipefail ; " // pipe to tee needs pipefail
          script_vars = "${script_vars} m2_dir=\'${WORKSPACE}/build/m2\'"
          timeout(time: 1, unit: 'HOURS') {
            def status = sh label: "RUNNING ${cell.step}...", script: "${script_vars} ${build_script} ${cell.jdk} 2>&1 | tee >( xz -c > build/${logfile} )", returnStatus: true
            dir("build") {
              archiveArtifacts artifacts: "${logfile}", fingerprint: true
              copyToNightlies("${logfile}", "${cell.step}/jdk${cell.jdk}/${cell.arch}/")
            }
            if (0 != status) { error("Stage ${cell.step}${cell_suffix} failed with exit status ${status}") }
            if ("jar" == cell.step) {
              stash name: "${cell.arch}_${cell.jdk}"
            }
          }
          dir("build") {
            copyToNightlies("${command.toCopy}", "${cell.step}/jdk${cell.jdk}/${cell.arch}/")
          }
          cleanAgent(cell.step)
        }
      }
    }
  }
}
def test(command, cell) {
  if (command.containsKey('script')) { error("test commands all use `.build/docker/run-tests.sh`") }
  def splits = command.splits ? command.splits : 1
  def maxAttempts = 2
  def attempt = 0
  def nodeExclusion = ""
  retry(maxAttempts) {
    attempt++
    node(getNodeLabel(command, cell) + nodeExclusion) {
      nodeExclusion = "&&!${NODE_NAME}"
      withEnv(cell.collect { k, v -> "${k}=${v}" }) {
        ws("workspace/${JOB_NAME}/${BUILD_NUMBER}/${cell.step}/${cell.arch}/jdk-${cell.jdk}/python-${cell.python}") {
          fetchSource(cell.step, cell.arch, cell.jdk)
          fetchDockerImages(['ubuntu2004_test'])
          def cell_suffix = "_jdk${cell.jdk}_python_${cell.python}_${cell.cython}_${cell.arch}_${cell.split}_${splits}"
          def logfile = "stage-logs/${JOB_NAME}_${BUILD_NUMBER}_${cell.step}${cell_suffix}_attempt${attempt}.log.xz"
          def script_vars = "#!/bin/bash \n set -o pipefail ; " // pipe to tee needs pipefail
          script_vars = "${script_vars} python_version=\'${cell.python}\'"
          script_vars = "${script_vars} m2_dir=\'${WORKSPACE}/build/m2\'"
          if ("cqlsh-test" == cell.step) {
            script_vars = "${script_vars} cython=\'${cell.cython}\'"
          }
          script_vars = fetchDTestsSource(command, script_vars)
          timeout(time: 1, unit: 'HOURS') { // best throughput with each cell at ~10 minutes
            def timer = System.currentTimeMillis()
            try {
              buildJVMDTestJars(cell, script_vars, logfile)
              def status = sh label: "RUNNING TESTS ${cell.step}...", script: "${script_vars} .build/docker/run-tests.sh -a ${cell.step} -c '${cell.split}/${splits}' -j ${cell.jdk} 2>&1 | tee >( xz -c > build/${logfile} )", returnStatus: true
              dir("build") {
                archiveArtifacts artifacts: "${logfile}", fingerprint: true
              }
              if (0 != status) { error("Stage ${cell.step}${cell_suffix} failed with exit status ${status}") }
            } finally {
              def duration = System.currentTimeMillis() - timer
              def formattedTime = String.format("%tT.%tL", duration, duration)
              echo "Time ${cell.step}${cell_suffix}: ${formattedTime}"
            }
          }
          dir("build") {
            sh """
              mkdir -p test/output/${cell.step}
              find test/output -type f -name TEST*.xml -execdir mkdir -p jdk_${cell.jdk}/${cell.arch} ';' -execdir mv {} jdk_${cell.jdk}/${cell.arch}/{} ';'
              find test/output -name cqlshlib.xml -execdir mv cqlshlib.xml ${cell.step}/cqlshlib${cell_suffix}.xml ';'
              find test/output -name nosetests.xml -execdir mv nosetests.xml ${cell.step}/nosetests${cell_suffix}.xml ';'
            """
            junit testResults: "test/**/TEST-*.xml,test/**/cqlshlib*.xml,test/**/nosetests*.xml", testDataPublishers: [[$class: 'StabilityTestDataPublisher']]
            sh "find test/output -type f -name *.xml -exec sh -c 'xz -f {} &' ';' ; wait ; find test/output -type f -name *.xml.xz | wc -l"
            archiveArtifacts artifacts: "test/logs/**,test/**/TEST-*.xml.xz,test/**/cqlshlib*.xml.xz,test/**/nosetests*.xml.xz", fingerprint: true
            copyToNightlies("${logfile}, test/logs/**", "${cell.step}/${cell.arch}/jdk${cell.jdk}/python${cell.python}/cython_${cell.cython}/" + "split_${cell.split}_${splits}".replace("/", "_"))
          }
          cleanAgent(cell.step)
        }
      }
    }
  }
}
def fetchSource(stage, arch, jdk) {
  cleanAgent(stage)
  if ("jar" == stage) {
    checkout changelog: false, scm: scmGit(branches: [[name: params.branch ?: 'trunk']], extensions: [cloneOption(depth: 1, noTags: true, reference: '', shallow: true)], userRemoteConfigs: [[url: params.repository]])
    sh "mkdir -p build/stage-logs"
  } else {
    unstash name: "${arch}_${jdk}"
  }
}
def fetchDTestsSource(command, script_vars) {
  if (command.containsKey('python-dtest')) {
    checkout changelog: false, poll: false, scm: scmGit(branches: [[name: params.dtest_branch ?: 'trunk']], extensions: [cloneOption(depth: 1, noTags: true, reference: '', shallow: true), [$class: 'RelativeTargetDirectory', relativeTargetDir: "${WORKSPACE}/build/cassandra-dtest"]], userRemoteConfigs: [[url: params.dtest_repository]])
    sh "test -f build/cassandra-dtest/requirements.txt || { echo 'Invalid cassandra-dtest fork/branch'; exit 1; }"
    return "${script_vars} cassandra_dtest_dir='${WORKSPACE}/build/cassandra-dtest'"
  }
  return script_vars
}
def buildJVMDTestJars(cell, script_vars, logfile) {
  if (cell.step.startsWith("jvm-dtest-upgrade")) {
    try {
      // the dtest jars are built once per (arch, jdk) and cached via stash
      unstash name: "jvm_dtests_${cell.arch}_${cell.jdk}"
    } catch (error) {
      // no stash yet in this build: build the jars and cache them
      sh label: "RUNNING build_dtest_jars...", script: "${script_vars} .build/docker/run-tests.sh -a build_dtest_jars -j ${cell.jdk} 2>&1 | tee >( xz -c > build/${logfile} )"
      stash name: "jvm_dtests_${cell.arch}_${cell.jdk}", includes: '**/dtest*.jar'
    }
  }
}
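// Docker images are tagged with the md5sum of their dockerfile,
//  e.g. .build/docker/bullseye-build.docker maps to apache/cassandra-bullseye-build:<md5 of that file>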
def fetchDockerImages(dockerfiles) {
  // prefetching from apache jfrog reduces the risk of hitting dockerhub pull rate limits
  // also prefetch alpine as it is used as a utility in the scripts
  def dockerfilesVar = dockerfiles.join(' ')
  sh """#!/bin/bash
    for dockerfile in ${dockerfilesVar} ; do
      image_tag="\$(md5sum .build/docker/\${dockerfile}.docker | cut -d' ' -f1)"
      image_name="apache/cassandra-\${dockerfile}:\${image_tag}"
      if ! ( [[ "" != "\$(docker images -q \${image_name} 2>/dev/null)" ]] ) ; then
        docker pull -q apache.jfrog.io/cassan-docker/\${image_name} &
      fi
    done
    docker pull -q apache.jfrog.io/cassan-docker/alpine:3.19.1 &
    wait
  """
}
def getNodeLabel(command, cell) {
  echo "using node label: cassandra-${cell.arch}-${command.size}"
  return "cassandra-${cell.arch}-${command.size}"
}
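// Uploads artefacts to nightlies.apache.org, only on the canonical ci-cassandra.apache.org.
//  e.g. copyToNightlies("lint.log.xz", "lint/jdk11/amd64/") in a (hypothetical) build 123 of job
//       Cassandra-trunk lands under https://nightlies.apache.org/cassandra/Cassandra-trunk/123/lint/jdk11/amd64/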
def copyToNightlies(sourceFiles, remoteDirectory='') {
  if (isCanonical() && sourceFiles?.trim()) {
    def remotePath = remoteDirectory.startsWith("cassandra/") ? "${remoteDirectory}" : "cassandra/${JOB_NAME}/${BUILD_NUMBER}/${remoteDirectory}"
    def attempt = 1
    retry(9) {
      if (attempt > 1) { sleep(60 * attempt) } // back off before each retry
      attempt++
      sshPublisher(
        continueOnError: true, failOnError: false,
        publishers: [
          sshPublisherDesc(
            configName: "Nightlies",
            transfers: [ sshTransfer( sourceFiles: sourceFiles, remoteDirectory: remotePath) ]
          )
        ])
    }
    echo "archived to https://nightlies.apache.org/${remotePath}"
  }
}
def cleanAgent(job_name) {
  sh "hostname"
  if (isCanonical()) {
    def agentScriptsUrl = "https://raw.githubusercontent.com/apache/cassandra-builds/trunk/jenkins-dsl/agent_scripts/"
    cleanAgentDocker(job_name, agentScriptsUrl)
    logAgentInfo(job_name, agentScriptsUrl)
  }
  cleanWs()
}
def cleanAgentDocker(job_name, agentScriptsUrl) {
  // we don't expect any build to have been running for longer than maxBuildHours
  def maxBuildHours = 12
  echo "Pruning docker for '${job_name}' on ${NODE_NAME}…"
  sh """#!/bin/bash
    set +e
    wget -q ${agentScriptsUrl}/docker_image_pruner.py
    wget -q ${agentScriptsUrl}/docker_agent_cleaner.sh
    bash docker_agent_cleaner.sh ${maxBuildHours}
  """
}
def logAgentInfo(job_name, agentScriptsUrl) {
  sh """#!/bin/bash
    set +e -o pipefail
    wget -q ${agentScriptsUrl}/agent_report.sh
    bash -x agent_report.sh | tee -a \$(date +"%Y%m%d%H%M")-disk-usage-stats.txt
  """
  copyToNightlies("*-disk-usage-stats.txt", "cassandra/ci-cassandra.apache.org/agents/${NODE_NAME}/disk-usage/")
}
/////////////////////////////////////////
////// scripting support for summary ////
/////////////////////////////////////////
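// Collects the per-split junit xml, merges the splits into one report per test target
// (build/test/output/<target> -> build/test/reports/<target>), then rolls everything up
// into the ci_summary.html and results_details.tar.xz artefacts.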
def generateTestReports() {
  node("cassandra-medium") {
    cleanAgent("generateTestReports")
    checkout changelog: false, scm: scmGit(branches: [[name: params.branch]], extensions: [cloneOption(depth: 1, noTags: true, reference: '', shallow: true)], userRemoteConfigs: [[url: params.repository]])
    def logfile = "stage-logs/${JOB_NAME}_${BUILD_NUMBER}_generateTestReports.log.xz"
    sh "mkdir -p build/stage-logs"
    def teeSuffix = "2>&1 | tee >( xz -c > build/${logfile} )"
    def script_vars = "#!/bin/bash -x \n "
    if (isCanonical()) {
      // copyArtifacts takes >4hrs, hack with a manual download instead
      sh """${script_vars}
        ( mkdir -p build/test
          wget -q ${BUILD_URL}/artifact/test/output/*zip*/output.zip
          unzip -x -d build/test -q output.zip ) ${teeSuffix}
      """
    } else {
      copyArtifacts filter: 'test/**/TEST-*.xml.xz,test/**/cqlshlib*.xml.xz,test/**/nosetests*.xml.xz', fingerprintArtifacts: true, projectName: env.JOB_NAME, selector: specific(env.BUILD_NUMBER), target: "build/", optional: true
    }
    if (fileExists('build/test/output')) {
      // merge splits for each target's test report, other axes are kept separate
      // TODO parallelise the for loop
      // TODO results_details.tar.xz needs to include all logs for failed tests
      sh """${script_vars} (
        find build/test/output -type f -name *.xml.xz | wc -l
        find build/test/output -name *.xml.xz -exec sh -c 'xz -f --decompress {} &' ';' ; wait
        for target in \$(ls build/test/output/) ; do
          if test -d build/test/output/\${target} ; then
            mkdir -p build/test/reports/\${target}
            echo "Report for \${target} (\$(find build/test/output/\${target} -name '*.xml' | wc -l) test files)"
            CASSANDRA_DOCKER_ANT_OPTS="-Dbuild.test.output.dir=build/test/output/\${target} -Dbuild.test.report.dir=build/test/reports/\${target}"
            export CASSANDRA_DOCKER_ANT_OPTS
            .build/docker/_docker_run.sh bullseye-build.docker ci/generate-test-report.sh
          fi
        done
        .build/docker/_docker_run.sh bullseye-build.docker ci/generate-ci-summary.sh || echo "failed generate-ci-summary.sh"
        tar -cf build/results_details.tar -C build/test/ reports
        xz -8f build/results_details.tar ) ${teeSuffix}
      """
      dir('build/') {
        archiveArtifacts artifacts: "ci_summary.html,results_details.tar.xz,${logfile}", fingerprint: true
        copyToNightlies("ci_summary.html,results_details.tar.xz,${logfile}")
      }
    }
  }
}
def sendNotifications() {
  if (isPostCommit() && isCanonical()) {
    // the following is expected only to work on ci-cassandra.apache.org
    def changes = '?'
    try {
      script {
        changes = formatChangeLogChanges(currentBuild.changeSets)
        echo "changes: ${changes}"
      }
      slackSend channel: '#cassandra-builds', message: ":apache: <${BUILD_URL}|${currentBuild.fullDisplayName}> completed: ${currentBuild.result}. <https://github.com/apache/cassandra/commit/${GIT_COMMIT}|${GIT_COMMIT}>\n${changes}"
      emailext to: 'builds@cassandra.apache.org', subject: "Build complete: ${currentBuild.fullDisplayName} [${currentBuild.result}] ${GIT_COMMIT}", presendScript: 'msg.removeHeader("In-Reply-To"); msg.removeHeader("References")', body: emailContent()
    } catch (Exception ex) {
      echo 'failed to send notifications ' + ex.toString()
    }
  }
}
def formatChangeLogChanges(changeLogSets) {
  def result = ''
  for (int i = 0; i < changeLogSets.size(); i++) {
    def entries = changeLogSets[i].items
    for (int j = 0; j < entries.length; j++) {
      def entry = entries[j]
      result = result + "${entry.commitId} by ${entry.author} on ${new Date(entry.timestamp)}: ${entry.msg}\n"
    }
  }
  return result
}
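// Note the email body below is a single-quoted, non-interpolated Groovy string: the ${…} tokens
// are email-ext token macros (e.g. ${BUILD_URL}, ${FAILED_TESTS}) expanded by the plugin at send time.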
def emailContent() {
  return '''
-------------------------------------------------------------------------------
Build ${ENV,var="JOB_NAME"} #${BUILD_NUMBER} ${BUILD_STATUS}
URL: ${BUILD_URL}
-------------------------------------------------------------------------------
Changes:
${CHANGES}
-------------------------------------------------------------------------------
Failed Tests:
${FAILED_TESTS,maxTests=500,showMessage=false,showStack=false}
-------------------------------------------------------------------------------
For complete test report and logs see https://nightlies.apache.org/cassandra/${JOB_NAME}/${BUILD_NUMBER}/
'''
}