From cabc5145e683660ccdba3b629d5b2f1a432690fe Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Fri, 25 Oct 2019 12:57:46 -0400 Subject: [PATCH] Re-add pipeline for flaky test runner job (#48781) * Revert "Revert "Add pipeline for flaky test runner job (#46740)"" This reverts commit 7d96a13fad5337f262743b51d5bdd6103d2c51a4. Also reconcile changes to Jenkinsfile since original revert happened * Fix param parsing and add missed change * Add missing variable --- .ci/Jenkinsfile_flaky | 113 ++++++ Jenkinsfile | 322 ++---------------- src/dev/ci_setup/setup_env.sh | 6 + src/dev/precommit_hook/casing_check_config.js | 3 +- ...enkins_build_kbn_tp_sample_panel_action.sh | 9 + test/scripts/jenkins_ci_group.sh | 5 +- vars/kibanaPipeline.groovy | 251 ++++++++++++++ vars/runbld.groovy | 11 + 8 files changed, 425 insertions(+), 295 deletions(-) create mode 100644 .ci/Jenkinsfile_flaky create mode 100755 test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh create mode 100644 vars/kibanaPipeline.groovy create mode 100644 vars/runbld.groovy diff --git a/.ci/Jenkinsfile_flaky b/.ci/Jenkinsfile_flaky new file mode 100644 index 0000000000000..3f77243da2c1b --- /dev/null +++ b/.ci/Jenkinsfile_flaky @@ -0,0 +1,113 @@ +#!/bin/groovy + +library 'kibana-pipeline-library' +kibanaLibrary.load() + +// Looks like 'oss:ciGroup:1' or 'oss:firefoxSmoke' +def JOB_PARTS = params.CI_GROUP.split(':') +def IS_XPACK = JOB_PARTS[0] == 'xpack' +def JOB = JOB_PARTS[1] +def CI_GROUP = JOB_PARTS.size() > 2 ? JOB_PARTS[2] : '' + +def worker = getWorkerFromParams(IS_XPACK, JOB, CI_GROUP) + +def workerFailures = [] + +currentBuild.displayName += trunc(" ${params.GITHUB_OWNER}:${params.branch_specifier}", 24) +currentBuild.description = "${params.CI_GROUP}
Executions: ${params.NUMBER_EXECUTIONS}" + +// Note: If you increase agent count, it will execute NUMBER_EXECUTIONS per agent. It will not divide them up amongst the agents +// e.g. NUMBER_EXECUTIONS = 25, agentCount = 4 results in 100 total executions +def agentCount = 1 + +stage("Kibana Pipeline") { + timeout(time: 180, unit: 'MINUTES') { + timestamps { + ansiColor('xterm') { + def agents = [:] + for(def agentNumber = 1; agentNumber <= agentCount; agentNumber++) { + agents["agent-${agentNumber}"] = { + catchError { + kibanaPipeline.withWorkers('flaky-test-runner', { + if (!IS_XPACK) { + kibanaPipeline.buildOss() + if (CI_GROUP == '1') { + runbld "./test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh" + } + } else { + kibanaPipeline.buildXpack() + } + }, getWorkerMap(agentNumber, params.NUMBER_EXECUTIONS.toInteger(), worker, workerFailures))() + } + } + } + + parallel(agents) + + currentBuild.description += ", Failures: ${workerFailures.size()}" + + if (workerFailures.size() > 0) { + print "There were ${workerFailures.size()} test suite failures." + print "The executions that failed were:" + print workerFailures.join("\n") + print "Please check 'Test Result' and 'Pipeline Steps' pages for more info" + } + } + } + } +} + +def getWorkerFromParams(isXpack, job, ciGroup) { + if (!isXpack) { + if (job == 'firefoxSmoke') { + return kibanaPipeline.getPostBuildWorker('firefoxSmoke', { runbld './test/scripts/jenkins_firefox_smoke.sh' }) + } else if(job == 'visualRegression') { + return kibanaPipeline.getPostBuildWorker('visualRegression', { runbld './test/scripts/jenkins_visual_regression.sh' }) + } else { + return kibanaPipeline.getOssCiGroupWorker(ciGroup) + } + } + + if (job == 'firefoxSmoke') { + return kibanaPipeline.getPostBuildWorker('xpack-firefoxSmoke', { runbld './test/scripts/jenkins_xpack_firefox_smoke.sh' }) + } else if(job == 'visualRegression') { + return kibanaPipeline.getPostBuildWorker('xpack-visualRegression', { runbld './test/scripts/jenkins_xpack_visual_regression.sh' }) + } else { + return kibanaPipeline.getXpackCiGroupWorker(ciGroup) + } +} + +def getWorkerMap(agentNumber, numberOfExecutions, worker, workerFailures, maxWorkers = 14) { + def workerMap = [:] + def numberOfWorkers = Math.min(numberOfExecutions, maxWorkers) + + for(def i = 1; i <= numberOfWorkers; i++) { + def workerExecutions = numberOfExecutions/numberOfWorkers + (i <= numberOfExecutions%numberOfWorkers ? 1 : 0) + + workerMap["agent-${agentNumber}-worker-${i}"] = { workerNumber -> + for(def j = 0; j < workerExecutions; j++) { + print "Execute agent-${agentNumber} worker-${workerNumber}: ${j}" + withEnv(["JOB=agent-${agentNumber}-worker-${workerNumber}-${j}"]) { + catchError { + try { + worker(workerNumber) + } catch (ex) { + workerFailures << "agent-${agentNumber} worker-${workerNumber}-${j}" + throw ex + } + } + } + } + } + } + + return workerMap +} + +def trunc(str, length) { + if (str.size() >= length) { + return str.take(length) + "..." 
+ } + + return str; +} diff --git a/Jenkinsfile b/Jenkinsfile index f9aafa694d680..4820de9876737 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,6 +1,7 @@ #!/bin/groovy library 'kibana-pipeline-library' +kibanaLibrary.load() stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit timeout(time: 180, unit: 'MINUTES') { @@ -8,301 +9,42 @@ stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a ansiColor('xterm') { catchError { parallel([ - 'kibana-intake-agent': legacyJobRunner('kibana-intake'), - 'x-pack-intake-agent': legacyJobRunner('x-pack-intake'), - 'kibana-oss-agent': withWorkers('kibana-oss-tests', { buildOss() }, [ - 'oss-ciGroup1': getOssCiGroupWorker(1), - 'oss-ciGroup2': getOssCiGroupWorker(2), - 'oss-ciGroup3': getOssCiGroupWorker(3), - 'oss-ciGroup4': getOssCiGroupWorker(4), - 'oss-ciGroup5': getOssCiGroupWorker(5), - 'oss-ciGroup6': getOssCiGroupWorker(6), - 'oss-ciGroup7': getOssCiGroupWorker(7), - 'oss-ciGroup8': getOssCiGroupWorker(8), - 'oss-ciGroup9': getOssCiGroupWorker(9), - 'oss-ciGroup10': getOssCiGroupWorker(10), - 'oss-ciGroup11': getOssCiGroupWorker(11), - 'oss-ciGroup12': getOssCiGroupWorker(12), - 'oss-firefoxSmoke': getPostBuildWorker('firefoxSmoke', { runbld './test/scripts/jenkins_firefox_smoke.sh' }), - // 'oss-visualRegression': getPostBuildWorker('visualRegression', { runbld './test/scripts/jenkins_visual_regression.sh' }), + 'kibana-intake-agent': kibanaPipeline.legacyJobRunner('kibana-intake'), + 'x-pack-intake-agent': kibanaPipeline.legacyJobRunner('x-pack-intake'), + 'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [ + 'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1), + 'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2), + 'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3), + 'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4), + 'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5), + 'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6), + 'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7), + 'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8), + 'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9), + 'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10), + 'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11), + 'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12), + 'oss-firefoxSmoke': kibanaPipeline.getPostBuildWorker('firefoxSmoke', { runbld './test/scripts/jenkins_firefox_smoke.sh' }), + // 'oss-visualRegression': kibanaPipeline.getPostBuildWorker('visualRegression', { runbld './test/scripts/jenkins_visual_regression.sh' }), ]), - 'kibana-xpack-agent': withWorkers('kibana-xpack-tests', { buildXpack() }, [ - 'xpack-ciGroup1': getXpackCiGroupWorker(1), - 'xpack-ciGroup2': getXpackCiGroupWorker(2), - 'xpack-ciGroup3': getXpackCiGroupWorker(3), - 'xpack-ciGroup4': getXpackCiGroupWorker(4), - 'xpack-ciGroup5': getXpackCiGroupWorker(5), - 'xpack-ciGroup6': getXpackCiGroupWorker(6), - 'xpack-ciGroup7': getXpackCiGroupWorker(7), - 'xpack-ciGroup8': getXpackCiGroupWorker(8), - 'xpack-ciGroup9': getXpackCiGroupWorker(9), - 'xpack-ciGroup10': getXpackCiGroupWorker(10), - 'xpack-firefoxSmoke': getPostBuildWorker('xpack-firefoxSmoke', { runbld './test/scripts/jenkins_xpack_firefox_smoke.sh' }), - // 'xpack-visualRegression': getPostBuildWorker('xpack-visualRegression', { runbld './test/scripts/jenkins_xpack_visual_regression.sh' }), + 'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { 
kibanaPipeline.buildXpack() }, [ + 'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1), + 'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2), + 'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3), + 'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4), + 'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5), + 'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6), + 'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7), + 'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8), + 'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9), + 'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10), + 'xpack-firefoxSmoke': kibanaPipeline.getPostBuildWorker('xpack-firefoxSmoke', { runbld './test/scripts/jenkins_xpack_firefox_smoke.sh' }), + // 'xpack-visualRegression': kibanaPipeline.getPostBuildWorker('xpack-visualRegression', { runbld './test/scripts/jenkins_xpack_visual_regression.sh' }), ]), ]) } - node('flyweight') { - // If the build doesn't have a result set by this point, there haven't been any errors and it can be marked as a success - // The e-mail plugin for the infra e-mail depends upon this being set - currentBuild.result = currentBuild.result ?: 'SUCCESS' - - sendMail() - } + kibanaPipeline.sendMail() } } } } - -def withWorkers(name, preWorkerClosure = {}, workerClosures = [:]) { - return { - jobRunner('tests-xl', true) { - try { - doSetup() - preWorkerClosure() - - def nextWorker = 1 - def worker = { workerClosure -> - def workerNumber = nextWorker - nextWorker++ - - return { - workerClosure(workerNumber) - } - } - - def workers = [:] - workerClosures.each { workerName, workerClosure -> - workers[workerName] = worker(workerClosure) - } - - parallel(workers) - } finally { - catchError { - uploadAllGcsArtifacts(name) - } - - catchError { - runbldJunit() - } - - catchError { - publishJunit() - } - - catchError { - runErrorReporter() - } - } - } - } -} - -def getPostBuildWorker(name, closure) { - return { workerNumber -> - def kibanaPort = "61${workerNumber}1" - def esPort = "61${workerNumber}2" - def esTransportPort = "61${workerNumber}3" - - withEnv([ - "CI_WORKER_NUMBER=${workerNumber}", - "TEST_KIBANA_HOST=localhost", - "TEST_KIBANA_PORT=${kibanaPort}", - "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}", - "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}", - "TEST_ES_TRANSPORT_PORT=${esTransportPort}", - "IS_PIPELINE_JOB=1", - ]) { - closure() - } - } -} - -def getOssCiGroupWorker(ciGroup) { - return getPostBuildWorker("ciGroup" + ciGroup, { - withEnv([ - "CI_GROUP=${ciGroup}", - "JOB=kibana-ciGroup${ciGroup}", - ]) { - runbld "./test/scripts/jenkins_ci_group.sh" - } - }) -} - -def getXpackCiGroupWorker(ciGroup) { - return getPostBuildWorker("xpack-ciGroup" + ciGroup, { - withEnv([ - "CI_GROUP=${ciGroup}", - "JOB=xpack-kibana-ciGroup${ciGroup}", - ]) { - runbld "./test/scripts/jenkins_xpack_ci_group.sh" - } - }) -} - -def legacyJobRunner(name) { - return { - parallel([ - "${name}": { - withEnv([ - "JOB=${name}", - ]) { - jobRunner('linux && immutable', false) { - try { - runbld('.ci/run.sh', true) - } finally { - catchError { - uploadAllGcsArtifacts(name) - } - catchError { - publishJunit() - } - catchError { - runErrorReporter() - } - } - } - } - } - ]) - } -} - -def jobRunner(label, useRamDisk, closure) { - node(label) { - if (useRamDisk) { - // Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm - def originalWorkspace = env.WORKSPACE - ws('/tmp/workspace') { - sh """ - mkdir 
-p /dev/shm/workspace - mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist - rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it - ln -s /dev/shm/workspace '${originalWorkspace}' - """ - } - } - - def scmVars = checkout scm - - withEnv([ - "CI=true", - "HOME=${env.JENKINS_HOME}", - "PR_SOURCE_BRANCH=${env.ghprbSourceBranch}", - "PR_TARGET_BRANCH=${env.ghprbTargetBranch}", - "PR_AUTHOR=${env.ghprbPullAuthorLogin}", - "TEST_BROWSER_HEADLESS=1", - "GIT_BRANCH=${scmVars.GIT_BRANCH}", - ]) { - withCredentials([ - string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'), - string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'), - string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'), - ]) { - // scm is configured to check out to the ./kibana directory - dir('kibana') { - closure() - } - } - } - } -} - -// TODO what should happen if GCS, Junit, or email publishing fails? Unstable build? Failed build? - -def uploadGcsArtifact(workerName, pattern) { - def storageLocation = "gs://kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}" // TODO - // def storageLocation = "gs://kibana-pipeline-testing/jobs/pipeline-test/${BUILD_NUMBER}/${workerName}" - - googleStorageUpload( - credentialsId: 'kibana-ci-gcs-plugin', - bucket: storageLocation, - pattern: pattern, - sharedPublicly: true, - showInline: true, - ) -} - -def uploadAllGcsArtifacts(workerName) { - def ARTIFACT_PATTERNS = [ - 'target/kibana-*', - 'target/junit/**/*', - 'test/**/screenshots/**/*.png', - 'test/functional/failure_debug/html/*.html', - 'x-pack/test/**/screenshots/**/*.png', - 'x-pack/test/functional/failure_debug/html/*.html', - 'x-pack/test/functional/apps/reporting/reports/session/*.pdf', - ] - - ARTIFACT_PATTERNS.each { pattern -> - uploadGcsArtifact(workerName, pattern) - } -} - -def publishJunit() { - junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true) -} - -def sendMail() { - sendInfraMail() - sendKibanaMail() -} - -def sendInfraMail() { - catchError { - step([ - $class: 'Mailer', - notifyEveryUnstableBuild: true, - recipients: 'infra-root+build@elastic.co', - sendToIndividuals: false - ]) - } -} - -def sendKibanaMail() { - catchError { - def buildStatus = buildUtils.getBuildStatus() - - if(params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') { - emailext( - to: 'build-kibana@elastic.co', - subject: "${env.JOB_NAME} - Build # ${env.BUILD_NUMBER} - ${buildStatus}", - body: '${SCRIPT,template="groovy-html.template"}', - mimeType: 'text/html', - ) - } - } -} - -def runbld(script, enableJunitProcessing = false) { - def extraConfig = enableJunitProcessing ? 
"" : "--config ${env.WORKSPACE}/kibana/.ci/runbld_no_junit.yml" - - sh "/usr/local/bin/runbld -d '${pwd()}' ${extraConfig} ${script}" -} - -def runbldJunit() { - sh "/usr/local/bin/runbld -d '${pwd()}' ${env.WORKSPACE}/kibana/test/scripts/jenkins_runbld_junit.sh" -} - -def bash(script) { - sh "#!/bin/bash\n${script}" -} - -def doSetup() { - runbld "./test/scripts/jenkins_setup.sh" -} - -def buildOss() { - runbld "./test/scripts/jenkins_build_kibana.sh" -} - -def buildXpack() { - runbld "./test/scripts/jenkins_xpack_build_kibana.sh" -} - -def runErrorReporter() { - bash """ - source src/dev/ci_setup/setup_env.sh - node scripts/report_failed_tests - """ -} diff --git a/src/dev/ci_setup/setup_env.sh b/src/dev/ci_setup/setup_env.sh index 3b239bd3ff731..805b77365e624 100644 --- a/src/dev/ci_setup/setup_env.sh +++ b/src/dev/ci_setup/setup_env.sh @@ -2,6 +2,10 @@ set -e +if [[ "$CI_ENV_SETUP" ]]; then + return 0 +fi + installNode=$1 dir="$(pwd)" @@ -152,3 +156,5 @@ if [[ -d "$ES_DIR" && -f "$ES_JAVA_PROP_PATH" ]]; then echo "Setting JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA" export JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA fi + +export CI_ENV_SETUP=true diff --git a/src/dev/precommit_hook/casing_check_config.js b/src/dev/precommit_hook/casing_check_config.js index 9829de2fd3920..edd818e1b42de 100644 --- a/src/dev/precommit_hook/casing_check_config.js +++ b/src/dev/precommit_hook/casing_check_config.js @@ -43,8 +43,9 @@ export const IGNORE_FILE_GLOBS = [ 'x-pack/docs/**/*', 'src/legacy/ui/public/assets/fonts/**/*', 'packages/kbn-utility-types/test-d/**/*', - 'Jenkinsfile', + '**/Jenkinsfile*', 'Dockerfile*', + 'vars/*', // Files in this directory must match a pre-determined name in some cases. 'x-pack/legacy/plugins/canvas/.storybook/*', diff --git a/test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh b/test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh new file mode 100755 index 0000000000000..4b16e3b32fefd --- /dev/null +++ b/test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +source src/dev/ci_setup/setup_env.sh + +cd test/plugin_functional/plugins/kbn_tp_sample_panel_action; +if [[ ! 
-d "target" ]]; then + checks-reporter-with-killswitch "Build kbn_tp_sample_panel_action" yarn build; +fi +cd -; diff --git a/test/scripts/jenkins_ci_group.sh b/test/scripts/jenkins_ci_group.sh index 4581be754a924..6c991f0ae1079 100755 --- a/test/scripts/jenkins_ci_group.sh +++ b/test/scripts/jenkins_ci_group.sh @@ -22,10 +22,7 @@ fi checks-reporter-with-killswitch "Functional tests / Group ${CI_GROUP}" yarn run grunt "run:functionalTests_ciGroup${CI_GROUP}"; if [ "$CI_GROUP" == "1" ]; then - # build kbn_tp_sample_panel_action - cd test/plugin_functional/plugins/kbn_tp_sample_panel_action; - checks-reporter-with-killswitch "Build kbn_tp_sample_panel_action" yarn build; - cd -; + source test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh yarn run grunt run:pluginFunctionalTestsRelease --from=source; yarn run grunt run:interpreterFunctionalTestsRelease; fi \ No newline at end of file diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy new file mode 100644 index 0000000000000..e8d7cc03edad0 --- /dev/null +++ b/vars/kibanaPipeline.groovy @@ -0,0 +1,251 @@ +def withWorkers(name, preWorkerClosure = {}, workerClosures = [:]) { + return { + jobRunner('tests-xl', true) { + try { + doSetup() + preWorkerClosure() + + def nextWorker = 1 + def worker = { workerClosure -> + def workerNumber = nextWorker + nextWorker++ + + return { + workerClosure(workerNumber) + } + } + + def workers = [:] + workerClosures.each { workerName, workerClosure -> + workers[workerName] = worker(workerClosure) + } + + parallel(workers) + } finally { + catchError { + uploadAllGcsArtifacts(name) + } + + catchError { + runbld.junit() + } + + catchError { + publishJunit() + } + + catchError { + runErrorReporter() + } + } + } + } +} + +def getPostBuildWorker(name, closure) { + return { workerNumber -> + def kibanaPort = "61${workerNumber}1" + def esPort = "61${workerNumber}2" + def esTransportPort = "61${workerNumber}3" + + withEnv([ + "CI_WORKER_NUMBER=${workerNumber}", + "TEST_KIBANA_HOST=localhost", + "TEST_KIBANA_PORT=${kibanaPort}", + "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}", + "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}", + "TEST_ES_TRANSPORT_PORT=${esTransportPort}", + "IS_PIPELINE_JOB=1", + ]) { + closure() + } + } +} + +def getOssCiGroupWorker(ciGroup) { + return getPostBuildWorker("ciGroup" + ciGroup, { + withEnv([ + "CI_GROUP=${ciGroup}", + "JOB=kibana-ciGroup${ciGroup}", + ]) { + runbld "./test/scripts/jenkins_ci_group.sh" + } + }) +} + +def getXpackCiGroupWorker(ciGroup) { + return getPostBuildWorker("xpack-ciGroup" + ciGroup, { + withEnv([ + "CI_GROUP=${ciGroup}", + "JOB=xpack-kibana-ciGroup${ciGroup}", + ]) { + runbld "./test/scripts/jenkins_xpack_ci_group.sh" + } + }) +} + +def legacyJobRunner(name) { + return { + parallel([ + "${name}": { + withEnv([ + "JOB=${name}", + ]) { + jobRunner('linux && immutable', false) { + try { + runbld('.ci/run.sh', true) + } finally { + catchError { + uploadAllGcsArtifacts(name) + } + catchError { + publishJunit() + } + catchError { + runErrorReporter() + } + } + } + } + } + ]) + } +} + +def jobRunner(label, useRamDisk, closure) { + node(label) { + if (useRamDisk) { + // Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm + def originalWorkspace = env.WORKSPACE + ws('/tmp/workspace') { + sh """ + mkdir -p /dev/shm/workspace + mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist + rm --preserve-root -rf '${originalWorkspace}' # then 
remove just the workspace, just in case there's stuff in it + ln -s /dev/shm/workspace '${originalWorkspace}' + """ + } + } + + def scmVars = checkout scm + + withEnv([ + "CI=true", + "HOME=${env.JENKINS_HOME}", + "PR_SOURCE_BRANCH=${env.ghprbSourceBranch ?: ''}", + "PR_TARGET_BRANCH=${env.ghprbTargetBranch ?: ''}", + "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}", + "TEST_BROWSER_HEADLESS=1", + "GIT_BRANCH=${scmVars.GIT_BRANCH}", + ]) { + withCredentials([ + string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'), + string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'), + string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'), + ]) { + // scm is configured to check out to the ./kibana directory + dir('kibana') { + closure() + } + } + } + } +} + +// TODO what should happen if GCS, Junit, or email publishing fails? Unstable build? Failed build? + +def uploadGcsArtifact(workerName, pattern) { + def storageLocation = "gs://kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}" // TODO + + googleStorageUpload( + credentialsId: 'kibana-ci-gcs-plugin', + bucket: storageLocation, + pattern: pattern, + sharedPublicly: true, + showInline: true, + ) +} + +def uploadAllGcsArtifacts(workerName) { + def ARTIFACT_PATTERNS = [ + 'target/kibana-*', + 'target/junit/**/*', + 'test/**/screenshots/**/*.png', + 'test/functional/failure_debug/html/*.html', + 'x-pack/test/**/screenshots/**/*.png', + 'x-pack/test/functional/failure_debug/html/*.html', + 'x-pack/test/functional/apps/reporting/reports/session/*.pdf', + ] + + ARTIFACT_PATTERNS.each { pattern -> + uploadGcsArtifact(workerName, pattern) + } +} + +def publishJunit() { + junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true) +} + +def sendMail() { + // If the build doesn't have a result set by this point, there haven't been any errors and it can be marked as a success + // The e-mail plugin for the infra e-mail depends upon this being set + currentBuild.result = currentBuild.result ?: 'SUCCESS' + + def buildStatus = buildUtils.getBuildStatus() + if (buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') { + node('flyweight') { + sendInfraMail() + sendKibanaMail() + } + } +} + +def sendInfraMail() { + catchError { + step([ + $class: 'Mailer', + notifyEveryUnstableBuild: true, + recipients: 'infra-root+build@elastic.co', + sendToIndividuals: false + ]) + } +} + +def sendKibanaMail() { + catchError { + def buildStatus = buildUtils.getBuildStatus() + if(params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') { + emailext( + to: 'build-kibana@elastic.co', + subject: "${env.JOB_NAME} - Build # ${env.BUILD_NUMBER} - ${buildStatus}", + body: '${SCRIPT,template="groovy-html.template"}', + mimeType: 'text/html', + ) + } + } +} + +def bash(script) { + sh "#!/bin/bash\n${script}" +} + +def doSetup() { + runbld "./test/scripts/jenkins_setup.sh" +} + +def buildOss() { + runbld "./test/scripts/jenkins_build_kibana.sh" +} + +def buildXpack() { + runbld "./test/scripts/jenkins_xpack_build_kibana.sh" +} + +def runErrorReporter() { + bash """ + source src/dev/ci_setup/setup_env.sh + node scripts/report_failed_tests + """ +} + +return this diff --git a/vars/runbld.groovy b/vars/runbld.groovy new file mode 100644 index 0000000000000..501e2421ca65b --- /dev/null +++ b/vars/runbld.groovy @@ -0,0 +1,11 @@ +def call(script, enableJunitProcessing = false) { + def extraConfig = enableJunitProcessing ? 
"" : "--config ${env.WORKSPACE}/kibana/.ci/runbld_no_junit.yml" + + sh "/usr/local/bin/runbld -d '${pwd()}' ${extraConfig} ${script}" +} + +def junit() { + sh "/usr/local/bin/runbld -d '${pwd()}' ${env.WORKSPACE}/kibana/test/scripts/jenkins_runbld_junit.sh" +} + +return this