Commit

HBASE-28678 Make nightly builds for 3.x java 17 only and add java 17 test for 2.x (#6032)

Signed-off-by: Xin Sun <[email protected]>
(cherry picked from commit 8ff8748)
Apache9 committed Jun 27, 2024
1 parent 98572e5 commit 5cbf2a3
Showing 2 changed files with 137 additions and 22 deletions.
135 changes: 132 additions & 3 deletions dev-support/Jenkinsfile
@@ -203,8 +203,8 @@ pipeline {
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.SHALLOW_CHECKS}"
SET_JAVA_HOME = "/usr/lib/jvm/java-11"
JAVA8_HOME="/usr/lib/jvm/java-8"
SET_JAVA_HOME = getJavaHomeForYetusGeneralCheck(env.BRANCH_NAME)
JAVA8_HOME = "/usr/lib/jvm/java-8"
// Activates hadoop 3.0 profile in maven runs.
HADOOP_PROFILE = '3.0'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
@@ -305,7 +305,7 @@
}
}
when {
- branch 'branch-2*'
+ branch '*branch-2*'
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
@@ -415,6 +415,9 @@
label 'hbase'
}
}
when {
branch '*branch-2*'
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
@@ -525,6 +528,9 @@
label 'hbase'
}
}
when {
branch '*branch-2*'
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
@@ -629,6 +635,118 @@
}
}
}

stage ('yetus jdk17 hadoop3 checks') {
agent {
node {
label 'hbase'
}
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}"
SET_JAVA_HOME = "/usr/lib/jvm/java-17"
// Activates hadoop 3.0 profile in maven runs.
HADOOP_PROFILE = '3.0'
SKIP_ERRORPRONE = true
}
steps {
// Must do prior to anything else, since if one of them timesout we'll stash the commentfile
sh '''#!/usr/bin/env bash
set -e
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk17 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
'''
unstash 'yetus'
dir('component') {
checkout scm
}
sh '''#!/usr/bin/env bash
set -e
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
'''
script {
def ret = sh(
returnStatus: true,
script: '''#!/usr/bin/env bash
set -e
declare -i status=0
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk17 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
else
echo '(x) {color:red}-1 jdk17 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
status=1
fi
echo "-- For more information [see jdk17 report|${BUILD_URL}JDK17_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
exit "${status}"
'''
)
if (ret != 0) {
// mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
// test output. See HBASE-26339 for more details.
currentBuild.result = 'UNSTABLE'
}
}
}
post {
always {
stash name: 'jdk17-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
sh '''#!/bin/bash -e
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
else
echo "No archived files, skipping compressing."
fi
else
echo "No archiver directory, skipping compressing."
fi
'''
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
transfers: [
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
)
]
)
])
// remove the big test logs zip file, store the nightlies url in test_logs.html
sh '''#!/bin/bash -e
if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
rm -rf "${OUTPUT_DIR}/test_logs.zip"
python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
else
echo "No test_logs.zip, skipping"
fi
'''
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
allowMissing : true,
keepAll : true,
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK17 Nightly Build Report (Hadoop3)'
]
}
}
}

// This is meant to mimic what a release manager will do to create RCs.
// See http://hbase.apache.org/book.html#maven.release
// TODO (HBASE-23870): replace this with invocation of the release tool
@@ -824,12 +942,14 @@
unstash 'jdk8-hadoop2-result'
unstash 'jdk8-hadoop3-result'
unstash 'jdk11-hadoop3-result'
unstash 'jdk17-hadoop3-result'
unstash 'srctarball-result'
sh "printenv"
def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}/commentfile",
'output-srctarball/commentfile',
'output-integration/commentfile']
echo env.BRANCH_NAME
@@ -899,3 +1019,12 @@ List<String> getJirasToComment(CharSequence source, List<String> seen) {
}
return seen
}
@NonCPS
String getJavaHomeForYetusGeneralCheck(String branchName) {
// for 2.x, build with java 11, for 3.x, build with java 17
if (branchName.indexOf("branch-2") >=0) {
return "/usr/lib/jvm/java-11";
} else {
return "/usr/lib/jvm/java-17"
}
}
24 changes: 5 additions & 19 deletions dev-support/hbase-personality.sh
@@ -178,7 +178,7 @@ function personality_modules
# If we have HADOOP_PROFILE specified and we're on branch-2.x, pass along
# the hadoop.profile system property. Ensures that Hadoop2 and Hadoop3
# logic is not both activated within Maven.
if [[ -n "${HADOOP_PROFILE}" ]] && [[ "${PATCH_BRANCH}" = branch-2* ]] ; then
if [[ -n "${HADOOP_PROFILE}" ]] && [[ "${PATCH_BRANCH}" == *"branch-2"* ]] ; then
extra="${extra} -Dhadoop.profile=${HADOOP_PROFILE}"
fi

@@ -490,7 +490,7 @@ function shadedjars_rebuild
# If we have HADOOP_PROFILE specified and we're on branch-2.x, pass along
# the hadoop.profile system property. Ensures that Hadoop2 and Hadoop3
# logic is not both activated within Maven.
if [[ -n "${HADOOP_PROFILE}" ]] && [[ "${PATCH_BRANCH}" = branch-2* ]] ; then
if [[ -n "${HADOOP_PROFILE}" ]] && [[ "${PATCH_BRANCH}" = *"branch-2"* ]] ; then
maven_args+=("-Dhadoop.profile=${HADOOP_PROFILE}")
fi

@@ -580,29 +580,15 @@ function hadoopcheck_rebuild

# All supported Hadoop versions that we want to test the compilation with
# See the Hadoop section on prereqs in the HBase Reference Guide
if [[ "${PATCH_BRANCH}" = branch-2.4 ]]; then
yetus_info "Setting Hadoop 2 versions to test based on branch-2.4 rules."
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
hbase_hadoop2_versions="2.10.2"
else
hbase_hadoop2_versions="2.10.0 2.10.1 2.10.2"
fi
elif [[ "${PATCH_BRANCH}" = branch-2* ]]; then
if [[ "${PATCH_BRANCH}" = *"branch-2"* ]]; then
yetus_info "Setting Hadoop 2 versions to test based on branch-2.5+ rules."
hbase_hadoop2_versions="2.10.2"
else
yetus_info "Setting Hadoop 2 versions to null on master/feature branch rules since we do not support hadoop 2 for hbase 3.x any more."
hbase_hadoop2_versions=""
fi

if [[ "${PATCH_BRANCH}" = branch-2.4 ]]; then
yetus_info "Setting Hadoop 3 versions to test based on branch-2.4 rules"
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
hbase_hadoop3_versions="3.1.4 3.2.4 3.3.6"
else
hbase_hadoop3_versions="3.1.1 3.1.2 3.1.3 3.1.4 3.2.0 3.2.1 3.2.2 3.2.3 3.2.4 3.3.0 3.3.1 3.3.2 3.3.3 3.3.4 3.3.5 3.3.6"
fi
elif [[ "${PATCH_BRANCH}" = branch-2.5 ]]; then
if [[ "${PATCH_BRANCH}" = *"branch-2.5"* ]]; then
yetus_info "Setting Hadoop 3 versions to test based on branch-2.5 rules"
if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
hbase_hadoop3_versions="3.2.4 3.3.6"
@@ -642,7 +628,7 @@ function hadoopcheck_rebuild
done

hadoop_profile=""
if [[ "${PATCH_BRANCH}" = branch-2* ]]; then
if [[ "${PATCH_BRANCH}" == *"branch-2"* ]]; then
hadoop_profile="-Dhadoop.profile=3.0"
fi
for hadoopver in ${hbase_hadoop3_versions}; do
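The shell pattern change in hbase-personality.sh, from the glob branch-2* to *"branch-2"*, widens the [[ "${PATCH_BRANCH}" = pattern ]] tests so they match any branch name that contains "branch-2" (for example cherry-pick or feature branches), not only names that start with it. A minimal bash sketch of the difference; the branch names used below are hypothetical and not part of this commit:

#!/usr/bin/env bash
# Compare the old and new globs against a few hypothetical branch names.
for PATCH_BRANCH in "branch-2.6" "HBASE-28678-branch-2" "master"; do
  old="no"; new="no"
  # Old pattern: matches only names that start with "branch-2".
  if [[ "${PATCH_BRANCH}" = branch-2* ]]; then old="yes"; fi
  # New pattern: matches names that contain "branch-2" anywhere.
  if [[ "${PATCH_BRANCH}" = *"branch-2"* ]]; then new="yes"; fi
  echo "${PATCH_BRANCH}: old pattern=${old}, new pattern=${new}"
done

Running this prints a match for branch-2.6 under both patterns, a match for HBASE-28678-branch-2 only under the new pattern, and no match for master under either, so the 2.x-specific Hadoop profile and version handling also applies to branches that merely contain "branch-2" in their names.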
