1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements. See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership. The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License. You may obtain a copy of the License at
9 // http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied. See the License for the
15 // specific language governing permissions and limitations
27 buildDiscarder(logRotator(numToKeepStr: '15'))
28 timeout (time: 16, unit: 'HOURS')
31 disableConcurrentBuilds()
34 YETUS_RELEASE = '0.12.0'
35 // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
36 OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
37 OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
38 OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
39 OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
40 OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
43 PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
44 PERSONALITY_FILE = 'tools/personality.sh'
45 // This section of the docs tells folks not to use the javadoc tag. Older branches have our old version of the check for said tag.
46 AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
47 WHITESPACE_IGNORE_LIST = '.*/generated/.*'
48 // output from surefire; sadly the archive function in yetus only works on file names.
49 ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
50 // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
51 TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
52 EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
53 // TODO does hadoopcheck need to be jdk specific?
54 SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
55 DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
56 ASF_NIGHTLIES = 'https://nightlies.apache.org'
59 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
61 Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
62 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
65 stage ('scm-checkout') {
72 stage ('thirdparty installs') {
74 stage ('yetus install') {
76 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
77 dir('downloads-yetus') {
78 // can't just do a simple echo or the directory won't be created. :(
79 sh '''#!/usr/bin/env bash
80 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
83 sh '''#!/usr/bin/env bash
85 echo "Ensure we have a copy of Apache Yetus."
86 if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
87 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
88 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
89 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
91 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
92 --working-dir "${WORKSPACE}/downloads-yetus" \
93 --keys 'https://www.apache.org/dist/yetus/KEYS' \
94 "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
95 "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
96 mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
98 echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
101 YETUS_DIR="${WORKSPACE}/yetus-git"
102 rm -rf "${YETUS_DIR}"
103 echo "downloading from github"
104 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
106 if [ ! -d "${YETUS_DIR}" ]; then
107 echo "unpacking yetus into '${YETUS_DIR}'"
108 mkdir -p "${YETUS_DIR}"
109 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
112 // Set up the file we need at PERSONALITY_FILE location
114 sh """#!/usr/bin/env bash
116 echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
117 curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
120 stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
123 stage ('hadoop 2 cache') {
125 HADOOP2_VERSION="2.10.0"
128 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
129 dir('downloads-hadoop-2') {
130 sh '''#!/usr/bin/env bash
131 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
134 sh '''#!/usr/bin/env bash
136 echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
137 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
138 --working-dir "${WORKSPACE}/downloads-hadoop-2" \
139 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
140 "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
141 "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
142 for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
143 echo "Delete stale hadoop 2 cache ${stale}"
147 stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
150 stage ('hadoop 3 cache') {
152 HADOOP3_VERSION="3.1.1"
155 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
156 dir('downloads-hadoop-3') {
157 sh '''#!/usr/bin/env bash
158 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
161 sh '''#!/usr/bin/env bash
163 echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
164 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
165 --working-dir "${WORKSPACE}/downloads-hadoop-3" \
166 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
167 "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
168 "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
169 for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
170 echo "Delete stale hadoop 3 cache ${stale}"
174 stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
179 stage ('init health results') {
181 // stash with given name for all tests we might run, so that we can unstash all of them even if
182 // we skip some due to e.g. branch-specific JDK or Hadoop support
183 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
184 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
185 stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
186 stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
187 stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
188 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
191 stage ('health checks') {
193 stage ('yetus general check') {
200 BASEDIR = "${env.WORKSPACE}/component"
201 TESTS = "${env.SHALLOW_CHECKS}"
202 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
203 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
204 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
207 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
208 sh '''#!/usr/bin/env bash
210 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
211 echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
212 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
215 // since we have a new node definition we need to re-do the scm checkout
219 sh '''#!/usr/bin/env bash
221 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
222 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
223 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
224 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
226 // TODO roll this into the hbase_nightly_yetus script
230 script: '''#!/usr/bin/env bash
233 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
234 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
236 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
239 echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
244 // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
245 // test output. See HBASE-26339 for more details.
246 currentBuild.result = 'UNSTABLE'
252 stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
253 // Has to be relative to WORKSPACE.
254 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
255 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
256 publishHTML target: [
259 alwaysLinkToLastBuild: true,
260 // Has to be relative to WORKSPACE
261 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
262 reportFiles: 'console-report.html',
263 reportName: 'General Nightly Build Report'
268 stage ('yetus jdk7 checks') {
278 BASEDIR = "${env.WORKSPACE}/component"
279 TESTS = "${env.DEEP_CHECKS}"
280 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
281 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
282 SET_JAVA_HOME = "/usr/lib/jvm/java-7"
285 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
286 sh '''#!/usr/bin/env bash
288 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
289 echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
290 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
296 sh '''#!/usr/bin/env bash
298 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
299 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
300 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
301 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
306 script: '''#!/usr/bin/env bash
309 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
310 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
312 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
315 echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
320 // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
321 // test output. See HBASE-26339 for more details.
322 currentBuild.result = 'UNSTABLE'
328 stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
329 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
330 // zip surefire reports.
332 if [ -d "${OUTPUT_DIR}/archiver" ]; then
333 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
334 if [[ 0 -ne ${count} ]]; then
335 echo "zipping ${count} archived files"
336 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
338 echo "No archived files, skipping compressing."
341 echo "No archiver directory, skipping compressing."
344 sshPublisher(publishers: [
345 sshPublisherDesc(configName: 'Nightlies',
347 sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
348 sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
353 // remove the big test logs zip file, store the nightlies url in test_logs.txt
355 if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
356 echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
357 rm -rf "${OUTPUT_DIR}/test_logs.zip"
358 echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
360 echo "No test_logs.zip, skipping"
363 // Has to be relative to WORKSPACE.
364 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
365 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
366 publishHTML target: [
369 alwaysLinkToLastBuild: true,
370 // Has to be relative to WORKSPACE.
371 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
372 reportFiles : 'console-report.html',
373 reportName : 'JDK7 Nightly Build Report'
378 stage ('yetus jdk8 hadoop2 checks') {
385 anyOf { branch 'branch-1*'; branch 'branch-2*' }
388 BASEDIR = "${env.WORKSPACE}/component"
389 TESTS = "${env.DEEP_CHECKS}"
390 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
391 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
392 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
395 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
396 sh '''#!/usr/bin/env bash
398 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
399 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
400 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
406 sh '''#!/usr/bin/env bash
408 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
409 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
410 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
411 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
416 script: '''#!/usr/bin/env bash
419 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
420 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
422 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
425 echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
430 // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
431 // test output. See HBASE-26339 for more details.
432 currentBuild.result = 'UNSTABLE'
438 stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
439 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
440 // zip surefire reports.
442 if [ -d "${OUTPUT_DIR}/archiver" ]; then
443 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
444 if [[ 0 -ne ${count} ]]; then
445 echo "zipping ${count} archived files"
446 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
448 echo "No archived files, skipping compressing."
451 echo "No archiver directory, skipping compressing."
454 sshPublisher(publishers: [
455 sshPublisherDesc(configName: 'Nightlies',
457 sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
458 sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
463 // remove the big test logs zip file, store the nightlies url in test_logs.txt
465 if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
466 echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
467 rm -rf "${OUTPUT_DIR}/test_logs.zip"
468 echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
470 echo "No test_logs.zip, skipping"
473 // Has to be relative to WORKSPACE.
474 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
475 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
476 publishHTML target: [
479 alwaysLinkToLastBuild: true,
480 // Has to be relative to WORKSPACE.
481 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
482 reportFiles : 'console-report.html',
483 reportName : 'JDK8 Nightly Build Report (Hadoop2)'
488 stage ('yetus jdk8 hadoop3 checks') {
500 BASEDIR = "${env.WORKSPACE}/component"
501 TESTS = "${env.DEEP_CHECKS}"
502 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
503 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
504 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
505 // Activates hadoop 3.0 profile in maven runs.
506 HADOOP_PROFILE = '3.0'
509 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
510 sh '''#!/usr/bin/env bash
512 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
513 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
514 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
520 sh '''#!/usr/bin/env bash
522 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
523 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
524 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
525 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
530 script: '''#!/usr/bin/env bash
533 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
534 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
536 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
539 echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
544 // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
545 // test output. See HBASE-26339 for more details.
546 currentBuild.result = 'UNSTABLE'
552 stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
553 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
554 // zip surefire reports.
556 if [ -d "${OUTPUT_DIR}/archiver" ]; then
557 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
558 if [[ 0 -ne ${count} ]]; then
559 echo "zipping ${count} archived files"
560 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
562 echo "No archived files, skipping compressing."
565 echo "No archiver directory, skipping compressing."
568 sshPublisher(publishers: [
569 sshPublisherDesc(configName: 'Nightlies',
571 sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
572 sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
577 // remove the big test logs zip file, store the nightlies url in test_logs.txt
579 if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
580 echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
581 rm -rf "${OUTPUT_DIR}/test_logs.zip"
582 echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
584 echo "No test_logs.zip, skipping"
587 // Has to be relative to WORKSPACE.
588 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
589 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
590 publishHTML target: [
593 alwaysLinkToLastBuild: true,
594 // Has to be relative to WORKSPACE.
595 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
596 reportFiles : 'console-report.html',
597 reportName : 'JDK8 Nightly Build Report (Hadoop3)'
602 stage ('yetus jdk11 hadoop3 checks') {
614 BASEDIR = "${env.WORKSPACE}/component"
615 TESTS = "${env.DEEP_CHECKS}"
616 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
617 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
618 SET_JAVA_HOME = "/usr/lib/jvm/java-11"
619 // Activates hadoop 3.0 profile in maven runs.
620 HADOOP_PROFILE = '3.0'
621 // ErrorProne is broken on JDK11, see HBASE-23894
622 SKIP_ERROR_PRONE = 'true'
625 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
626 sh '''#!/usr/bin/env bash
628 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
629 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
630 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
636 sh '''#!/usr/bin/env bash
638 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
639 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
640 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
641 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
646 script: '''#!/usr/bin/env bash
649 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
650 echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
652 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
655 echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
660 // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
661 // test output. See HBASE-26339 for more details.
662 currentBuild.result = 'UNSTABLE'
668 stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
669 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
670 // zip surefire reports.
672 if [ -d "${OUTPUT_DIR}/archiver" ]; then
673 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
674 if [[ 0 -ne ${count} ]]; then
675 echo "zipping ${count} archived files"
676 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
678 echo "No archived files, skipping compressing."
681 echo "No archiver directory, skipping compressing."
684 sshPublisher(publishers: [
685 sshPublisherDesc(configName: 'Nightlies',
687 sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
688 sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
693 // remove the big test logs zip file, store the nightlies url in test_logs.txt
695 if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
696 echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
697 rm -rf "${OUTPUT_DIR}/test_logs.zip"
698 echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
700 echo "No test_logs.zip, skipping"
703 // Has to be relative to WORKSPACE.
704 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
705 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
706 publishHTML target: [
709 alwaysLinkToLastBuild: true,
710 // Has to be relative to WORKSPACE.
711 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
712 reportFiles : 'console-report.html',
713 reportName : 'JDK11 Nightly Build Report (Hadoop3)'
718 // This is meant to mimic what a release manager will do to create RCs.
719 // See http://hbase.apache.org/book.html#maven.release
720 // TODO (HBASE-23870): replace this with invocation of the release tool
721 stage ('packaging and integration') {
724 // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
728 BASEDIR = "${env.WORKSPACE}/component"
729 BRANCH = "${env.BRANCH_NAME}"
733 echo "Setting up directories"
734 rm -rf "output-srctarball" && mkdir "output-srctarball"
735 rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
736 rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
737 rm -rf "hbase-install" && mkdir "hbase-install"
738 rm -rf "hbase-client" && mkdir "hbase-client"
739 rm -rf "hadoop-2" && mkdir "hadoop-2"
740 rm -rf "hadoop-3" && mkdir "hadoop-3"
741 rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
742 rm -rf ".m2-for-src" && mkdir ".m2-for-src"
743 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
744 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
746 sh '''#!/usr/bin/env bash
748 rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
749 "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
750 echo "got the following saved stats in 'output-srctarball/machine'"
751 ls -lh "output-srctarball/machine"
754 echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
755 if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
756 --intermediate-file-dir output-srctarball \
757 --unpack-temp-dir unpacked_src_tarball \
758 --maven-m2-initial .m2-for-repo \
759 --maven-m2-src-build .m2-for-src \
760 --clean-source-checkout \
761 "${env.BASEDIR}" ; then
762 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
764 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
768 echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
770 if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
771 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
774 install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
775 tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
776 client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
777 tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
780 sh '''#!/bin/bash -xe
781 if [[ "${BRANCH}" = branch-2* ]] || [[ "${BRANCH}" = branch-1* ]]; then
782 echo "Attempting to use run an instance on top of Hadoop 2."
783 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
784 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
785 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
787 --working-dir output-integration/hadoop-2 \
788 --hbase-client-install "hbase-client" \
790 "hadoop-2/bin/hadoop" \
791 hadoop-2/share/hadoop/yarn/timelineservice \
792 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
793 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
794 hadoop-2/bin/mapred \
795 >output-integration/hadoop-2.log 2>&1 ; then
796 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
800 echo "Skipping to run against Hadoop 2 for branch ${BRANCH}"
805 if [[ "${BRANCH}" = branch-1* ]]; then
806 echo "Skipping to run against Hadoop 3 for branch ${BRANCH}"
808 echo "Attempting to use run an instance on top of Hadoop 3."
809 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
810 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
811 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
813 --working-dir output-integration/hadoop-3 \
814 --hbase-client-install hbase-client \
816 hadoop-3/bin/hadoop \
817 hadoop-3/share/hadoop/yarn/timelineservice \
818 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
819 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
820 hadoop-3/bin/mapred \
821 >output-integration/hadoop-3.log 2>&1 ; then
822 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
825 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
826 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
828 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
829 --working-dir output-integration/hadoop-3-shaded \
830 --hbase-client-install hbase-client \
832 hadoop-3/bin/hadoop \
833 hadoop-3/share/hadoop/yarn/timelineservice \
834 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
835 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
836 hadoop-3/bin/mapred \
837 >output-integration/hadoop-3-shaded.log 2>&1 ; then
838 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
841 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
849 stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
850 archiveArtifacts artifacts: 'output-srctarball/*'
851 archiveArtifacts artifacts: 'output-srctarball/**/*'
852 archiveArtifacts artifacts: 'output-integration/*'
853 archiveArtifacts artifacts: 'output-integration/**/*'
864 unstash 'general-result'
865 unstash 'jdk7-result'
866 unstash 'jdk8-hadoop2-result'
867 unstash 'jdk8-hadoop3-result'
868 unstash 'jdk11-hadoop3-result'
869 unstash 'srctarball-result'
871 def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
872 "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
873 "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
874 "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
875 "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
876 'output-srctarball/commentfile',
877 'output-integration/commentfile']
880 echo currentBuild.result
881 echo currentBuild.durationString
882 def comment = "Results for branch ${env.BRANCH_NAME}\n"
883 comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
884 if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
885 comment += "(/) *{color:green}+1 overall{color}*\n"
887 comment += "(x) *{color:red}-1 overall{color}*\n"
888 // Ideally get the committer out of the change and @ mention them in the per-jira comment
890 comment += "----\ndetails (if available):\n\n"
892 echo "[DEBUG] trying to aggregate step-wise results"
893 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
894 echo "[INFO] Comment:"
897 echo "[DEBUG] checking to see if feature branch"
898 def jiras = getJirasToComment(env.BRANCH_NAME, [])
899 if (jiras.isEmpty()) {
900 echo "[DEBUG] non-feature branch, checking change messages for jira keys."
901 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
902 jiras = getJirasToCommentFromChangesets(currentBuild)
904 jiras.each { currentIssue ->
905 jiraComment issueKey: currentIssue, body: comment
907 } catch (Exception exception) {
908 echo "Got exception: ${exception}"
909 echo " ${exception.getStackTrace()}"
915 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
917 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
919 thisBuild.changeSets.each { cs ->
920 cs.getItems().each { change ->
921 CharSequence msg = change.msg
922 echo "change: ${change}"
924 echo " ${change.commitId}"
925 echo " ${change.author}"
927 seenJiras = getJirasToComment(msg, seenJiras)
933 List<String> getJirasToComment(CharSequence source, List<String> seen) {
934 source.eachMatch("HBASE-[0-9]+") { currentIssue ->
935 echo "[DEBUG] found jira key: ${currentIssue}"
936 if (currentIssue in seen) {
937 echo "[DEBUG] already commented on ${currentIssue}."
939 echo "[INFO] commenting on ${currentIssue}."