// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
buildDiscarder(logRotator(numToKeepStr: '20'))
timeout (time: 16, unit: 'HOURS')
disableConcurrentBuilds()
YETUS_RELEASE = '0.12.0'
// Where we'll write everything from the different steps. We need a copy here so the final step can check for success/failure.
OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
PERSONALITY_FILE = 'tools/personality.sh'
// This section of the docs tells folks not to use the javadoc tag. Older branches have our old version of the check for said tag.
AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
WHITESPACE_IGNORE_LIST = '.*/generated/.*'
// Output from surefire; sadly the archive function in Yetus only works on file names.
ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
// These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
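// Published by the flaky-test job above; presumably consumed by the yetus personality so known-flaky tests are excluded from the nightly unit runs.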
// TODO does hadoopcheck need to be jdk specific?
SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
ASF_NIGHTLIES = 'https://nightlies.apache.org'
ASF_NIGHTLIES_BASE_ORI = "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}"
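// JOB_NAME can contain spaces; percent-encode them below so the links we publish are valid URLs.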
ASF_NIGHTLIES_BASE = "${ASF_NIGHTLIES_BASE_ORI.replaceAll(' ', '%20')}"
booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
Should only be used manually when e.g. there is some issue in yetus that we cannot work around and are checking a fix for.''')
booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
stage ('scm-checkout') {
stage ('thirdparty installs') {
stage ('yetus install') {
// Directory must be unique for each parallel stage, because Jenkins runs them in the same workspace :(
dir('downloads-yetus') {
// Can't just do a simple echo or the directory won't be created. :(
sh '''#!/usr/bin/env bash
echo "Make sure we have a directory for downloading dependencies: $(pwd)"
sh '''#!/usr/bin/env bash
echo "Ensure we have a copy of Apache Yetus."
if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
--working-dir "${WORKSPACE}/downloads-yetus" \
--keys 'https://www.apache.org/dist/yetus/KEYS' \
"${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
"yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
YETUS_DIR="${WORKSPACE}/yetus-git"
rm -rf "${YETUS_DIR}"
106 echo "downloading from github"
107 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
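# GitHub's /tarball/HEAD endpoint serves the current default-branch HEAD as a gzipped tarball with a generated top-level directory, hence the --strip-components 1 below.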
if [ ! -d "${YETUS_DIR}" ]; then
echo "unpacking yetus into '${YETUS_DIR}'"
mkdir -p "${YETUS_DIR}"
gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
// Set up the file we need at PERSONALITY_FILE location
sh """#!/usr/bin/env bash
echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
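// Stash so later stages, which may run on other executors with fresh workspaces, can unstash these tools instead of re-downloading them.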
stage ('hadoop 2 cache') {
HADOOP2_VERSION="2.10.0"
// Directory must be unique for each parallel stage, because Jenkins runs them in the same workspace :(
dir('downloads-hadoop-2') {
sh '''#!/usr/bin/env bash
echo "Make sure we have a directory for downloading dependencies: $(pwd)"
sh '''#!/usr/bin/env bash
echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
--working-dir "${WORKSPACE}/downloads-hadoop-2" \
--keys 'https://www.apache.org/dist/hadoop/common/KEYS' \
144 "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
145 "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
146 for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
147 echo "Delete stale hadoop 2 cache ${stale}"
151 stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
154 stage ('hadoop 3 cache') {
156 HADOOP3_VERSION="3.1.1"
// Directory must be unique for each parallel stage, because Jenkins runs them in the same workspace :(
dir('downloads-hadoop-3') {
sh '''#!/usr/bin/env bash
echo "Make sure we have a directory for downloading dependencies: $(pwd)"
sh '''#!/usr/bin/env bash
echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
--working-dir "${WORKSPACE}/downloads-hadoop-3" \
--keys 'https://www.apache.org/dist/hadoop/common/KEYS' \
172 "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
173 "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
174 for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
175 echo "Delete stale hadoop 3 cache ${stale}"
179 stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
184 stage ('init health results') {
186 // stash with given name for all tests we might run, so that we can unstash all of them even if
187 // we skip some due to e.g. branch-specific JDK or Hadoop support
188 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
189 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
190 stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
191 stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
192 stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
193 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
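// allowEmpty plus an includes pattern that matches nothing yields an empty stash, so a later unstash succeeds even for stages that never ran.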
stage ('health checks') {
stage ('yetus general check') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.SHALLOW_CHECKS}"
SET_JAVA_HOME = '/usr/lib/jvm/java-8'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
ASF_NIGHTLIES_GENERAL_CHECK_BASE="${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}"
// Must run before anything else, since if one of the later steps times out this is the commentfile we'll stash.
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
// Since we have a new node definition we need to re-do the SCM checkout.
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
// TODO roll this into the hbase_nightly_yetus script
script: '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
// mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
// test output. See HBASE-26339 for more details.
currentBuild.result = 'UNSTABLE'
stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
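// Bulky reports go to nightlies.apache.org over SSH; the Jenkins side keeps only the small redirect pages generated below.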
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/*-site/*,${env.OUTPUT_DIR_RELATIVE}/*-site/**/*"
if [ -d "${OUTPUT_DIR}/branch-site" ]; then
270 echo "Remove ${OUTPUT_DIR}/branch-site for saving space"
271 rm -rf "${OUTPUT_DIR}/branch-site"
272 python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/branch-site" > "${OUTPUT_DIR}/branch-site.html"
274 echo "No branch-site, skipping"
276 if [ -d "${OUTPUT_DIR}/patch-site" ]; then
277 echo "Remove ${OUTPUT_DIR}/patch-site for saving space"
278 rm -rf "${OUTPUT_DIR}/patch-site"
279 python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/patch-site" > "${OUTPUT_DIR}/patch-site.html"
281 echo "No patch-site, skipping"
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir: "${env.OUTPUT_DIR_RELATIVE}",
reportFiles: 'console-report.html',
reportName: 'General Nightly Build Report'
stage ('yetus jdk7 checks') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
SET_JAVA_HOME = "/usr/lib/jvm/java-7"
// Must run before anything else, since if one of the later steps times out this is the commentfile we'll stash.
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
script: '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
346 echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
// mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
// test output. See HBASE-26339 for more details.
currentBuild.result = 'UNSTABLE'
stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
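// allowEmptyResults below keeps the junit step from failing the build when a stage produced no test XML at all.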
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compressing."
echo "No archiver directory, skipping compressing."
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
// remove the big test logs zip file, store the nightlies URL in test_logs.html
if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
echo "Remove ${OUTPUT_DIR}/test_logs.zip to save space"
rm -rf "${OUTPUT_DIR}/test_logs.zip"
python2 "${BASEDIR}/dev-support/gen_redirect_html.py" "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
echo "No test_logs.zip, skipping"
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir: "${env.OUTPUT_DIR_RELATIVE}",
reportFiles: 'console-report.html',
reportName: 'JDK7 Nightly Build Report'
stage ('yetus jdk8 hadoop2 checks') {
anyOf { branch 'branch-1*'; branch 'branch-2*' }
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
SET_JAVA_HOME = '/usr/lib/jvm/java-8'
// Must run before anything else, since if one of the later steps times out this is the commentfile we'll stash.
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
script: '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
// mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
// test output. See HBASE-26339 for more details.
currentBuild.result = 'UNSTABLE'
stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compressing."
echo "No archiver directory, skipping compressing."
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
// remove the big test logs zip file, store the nightlies URL in test_logs.html
if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
echo "Remove ${OUTPUT_DIR}/test_logs.zip to save space"
rm -rf "${OUTPUT_DIR}/test_logs.zip"
python2 "${BASEDIR}/dev-support/gen_redirect_html.py" "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
echo "No test_logs.zip, skipping"
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir: "${env.OUTPUT_DIR_RELATIVE}",
reportFiles: 'console-report.html',
reportName: 'JDK8 Nightly Build Report (Hadoop2)'
stage ('yetus jdk8 hadoop3 checks') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
SET_JAVA_HOME = '/usr/lib/jvm/java-8'
// Activates hadoop 3.0 profile in maven runs.
HADOOP_PROFILE = '3.0'
// Must run before anything else, since if one of the later steps times out this is the commentfile we'll stash.
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
script: '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
// mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
// test output. See HBASE-26339 for more details.
currentBuild.result = 'UNSTABLE'
stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compressing."
echo "No archiver directory, skipping compressing."
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
// remove the big test logs zip file, store the nightlies URL in test_logs.html
if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
echo "Remove ${OUTPUT_DIR}/test_logs.zip to save space"
rm -rf "${OUTPUT_DIR}/test_logs.zip"
python2 "${BASEDIR}/dev-support/gen_redirect_html.py" "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
echo "No test_logs.zip, skipping"
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir: "${env.OUTPUT_DIR_RELATIVE}",
reportFiles: 'console-report.html',
reportName: 'JDK8 Nightly Build Report (Hadoop3)'
stage ('yetus jdk11 hadoop3 checks') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
SET_JAVA_HOME = "/usr/lib/jvm/java-11"
// Activates hadoop 3.0 profile in maven runs.
HADOOP_PROFILE = '3.0'
// ErrorProne is broken on JDK11, see HBASE-23894
SKIP_ERROR_PRONE = 'true'
// Must run before anything else, since if one of the later steps times out this is the commentfile we'll stash.
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
script: '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
// mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
// test output. See HBASE-26339 for more details.
currentBuild.result = 'UNSTABLE'
stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compressing."
echo "No archiver directory, skipping compressing."
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
// remove the big test logs zip file, store the nightlies URL in test_logs.html
if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
echo "Remove ${OUTPUT_DIR}/test_logs.zip to save space"
rm -rf "${OUTPUT_DIR}/test_logs.zip"
python2 "${BASEDIR}/dev-support/gen_redirect_html.py" "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
echo "No test_logs.zip, skipping"
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir: "${env.OUTPUT_DIR_RELATIVE}",
reportFiles: 'console-report.html',
reportName: 'JDK11 Nightly Build Report (Hadoop3)'
// This is meant to mimic what a release manager will do to create RCs.
// See http://hbase.apache.org/book.html#maven.release
// TODO (HBASE-23870): replace this with invocation of the release tool
stage ('packaging and integration') {
// This needs to be set to the JDK that ought to be used to build releases on the branch this Jenkinsfile is stored in.
BASEDIR = "${env.WORKSPACE}/component"
BRANCH = "${env.BRANCH_NAME}"
echo "Setting up directories"
rm -rf "output-srctarball" && mkdir "output-srctarball"
rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
rm -rf "hbase-install" && mkdir "hbase-install"
rm -rf "hbase-client" && mkdir "hbase-client"
rm -rf "hadoop-2" && mkdir "hadoop-2"
rm -rf "hadoop-3" && mkdir "hadoop-3"
rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
rm -rf ".m2-for-src" && mkdir ".m2-for-src"
774 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
775 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
777 sh '''#!/usr/bin/env bash
779 rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
780 "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
781 echo "got the following saved stats in 'output-srctarball/machine'"
782 ls -lh "output-srctarball/machine"
785 echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
786 if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
787 --intermediate-file-dir output-srctarball \
788 --unpack-temp-dir unpacked_src_tarball \
789 --maven-m2-initial .m2-for-repo \
790 --maven-m2-src-build .m2-for-src \
791 --clean-source-checkout \
792 "${env.BASEDIR}" ; then
793 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
795 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
799 echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
tar --strip-components=1 -xzf "${install_artifact}" -C "hbase-install"
client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
tar --strip-components=1 -xzf "${client_artifact}" -C "hbase-client"
sh '''#!/bin/bash -xe
if [[ "${BRANCH}" = branch-2* ]] || [[ "${BRANCH}" = branch-1* ]]; then
echo "Attempting to run an instance on top of Hadoop 2."
artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
--working-dir output-integration/hadoop-2 \
--hbase-client-install "hbase-client" \
"hadoop-2/bin/hadoop" \
hadoop-2/share/hadoop/yarn/timelineservice \
hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-2/bin/mapred \
>output-integration/hadoop-2.log 2>&1 ; then
echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
echo "Skipping run against Hadoop 2 for branch ${BRANCH}"
836 if [[ "${BRANCH}" = branch-1* ]]; then
837 echo "Skipping to run against Hadoop 3 for branch ${BRANCH}"
839 echo "Attempting to use run an instance on top of Hadoop 3."
840 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
841 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
842 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
844 --working-dir output-integration/hadoop-3 \
845 --hbase-client-install hbase-client \
847 hadoop-3/bin/hadoop \
848 hadoop-3/share/hadoop/yarn/timelineservice \
849 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
850 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
851 hadoop-3/bin/mapred \
852 >output-integration/hadoop-3.log 2>&1 ; then
853 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
856 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
857 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
859 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
860 --working-dir output-integration/hadoop-3-shaded \
861 --hbase-client-install hbase-client \
863 hadoop-3/bin/hadoop \
864 hadoop-3/share/hadoop/yarn/timelineservice \
865 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
866 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
867 hadoop-3/bin/mapred \
868 >output-integration/hadoop-3-shaded.log 2>&1 ; then
869 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
872 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
878 stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
879 sshPublisher(publishers: [
880 sshPublisherDesc(configName: 'Nightlies',
882 sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
883 sourceFiles: "output-srctarball/hbase-src.tar.gz"
// remove the big src tarball, store the nightlies URL in hbase-src.html
SRC_TAR="${WORKSPACE}/output-srctarball/hbase-src.tar.gz"
if [ -f "${SRC_TAR}" ]; then
echo "Remove ${SRC_TAR} to save space"
python2 "${BASEDIR}/dev-support/gen_redirect_html.py" "${ASF_NIGHTLIES_BASE}/output-srctarball" > "${WORKSPACE}/output-srctarball/hbase-src.html"
echo "No hbase-src.tar.gz, skipping"
archiveArtifacts artifacts: 'output-srctarball/*'
archiveArtifacts artifacts: 'output-srctarball/**/*'
archiveArtifacts artifacts: 'output-integration/*'
archiveArtifacts artifacts: 'output-integration/**/*'
unstash 'general-result'
unstash 'jdk7-result'
unstash 'jdk8-hadoop2-result'
unstash 'jdk8-hadoop3-result'
unstash 'jdk11-hadoop3-result'
unstash 'srctarball-result'
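// Each stage stashed a commentfile (or an empty placeholder if it was skipped); pull them all back and knit one Jira comment from whatever exists.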
def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
'output-srctarball/commentfile',
'output-integration/commentfile']
echo currentBuild.result
echo currentBuild.durationString
def comment = "Results for branch ${env.BRANCH_NAME}\n"
comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
comment += "(/) *{color:green}+1 overall{color}*\n"
comment += "(x) *{color:red}-1 overall{color}*\n"
// Ideally get the committer out of the change and @-mention them in the per-jira comment
939 comment += "----\ndetails (if available):\n\n"
941 echo "[DEBUG] trying to aggregate step-wise results"
942 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
943 echo "[INFO] Comment:"
946 echo "[DEBUG] checking to see if feature branch"
947 def jiras = getJirasToComment(env.BRANCH_NAME, [])
948 if (jiras.isEmpty()) {
949 echo "[DEBUG] non-feature branch, checking change messages for jira keys."
950 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
951 jiras = getJirasToCommentFromChangesets(currentBuild)
953 jiras.each { currentIssue ->
954 jiraComment issueKey: currentIssue, body: comment
956 } catch (Exception exception) {
957 echo "Got exception: ${exception}"
958 echo " ${exception.getStackTrace()}"
import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
thisBuild.changeSets.each { cs ->
cs.getItems().each { change ->
CharSequence msg = change.msg
echo "change: ${change}"
echo " ${change.commitId}"
echo " ${change.author}"
seenJiras = getJirasToComment(msg, seenJiras)
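// Scans the given text for HBASE-NNN issue keys, skipping any already in 'seen', and returns the updated list.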
List<String> getJirasToComment(CharSequence source, List<String> seen) {
source.eachMatch("HBASE-[0-9]+") { currentIssue ->
echo "[DEBUG] found jira key: ${currentIssue}"
if (currentIssue in seen) {
echo "[DEBUG] already commented on ${currentIssue}."
echo "[INFO] commenting on ${currentIssue}."