1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements. See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership. The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License. You may obtain a copy of the License at
9 // http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied. See the License for the
15 // specific language governing permissions and limitations
27 buildDiscarder(logRotator(numToKeepStr: '15'))
28 timeout (time: 16, unit: 'HOURS')
31 disableConcurrentBuilds()
34 YETUS_RELEASE = '0.12.0'
35 // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
36 OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
37 OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
38 OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
39 OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
40 OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
43 PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
44 PERSONALITY_FILE = 'tools/personality.sh'
45 // This section of the docs tells folks not to use the javadoc tag. Older branches have our old version of the check for said tag.
46 AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
47 WHITESPACE_IGNORE_LIST = '.*/generated/.*'
48 // output from surefire; sadly the archive function in yetus only works on file names.
49 ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
50 // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
51 TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
52 EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
53 // TODO does hadoopcheck need to be jdk specific?
54 SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
55 DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
58 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
60 Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
61 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
64 stage ('scm-checkout') {
71 stage ('thirdparty installs') {
73 stage ('yetus install') {
75 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
76 dir('downloads-yetus') {
77 // can't just do a simple echo or the directory won't be created. :(
78 sh '''#!/usr/bin/env bash
79 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
82 sh '''#!/usr/bin/env bash
84 echo "Ensure we have a copy of Apache Yetus."
85 if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
86 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
87 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
88 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
90 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
91 --working-dir "${WORKSPACE}/downloads-yetus" \
92 --keys 'https://www.apache.org/dist/yetus/KEYS' \
93 "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
94 "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
95 mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
97 echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
100 YETUS_DIR="${WORKSPACE}/yetus-git"
101 rm -rf "${YETUS_DIR}"
102 echo "downloading from github"
103 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
105 if [ ! -d "${YETUS_DIR}" ]; then
106 echo "unpacking yetus into '${YETUS_DIR}'"
107 mkdir -p "${YETUS_DIR}"
108 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
111 // Set up the file we need at PERSONALITY_FILE location
113 sh """#!/usr/bin/env bash
115 echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
116 curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
119 stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
122 stage ('hadoop 2 cache') {
124 HADOOP2_VERSION="2.10.0"
127 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
128 dir('downloads-hadoop-2') {
129 sh '''#!/usr/bin/env bash
130 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
133 sh '''#!/usr/bin/env bash
135 echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
136 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
137 --working-dir "${WORKSPACE}/downloads-hadoop-2" \
138 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
139 "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
140 "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
141 for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
142 echo "Delete stale hadoop 2 cache ${stale}"
146 stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
149 stage ('hadoop 3 cache') {
151 HADOOP3_VERSION="3.1.1"
154 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
155 dir('downloads-hadoop-3') {
156 sh '''#!/usr/bin/env bash
157 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
160 sh '''#!/usr/bin/env bash
162 echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
163 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
164 --working-dir "${WORKSPACE}/downloads-hadoop-3" \
165 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
166 "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
167 "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
168 for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
169 echo "Delete stale hadoop 3 cache ${stale}"
173 stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
178 stage ('init health results') {
180 // stash with given name for all tests we might run, so that we can unstash all of them even if
181 // we skip some due to e.g. branch-specific JDK or Hadoop support
182 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
183 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
184 stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
185 stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
186 stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
187 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
190 stage ('health checks') {
192 stage ('yetus general check') {
199 BASEDIR = "${env.WORKSPACE}/component"
200 TESTS = "${env.SHALLOW_CHECKS}"
201 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
202 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
203 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
206 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
207 sh '''#!/usr/bin/env bash
209 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
210 echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
211 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
214 // since we have a new node definition we need to re-do the scm checkout
218 sh '''#!/usr/bin/env bash
220 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
221 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
222 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
223 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
225 // TODO roll this into the hbase_nightly_yetus script
226 sh '''#!/usr/bin/env bash
229 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
230 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
232 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
235 echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
241 stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
242 // Has to be relative to WORKSPACE.
243 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
244 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
245 publishHTML target: [
248 alwaysLinkToLastBuild: true,
249 // Has to be relative to WORKSPACE
250 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
251 reportFiles: 'console-report.html',
252 reportName: 'General Nightly Build Report'
257 stage ('yetus jdk7 checks') {
267 BASEDIR = "${env.WORKSPACE}/component"
268 TESTS = "${env.DEEP_CHECKS}"
269 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
270 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
271 SET_JAVA_HOME = "/usr/lib/jvm/java-7"
274 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
275 sh '''#!/usr/bin/env bash
277 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
278 echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
279 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
285 sh '''#!/usr/bin/env bash
287 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
288 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
289 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
290 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
292 sh '''#!/usr/bin/env bash
295 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
296 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
298 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
301 echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
307 stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
308 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
309 // zip surefire reports.
311 if [ -d "${OUTPUT_DIR}/archiver" ]; then
312 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
313 if [[ 0 -ne ${count} ]]; then
314 echo "zipping ${count} archived files"
315 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
317 echo "No archived files, skipping compressing."
320 echo "No archiver directory, skipping compressing."
323 // Has to be relative to WORKSPACE.
324 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
325 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
326 publishHTML target: [
329 alwaysLinkToLastBuild: true,
330 // Has to be relative to WORKSPACE.
331 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
332 reportFiles : 'console-report.html',
333 reportName : 'JDK7 Nightly Build Report'
338 stage ('yetus jdk8 hadoop2 checks') {
345 anyOf { branch 'branch-1*'; branch 'branch-2*' }
348 BASEDIR = "${env.WORKSPACE}/component"
349 TESTS = "${env.DEEP_CHECKS}"
350 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
351 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
352 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
355 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
356 sh '''#!/usr/bin/env bash
358 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
359 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
360 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
366 sh '''#!/usr/bin/env bash
368 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
369 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
370 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
371 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
373 sh '''#!/usr/bin/env bash
376 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
377 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
379 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
382 echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
388 stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
389 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
390 // zip surefire reports.
392 if [ -d "${OUTPUT_DIR}/archiver" ]; then
393 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
394 if [[ 0 -ne ${count} ]]; then
395 echo "zipping ${count} archived files"
396 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
398 echo "No archived files, skipping compressing."
401 echo "No archiver directory, skipping compressing."
404 // Has to be relative to WORKSPACE.
405 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
406 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
407 publishHTML target: [
410 alwaysLinkToLastBuild: true,
411 // Has to be relative to WORKSPACE.
412 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
413 reportFiles : 'console-report.html',
414 reportName : 'JDK8 Nightly Build Report (Hadoop2)'
419 stage ('yetus jdk8 hadoop3 checks') {
431 BASEDIR = "${env.WORKSPACE}/component"
432 TESTS = "${env.DEEP_CHECKS}"
433 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
434 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
435 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
436 // Activates hadoop 3.0 profile in maven runs.
437 HADOOP_PROFILE = '3.0'
440 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
441 sh '''#!/usr/bin/env bash
443 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
444 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
445 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
451 sh '''#!/usr/bin/env bash
453 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
454 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
455 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
456 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
458 sh '''#!/usr/bin/env bash
461 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
462 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
464 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
467 echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
473 stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
474 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
475 // zip surefire reports.
477 if [ -d "${OUTPUT_DIR}/archiver" ]; then
478 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
479 if [[ 0 -ne ${count} ]]; then
480 echo "zipping ${count} archived files"
481 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
483 echo "No archived files, skipping compressing."
486 echo "No archiver directory, skipping compressing."
489 // Has to be relative to WORKSPACE.
490 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
491 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
492 publishHTML target: [
495 alwaysLinkToLastBuild: true,
496 // Has to be relative to WORKSPACE.
497 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
498 reportFiles : 'console-report.html',
499 reportName : 'JDK8 Nightly Build Report (Hadoop3)'
504 stage ('yetus jdk11 hadoop3 checks') {
516 BASEDIR = "${env.WORKSPACE}/component"
517 TESTS = "${env.DEEP_CHECKS}"
518 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
519 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
520 SET_JAVA_HOME = "/usr/lib/jvm/java-11"
521 // Activates hadoop 3.0 profile in maven runs.
522 HADOOP_PROFILE = '3.0'
523 // ErrorProne is broken on JDK11, see HBASE-23894
524 SKIP_ERROR_PRONE = 'true'
527 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
528 sh '''#!/usr/bin/env bash
530 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
531 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
532 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
538 sh '''#!/usr/bin/env bash
540 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
541 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
542 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
543 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
545 sh '''#!/usr/bin/env bash
548 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
549 echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
551 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
554 echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
560 stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
561 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
562 // zip surefire reports.
564 if [ -d "${OUTPUT_DIR}/archiver" ]; then
565 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
566 if [[ 0 -ne ${count} ]]; then
567 echo "zipping ${count} archived files"
568 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
570 echo "No archived files, skipping compressing."
573 echo "No archiver directory, skipping compressing."
576 // Has to be relative to WORKSPACE.
577 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
578 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
579 publishHTML target: [
582 alwaysLinkToLastBuild: true,
583 // Has to be relative to WORKSPACE.
584 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
585 reportFiles : 'console-report.html',
586 reportName : 'JDK11 Nightly Build Report (Hadoop3)'
591 // This is meant to mimic what a release manager will do to create RCs.
592 // See http://hbase.apache.org/book.html#maven.release
593 // TODO (HBASE-23870): replace this with invocation of the release tool
594 stage ('packaging and integration') {
597 // This needs to be set to the JDK that ought to be used to build releases on the branch this Jenkinsfile is stored in.
601 BASEDIR = "${env.WORKSPACE}/component"
602 BRANCH = "${env.BRANCH_NAME}"
606 echo "Setting up directories"
607 rm -rf "output-srctarball" && mkdir "output-srctarball"
608 rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
609 rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
610 rm -rf "hbase-install" && mkdir "hbase-install"
611 rm -rf "hbase-client" && mkdir "hbase-client"
612 rm -rf "hadoop-2" && mkdir "hadoop-2"
613 rm -rf "hadoop-3" && mkdir "hadoop-3"
614 rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
615 rm -rf ".m2-for-src" && mkdir ".m2-for-src"
616 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
617 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
619 sh '''#!/usr/bin/env bash
621 rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
622 "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
623 echo "got the following saved stats in 'output-srctarball/machine'"
624 ls -lh "output-srctarball/machine"
627 echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
628 if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
629 --intermediate-file-dir output-srctarball \
630 --unpack-temp-dir unpacked_src_tarball \
631 --maven-m2-initial .m2-for-repo \
632 --maven-m2-src-build .m2-for-src \
633 --clean-source-checkout \
634 "${env.BASEDIR}" ; then
635 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
637 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
641 echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
643 if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
644 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
647 install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
648 tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
649 client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
650 tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
653 sh '''#!/bin/bash -xe
654 if [[ "${BRANCH}" = branch-2* ]] || [[ "${BRANCH}" = branch-1* ]]; then
655 echo "Attempting to use run an instance on top of Hadoop 2."
656 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
657 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
658 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
660 --working-dir output-integration/hadoop-2 \
661 --hbase-client-install "hbase-client" \
663 "hadoop-2/bin/hadoop" \
664 hadoop-2/share/hadoop/yarn/timelineservice \
665 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
666 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
667 hadoop-2/bin/mapred \
668 >output-integration/hadoop-2.log 2>&1 ; then
669 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
673 echo "Skipping to run against Hadoop 2 for branch ${BRANCH}"
678 if [[ "${BRANCH}" = branch-1* ]]; then
679 echo "Skipping to run against Hadoop 3 for branch ${BRANCH}"
681 echo "Attempting to use run an instance on top of Hadoop 3."
682 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
683 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
684 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
686 --working-dir output-integration/hadoop-3 \
687 --hbase-client-install hbase-client \
689 hadoop-3/bin/hadoop \
690 hadoop-3/share/hadoop/yarn/timelineservice \
691 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
692 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
693 hadoop-3/bin/mapred \
694 >output-integration/hadoop-3.log 2>&1 ; then
695 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
698 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
699 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
701 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
702 --working-dir output-integration/hadoop-3-shaded \
703 --hbase-client-install hbase-client \
705 hadoop-3/bin/hadoop \
706 hadoop-3/share/hadoop/yarn/timelineservice \
707 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
708 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
709 hadoop-3/bin/mapred \
710 >output-integration/hadoop-3-shaded.log 2>&1 ; then
711 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
714 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
722 stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
723 archiveArtifacts artifacts: 'output-srctarball/*'
724 archiveArtifacts artifacts: 'output-srctarball/**/*'
725 archiveArtifacts artifacts: 'output-integration/*'
726 archiveArtifacts artifacts: 'output-integration/**/*'
737 unstash 'general-result'
738 unstash 'jdk7-result'
739 unstash 'jdk8-hadoop2-result'
740 unstash 'jdk8-hadoop3-result'
741 unstash 'jdk11-hadoop3-result'
742 unstash 'srctarball-result'
744 def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
745 "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
746 "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
747 "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
748 "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
749 'output-srctarball/commentfile',
750 'output-integration/commentfile']
753 echo currentBuild.result
754 echo currentBuild.durationString
755 def comment = "Results for branch ${env.BRANCH_NAME}\n"
756 comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
757 if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
758 comment += "(/) *{color:green}+1 overall{color}*\n"
760 comment += "(x) *{color:red}-1 overall{color}*\n"
761 // Ideally get the committer out of the change and @ mention them in the per-jira comment
763 comment += "----\ndetails (if available):\n\n"
765 echo "[DEBUG] trying to aggregate step-wise results"
766 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
767 echo "[INFO] Comment:"
770 echo "[DEBUG] checking to see if feature branch"
771 def jiras = getJirasToComment(env.BRANCH_NAME, [])
772 if (jiras.isEmpty()) {
773 echo "[DEBUG] non-feature branch, checking change messages for jira keys."
774 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
775 jiras = getJirasToCommentFromChangesets(currentBuild)
777 jiras.each { currentIssue ->
778 jiraComment issueKey: currentIssue, body: comment
780 } catch (Exception exception) {
781 echo "Got exception: ${exception}"
782 echo " ${exception.getStackTrace()}"
788 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
790 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
792 thisBuild.changeSets.each { cs ->
793 cs.getItems().each { change ->
794 CharSequence msg = change.msg
795 echo "change: ${change}"
797 echo " ${change.commitId}"
798 echo " ${change.author}"
800 seenJiras = getJirasToComment(msg, seenJiras)
806 List<String> getJirasToComment(CharSequence source, List<String> seen) {
807 source.eachMatch("HBASE-[0-9]+") { currentIssue ->
808 echo "[DEBUG] found jira key: ${currentIssue}"
809 if (currentIssue in seen) {
810 echo "[DEBUG] already commented on ${currentIssue}."
812 echo "[INFO] commenting on ${currentIssue}."