// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
buildDiscarder(logRotator(numToKeepStr: '15'))
timeout (time: 9, unit: 'HOURS')
disableConcurrentBuilds()
YETUS_RELEASE = '0.11.1'
// where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
PERSONALITY_FILE = 'tools/personality.sh'
// This section of the docs tells folks not to use the @author javadoc tag. older branches have our old version of the check for said tag.
AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
WHITESPACE_IGNORE_LIST = '.*/generated/.*'
// output from surefire; sadly the archive function in yetus only works on file names.
ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
// These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
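// Presumably consumed by the Yetus/personality setup below so that tests currently tracked as flaky
// by the HBase-Find-Flaky-Tests job get excluded from the nightly unit runs.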
// TODO does hadoopcheck need to be jdk specific?
SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
stage ('scm-checkout') {
stage ('thirdparty installs') {
stage ('yetus install') {
// directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
dir('downloads-yetus') {
// can't just do a simple echo or the directory won't be created. :(
sh '''#!/usr/bin/env bash
echo "Make sure we have a directory for downloading dependencies: $(pwd)"
sh '''#!/usr/bin/env bash
echo "Ensure we have a copy of Apache Yetus."
if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
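# cache-apache-project-artifact.sh (from dev-support/jenkins-scripts) is expected to pull the
# requested artifact from the Apache mirrors, verify it against the KEYS file given below, and
# leave it at the target path so subsequent nightly runs can reuse the cached copy.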
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
--working-dir "${WORKSPACE}/downloads-yetus" \
--keys 'https://www.apache.org/dist/yetus/KEYS' \
"${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
"yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
YETUS_DIR="${WORKSPACE}/yetus-git"
rm -rf "${YETUS_DIR}"
echo "downloading from github"
curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
if [ ! -d "${YETUS_DIR}" ]; then
echo "unpacking yetus into '${YETUS_DIR}'"
mkdir -p "${YETUS_DIR}"
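# --strip-components 1 drops the tarball's single top-level directory so the Yetus tools
# land directly under ${YETUS_DIR}.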
gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
// Set up the file we need at PERSONALITY_FILE location
sh """#!/usr/bin/env bash
echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
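// Stashing the unpacked Yetus install plus the personality script lets the parallel check stages
// below (which may land on different executors) unstash these instead of re-downloading them.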
stage ('hadoop 2 cache') {
HADOOP2_VERSION="2.8.5"
// directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
dir('downloads-hadoop-2') {
sh '''#!/usr/bin/env bash
echo "Make sure we have a directory for downloading dependencies: $(pwd)"
sh '''#!/usr/bin/env bash
echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
--working-dir "${WORKSPACE}/downloads-hadoop-2" \
--keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
"${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
"hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
echo "Delete stale hadoop 2 cache ${stale}"
stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
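// The cached Hadoop 2 tarball is reused by the 'packaging and integration' stage below, which
// untars it to run the pseudo-distributed client tests.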
stage ('hadoop 3 cache') {
HADOOP3_VERSION="3.1.1"
// directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
dir('downloads-hadoop-3') {
sh '''#!/usr/bin/env bash
echo "Make sure we have a directory for downloading dependencies: $(pwd)"
sh '''#!/usr/bin/env bash
echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
--working-dir "${WORKSPACE}/downloads-hadoop-3" \
--keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
"${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
"hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
echo "Delete stale hadoop 3 cache ${stale}"
stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
stage ('init health results') {
// stash with given name for all tests we might run, so that we can unstash all of them even if
// we skip some due to e.g. branch-specific JDK or Hadoop support
stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
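// The 'doesn't-match' include plus allowEmpty yields deliberately empty placeholder stashes, so the
// unstash calls in the final reporting step succeed even for stages that never run on this branch.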
stage ('health checks') {
stage ('yetus general check') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.SHALLOW_CHECKS}"
SET_JAVA_HOME = '/usr/lib/jvm/java-8'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
// Must do prior to anything else, since if one of them times out we'll stash the commentfile
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
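# Write a pessimistic comment up front; the Yetus step further down overwrites this file with a +1
# only if the checks actually pass.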
echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
// since we have a new node definition we need to re-do the scm checkout
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
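# gather_machine_environment.sh is expected to record details about the build host (e.g. OS,
# memory, process limits) next to the stage output so machine-specific failures can be diagnosed.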
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
// TODO roll this into the hbase_nightly_yetus script
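// hbase_nightly_yetus.sh is expected to assemble the test-patch invocation from the environment set
// above (TESTS, OUTPUT_DIR, SET_JAVA_HOME, and friends) plus the stashed Yetus install.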
sh '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE
reportDir: "${env.OUTPUT_DIR_RELATIVE}",
reportFiles: 'console-report.html',
reportName: 'General Nightly Build Report'
stage ('yetus jdk7 checks') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
SET_JAVA_HOME = "/usr/lib/jvm/java-7"
// Must do prior to anything else, since if one of them times out we'll stash the commentfile
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
sh '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
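# zip -m moves the files into the archive (deleting the originals) so the raw surefire output is not
# also swept up by the artifact archiving below.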
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compression."
echo "No archiver directory, skipping compression."
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK7 Nightly Build Report'
stage ('yetus jdk8 hadoop2 checks') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
SET_JAVA_HOME = '/usr/lib/jvm/java-8'
// Must do prior to anything else, since if one of them times out we'll stash the commentfile
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
sh '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compression."
echo "No archiver directory, skipping compression."
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK8 Nightly Build Report (Hadoop2)'
stage ('yetus jdk8 hadoop3 checks') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
SET_JAVA_HOME = '/usr/lib/jvm/java-8'
// Activates hadoop 3.0 profile in maven runs.
HADOOP_PROFILE = '3.0'
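// The personality presumably turns HADOOP_PROFILE into the matching -Dhadoop.profile flag on the
// maven command line.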
// Must do prior to anything else, since if one of them times out we'll stash the commentfile
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
sh '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compression."
echo "No archiver directory, skipping compression."
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK8 Nightly Build Report (Hadoop3)'
stage ('yetus jdk11 hadoop3 checks') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = "${env.DEEP_CHECKS}"
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
SET_JAVA_HOME = "/usr/lib/jvm/java-11"
// Activates hadoop 3.0 profile in maven runs.
HADOOP_PROFILE = '3.0'
// ErrorProne is broken on JDK11, see HBASE-23894
SKIP_ERROR_PRONE = 'true'
// Must do prior to anything else, since if one of them times out we'll stash the commentfile
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
sh '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see jdk11 report|${BUILD_URL}/JDK11_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compression."
echo "No archiver directory, skipping compression."
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK11 Nightly Build Report'
// This is meant to mimic what a release manager will do to create RCs.
// See http://hbase.apache.org/book.html#maven.release
// TODO (HBASE-23870): replace this with invocation of the release tool
stage ('packaging and integration') {
maven 'Maven (latest)'
// this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
jdk "JDK 1.8 (latest)"
BASEDIR = "${env.WORKSPACE}/component"
echo "Setting up directories"
rm -rf "output-srctarball" && mkdir "output-srctarball"
rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
rm -rf "hbase-install" && mkdir "hbase-install"
rm -rf "hbase-client" && mkdir "hbase-client"
rm -rf "hadoop-2" && mkdir "hadoop-2"
rm -rf "hadoop-3" && mkdir "hadoop-3"
rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
rm -rf ".m2-for-src" && mkdir ".m2-for-src"
echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
sh '''#!/usr/bin/env bash
rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
echo "got the following saved stats in 'output-srctarball/machine'"
ls -lh "output-srctarball/machine"
echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
--intermediate-file-dir output-srctarball \
--unpack-temp-dir unpacked_src_tarball \
--maven-m2-initial .m2-for-repo \
--maven-m2-src-build .m2-for-src \
--clean-source-checkout \
"${env.BASEDIR}" ; then
echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
tar --strip-components=1 -xzf "${install_artifact}" -C "hbase-install"
client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
tar --strip-components=1 -xzf "${client_artifact}" -C "hbase-client"
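# Note: hbase-*-bin.tar.gz sorts ahead of hbase-*-client-bin.tar.gz, so the head/tail split above
# should yield the full install tarball and the client tarball respectively (assuming the assembly
# still produces exactly those two archives).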
echo "Attempting to run an instance on top of Hadoop 2."
sh '''#!/bin/bash -xe
artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
--working-dir output-integration/hadoop-2 \
--hbase-client-install "hbase-client" \
"hadoop-2/bin/hadoop" \
hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-2/bin/mapred \
>output-integration/hadoop-2.log 2>&1 ; then
echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
echo "Attempting to run an instance on top of Hadoop 3."
artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
--working-dir output-integration/hadoop-3 \
--hbase-client-install hbase-client \
hadoop-3/bin/hadoop \
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-3/bin/mapred \
>output-integration/hadoop-3.log 2>&1 ; then
echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
echo "Attempting to run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
--hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
--working-dir output-integration/hadoop-3-shaded \
--hbase-client-install hbase-client \
hadoop-3/bin/hadoop \
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-3/bin/mapred \
>output-integration/hadoop-3-shaded.log 2>&1 ; then
echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
archiveArtifacts artifacts: 'output-srctarball/*'
archiveArtifacts artifacts: 'output-srctarball/**/*'
archiveArtifacts artifacts: 'output-integration/*'
archiveArtifacts artifacts: 'output-integration/**/*'
unstash 'general-result'
unstash 'jdk7-result'
unstash 'jdk8-hadoop2-result'
unstash 'jdk8-hadoop3-result'
unstash 'jdk11-hadoop3-result'
unstash 'srctarball-result'
def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
'output-srctarball/commentfile',
'output-integration/commentfile']
echo currentBuild.result
echo currentBuild.durationString
def comment = "Results for branch ${env.BRANCH_NAME}\n"
comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
comment += "(/) *{color:green}+1 overall{color}*\n"
comment += "(x) *{color:red}-1 overall{color}*\n"
// Ideally get the committer out of the change and @-mention them in the per-jira comment
comment += "----\ndetails (if available):\n\n"
echo "[DEBUG] trying to aggregate step-wise results"
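// Stages that never ran leave no commentfile; fileExists keeps them from breaking the aggregation
// and they simply contribute an empty string.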
comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
echo "[INFO] Comment:"
echo "[DEBUG] checking to see if feature branch"
def jiras = getJirasToComment(env.BRANCH_NAME, [])
if (jiras.isEmpty()) {
echo "[DEBUG] non-feature branch, checking change messages for jira keys."
echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
jiras = getJirasToCommentFromChangesets(currentBuild)
jiras.each { currentIssue ->
jiraComment issueKey: currentIssue, body: comment
} catch (Exception exception) {
echo "Got exception: ${exception}"
echo " ${exception.getStackTrace()}"
import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
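// Walks every changeset in the given build and harvests HBASE-NNN issue keys from the commit messages.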
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
thisBuild.changeSets.each { cs ->
cs.getItems().each { change ->
CharSequence msg = change.msg
echo "change: ${change}"
echo " ${change.commitId}"
echo " ${change.author}"
seenJiras = getJirasToComment(msg, seenJiras)
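// Scans the given text for HBASE-NNN issue keys, skipping any already present in 'seen'.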
List<String> getJirasToComment(CharSequence source, List<String> seen) {
source.eachMatch("HBASE-[0-9]+") { currentIssue ->
echo "[DEBUG] found jira key: ${currentIssue}"
if (currentIssue in seen) {
echo "[DEBUG] already commented on ${currentIssue}."
echo "[INFO] commenting on ${currentIssue}."