1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements. See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership. The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License. You may obtain a copy of the License at
9 // http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied. See the License for the
15 // specific language governing permissions and limitations
27 buildDiscarder(logRotator(numToKeepStr: '15'))
28 timeout (time: 9, unit: 'HOURS')
31 disableConcurrentBuilds()
34 YETUS_RELEASE = '0.11.1'
35 // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
36 OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
37 OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
38 OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
39 OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
40 OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
43 PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
44 PERSONALITY_FILE = 'tools/personality.sh'
45 // This section of the docs tells folks not to use the javadoc tag. Older branches have our old version of the check for said tag.
46 AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
47 WHITESPACE_IGNORE_LIST = '.*/generated/.*'
48 // output from surefire; sadly the archive function in yetus only works on file names.
49 ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
50 // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
51 TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
52 EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
53 // TODO does hadoopcheck need to be jdk specific?
54 SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
55 DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
58 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
60 Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
61 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
64 stage ('scm-checkout') {
71 stage ('thirdparty installs') {
73 stage ('yetus install') {
75 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
76 dir('downloads-yetus') {
77 // can't just do a simple echo or the directory won't be created. :(
78 sh '''#!/usr/bin/env bash
79 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
82 sh '''#!/usr/bin/env bash
84 echo "Ensure we have a copy of Apache Yetus."
85 if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
86 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
87 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
88 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
90 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
91 --working-dir "${WORKSPACE}/downloads-yetus" \
92 --keys 'https://www.apache.org/dist/yetus/KEYS' \
93 "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
94 "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
95 mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
97 echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
100 YETUS_DIR="${WORKSPACE}/yetus-git"
101 rm -rf "${YETUS_DIR}"
102 echo "downloading from github"
103 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
105 if [ ! -d "${YETUS_DIR}" ]; then
106 echo "unpacking yetus into '${YETUS_DIR}'"
107 mkdir -p "${YETUS_DIR}"
108 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
111 // Set up the file we need at PERSONALITY_FILE location
113 sh """#!/usr/bin/env bash
115 echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
116 curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
119 stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
122 stage ('hadoop 2 cache') {
124 HADOOP2_VERSION="2.10.0"
127 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
128 dir('downloads-hadoop-2') {
129 sh '''#!/usr/bin/env bash
130 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
133 sh '''#!/usr/bin/env bash
135 echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
136 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
137 --working-dir "${WORKSPACE}/downloads-hadoop-2" \
138 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
139 "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
140 "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
141 for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
142 echo "Delete stale hadoop 2 cache ${stale}"
146 stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
149 stage ('hadoop 3 cache') {
151 HADOOP3_VERSION="3.1.1"
154 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
155 dir('downloads-hadoop-3') {
156 sh '''#!/usr/bin/env bash
157 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
160 sh '''#!/usr/bin/env bash
162 echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
163 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
164 --working-dir "${WORKSPACE}/downloads-hadoop-3" \
165 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
166 "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
167 "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
168 for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
169 echo "Delete stale hadoop 3 cache ${stale}"
173 stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
178 stage ('init health results') {
180 // stash with given name for all tests we might run, so that we can unstash all of them even if
181 // we skip some due to e.g. branch-specific JDK or Hadoop support
182 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
183 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
184 stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
185 stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
186 stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
187 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
190 stage ('health checks') {
192 stage ('yetus general check') {
199 BASEDIR = "${env.WORKSPACE}/component"
200 TESTS = "${env.SHALLOW_CHECKS}"
201 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
202 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
203 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
206 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
207 sh '''#!/usr/bin/env bash
209 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
210 echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
211 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
214 // since we have a new node definition we need to re-do the scm checkout
218 sh '''#!/usr/bin/env bash
220 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
221 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
222 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
223 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
225 // TODO roll this into the hbase_nightly_yetus script
226 sh '''#!/usr/bin/env bash
229 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
230 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
232 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
235 echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
241 stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
242 // Has to be relative to WORKSPACE.
243 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
244 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
245 publishHTML target: [
248 alwaysLinkToLastBuild: true,
249 // Has to be relative to WORKSPACE
250 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
251 reportFiles: 'console-report.html',
252 reportName: 'General Nightly Build Report'
257 stage ('yetus jdk7 checks') {
267 BASEDIR = "${env.WORKSPACE}/component"
268 TESTS = "${env.DEEP_CHECKS}"
269 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
270 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
271 SET_JAVA_HOME = "/usr/lib/jvm/java-7"
274 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
275 sh '''#!/usr/bin/env bash
277 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
278 echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
279 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
285 sh '''#!/usr/bin/env bash
287 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
288 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
289 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
290 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
292 sh '''#!/usr/bin/env bash
295 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
296 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
298 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
301 echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
307 stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
308 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
309 // zip surefire reports.
311 if [ -d "${OUTPUT_DIR}/archiver" ]; then
312 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
313 if [[ 0 -ne ${count} ]]; then
314 echo "zipping ${count} archived files"
315 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
317 echo "No archived files, skipping compressing."
320 echo "No archiver directory, skipping compressing."
323 // Has to be relative to WORKSPACE.
324 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
325 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
326 publishHTML target: [
329 alwaysLinkToLastBuild: true,
330 // Has to be relative to WORKSPACE.
331 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
332 reportFiles : 'console-report.html',
333 reportName : 'JDK7 Nightly Build Report'
338 stage ('yetus jdk8 hadoop2 checks') {
345 anyOf { branch 'branch-1*'; branch 'branch-2*' }
348 BASEDIR = "${env.WORKSPACE}/component"
349 TESTS = "${env.DEEP_CHECKS}"
350 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
351 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
352 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
355 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
356 sh '''#!/usr/bin/env bash
358 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
359 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
360 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
366 sh '''#!/usr/bin/env bash
368 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
369 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
370 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
371 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
373 sh '''#!/usr/bin/env bash
376 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
377 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
379 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
382 echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
388 stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
389 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
390 // zip surefire reports.
392 if [ -d "${OUTPUT_DIR}/archiver" ]; then
393 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
394 if [[ 0 -ne ${count} ]]; then
395 echo "zipping ${count} archived files"
396 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
398 echo "No archived files, skipping compressing."
401 echo "No archiver directory, skipping compressing."
404 // Has to be relative to WORKSPACE.
405 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
406 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
407 publishHTML target: [
410 alwaysLinkToLastBuild: true,
411 // Has to be relative to WORKSPACE.
412 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
413 reportFiles : 'console-report.html',
414 reportName : 'JDK8 Nightly Build Report (Hadoop2)'
419 stage ('yetus jdk8 hadoop3 checks') {
431 BASEDIR = "${env.WORKSPACE}/component"
432 TESTS = "${env.DEEP_CHECKS}"
433 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
434 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
435 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
436 // Activates hadoop 3.0 profile in maven runs.
437 HADOOP_PROFILE = '3.0'
440 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
441 sh '''#!/usr/bin/env bash
443 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
444 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
445 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
451 sh '''#!/usr/bin/env bash
453 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
454 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
455 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
456 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
458 sh '''#!/usr/bin/env bash
461 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
462 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
464 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
467 echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
473 stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
474 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
475 // zip surefire reports.
477 if [ -d "${OUTPUT_DIR}/archiver" ]; then
478 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
479 if [[ 0 -ne ${count} ]]; then
480 echo "zipping ${count} archived files"
481 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
483 echo "No archived files, skipping compressing."
486 echo "No archiver directory, skipping compressing."
489 // Has to be relative to WORKSPACE.
490 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
491 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
492 publishHTML target: [
495 alwaysLinkToLastBuild: true,
496 // Has to be relative to WORKSPACE.
497 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
498 reportFiles : 'console-report.html',
499 reportName : 'JDK8 Nightly Build Report (Hadoop3)'
504 stage ('yetus jdk11 hadoop3 checks') {
516 BASEDIR = "${env.WORKSPACE}/component"
517 TESTS = "${env.DEEP_CHECKS}"
518 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
519 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
520 SET_JAVA_HOME = "/usr/lib/jvm/java-11"
521 // Activates hadoop 3.0 profile in maven runs.
522 HADOOP_PROFILE = '3.0'
523 // ErrorProne is broken on JDK11, see HBASE-23894
524 SKIP_ERROR_PRONE = 'true'
527 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
528 sh '''#!/usr/bin/env bash
530 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
531 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
532 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
538 sh '''#!/usr/bin/env bash
540 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
541 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
542 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
543 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
545 sh '''#!/usr/bin/env bash
548 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
549 echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
551 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
554 echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
560 stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
561 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
562 // zip surefire reports.
564 if [ -d "${OUTPUT_DIR}/archiver" ]; then
565 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
566 if [[ 0 -ne ${count} ]]; then
567 echo "zipping ${count} archived files"
568 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
570 echo "No archived files, skipping compressing."
573 echo "No archiver directory, skipping compressing."
576 // Has to be relative to WORKSPACE.
577 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
578 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
579 publishHTML target: [
582 alwaysLinkToLastBuild: true,
583 // Has to be relative to WORKSPACE.
584 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
585 reportFiles : 'console-report.html',
586 reportName : 'JDK11 Nightly Build Report (Hadoop3)'
591 // This is meant to mimic what a release manager will do to create RCs.
592 // See http://hbase.apache.org/book.html#maven.release
593 // TODO (HBASE-23870): replace this with invocation of the release tool
594 stage ('packaging and integration') {
596 maven 'Maven (latest)'
597 // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
598 jdk "JDK 1.8 (latest)"
601 BASEDIR = "${env.WORKSPACE}/component"
605 echo "Setting up directories"
606 rm -rf "output-srctarball" && mkdir "output-srctarball"
607 rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
608 rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
609 rm -rf "hbase-install" && mkdir "hbase-install"
610 rm -rf "hbase-client" && mkdir "hbase-client"
611 rm -rf "hadoop-2" && mkdir "hadoop-2"
612 rm -rf "hadoop-3" && mkdir "hadoop-3"
613 rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
614 rm -rf ".m2-for-src" && mkdir ".m2-for-src"
615 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
616 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
618 sh '''#!/usr/bin/env bash
620 rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
621 "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
622 echo "got the following saved stats in 'output-srctarball/machine'"
623 ls -lh "output-srctarball/machine"
626 echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
627 if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
628 --intermediate-file-dir output-srctarball \
629 --unpack-temp-dir unpacked_src_tarball \
630 --maven-m2-initial .m2-for-repo \
631 --maven-m2-src-build .m2-for-src \
632 --clean-source-checkout \
633 "${env.BASEDIR}" ; then
634 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
636 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
640 echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
642 if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
643 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
646 install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
647 tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
648 client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
649 tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
652 echo "Attempting to use run an instance on top of Hadoop 2."
653 sh '''#!/bin/bash -xe
654 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
655 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
656 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
658 --working-dir output-integration/hadoop-2 \
659 --hbase-client-install "hbase-client" \
661 "hadoop-2/bin/hadoop" \
662 hadoop-2/share/hadoop/yarn/timelineservice \
663 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
664 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
665 hadoop-2/bin/mapred \
666 >output-integration/hadoop-2.log 2>&1 ; then
667 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
672 echo "Attempting to use run an instance on top of Hadoop 3."
674 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
675 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
676 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
678 --working-dir output-integration/hadoop-3 \
679 --hbase-client-install hbase-client \
681 hadoop-3/bin/hadoop \
682 hadoop-3/share/hadoop/yarn/timelineservice \
683 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
684 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
685 hadoop-3/bin/mapred \
686 >output-integration/hadoop-3.log 2>&1 ; then
687 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
690 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
691 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
693 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
694 --working-dir output-integration/hadoop-3-shaded \
695 --hbase-client-install hbase-client \
697 hadoop-3/bin/hadoop \
698 hadoop-3/share/hadoop/yarn/timelineservice \
699 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
700 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
701 hadoop-3/bin/mapred \
702 >output-integration/hadoop-3-shaded.log 2>&1 ; then
703 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
706 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
713 stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
714 archiveArtifacts artifacts: 'output-srctarball/*'
715 archiveArtifacts artifacts: 'output-srctarball/**/*'
716 archiveArtifacts artifacts: 'output-integration/*'
717 archiveArtifacts artifacts: 'output-integration/**/*'
728 unstash 'general-result'
729 unstash 'jdk7-result'
730 unstash 'jdk8-hadoop2-result'
731 unstash 'jdk8-hadoop3-result'
732 unstash 'jdk11-hadoop3-result'
733 unstash 'srctarball-result'
735 def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
736 "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
737 "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
738 "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
739 "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
740 'output-srctarball/commentfile',
741 'output-integration/commentfile']
744 echo currentBuild.result
745 echo currentBuild.durationString
746 def comment = "Results for branch ${env.BRANCH_NAME}\n"
747 comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
748 if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
749 comment += "(/) *{color:green}+1 overall{color}*\n"
751 comment += "(x) *{color:red}-1 overall{color}*\n"
752 // Ideally get the committer out of the change and @ mention them in the per-jira comment
754 comment += "----\ndetails (if available):\n\n"
756 echo "[DEBUG] trying to aggregate step-wise results"
757 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
758 echo "[INFO] Comment:"
761 echo "[DEBUG] checking to see if feature branch"
762 def jiras = getJirasToComment(env.BRANCH_NAME, [])
763 if (jiras.isEmpty()) {
764 echo "[DEBUG] non-feature branch, checking change messages for jira keys."
765 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
766 jiras = getJirasToCommentFromChangesets(currentBuild)
768 jiras.each { currentIssue ->
769 jiraComment issueKey: currentIssue, body: comment
771 } catch (Exception exception) {
772 echo "Got exception: ${exception}"
773 echo " ${exception.getStackTrace()}"
779 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
781 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
783 thisBuild.changeSets.each { cs ->
784 cs.getItems().each { change ->
785 CharSequence msg = change.msg
786 echo "change: ${change}"
788 echo " ${change.commitId}"
789 echo " ${change.author}"
791 seenJiras = getJirasToComment(msg, seenJiras)
797 List<String> getJirasToComment(CharSequence source, List<String> seen) {
798 source.eachMatch("HBASE-[0-9]+") { currentIssue ->
799 echo "[DEBUG] found jira key: ${currentIssue}"
800 if (currentIssue in seen) {
801 echo "[DEBUG] already commented on ${currentIssue}."
803 echo "[INFO] commenting on ${currentIssue}."