1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements. See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership. The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License. You may obtain a copy of the License at
9 // http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied. See the License for the
15 // specific language governing permissions and limitations
27 buildDiscarder(logRotator(numToKeepStr: '15'))
28 timeout (time: 16, unit: 'HOURS')
31 disableConcurrentBuilds()
// Pinned Apache Yetus release used to drive the precommit/nightly checks below.
34 YETUS_RELEASE = '0.12.0'
35 // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
36 OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
37 OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
38 OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
39 OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
40 OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
// Where we fetch the project-specific Yetus personality from, and where we place it locally.
43 PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
44 PERSONALITY_FILE = 'tools/personality.sh'
45 // This section of the docs tells folks not to use the javadoc tag. Older branches have our old version of the check for said tag.
46 AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
// Paths (regex) excluded from the Yetus whitespace check; generated sources are not hand-edited.
47 WHITESPACE_IGNORE_LIST = '.*/generated/.*'
48 // Output from surefire; sadly the archive function in yetus only works on file names.
49 ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
50 // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
51 TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
// URL serving the current list of known-flaky tests to exclude, produced by the flaky-test-finder job for this branch.
52 EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
53 // TODO does hadoopcheck need to be jdk specific?
54 SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
55 DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
58 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
60 Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
61 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
64 stage ('scm-checkout') {
71 stage ('thirdparty installs') {
73 stage ('yetus install') {
75 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
76 dir('downloads-yetus') {
77 // can't just do a simple echo or the directory won't be created. :(
78 sh '''#!/usr/bin/env bash
79 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
82 sh '''#!/usr/bin/env bash
84 echo "Ensure we have a copy of Apache Yetus."
85 if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
86 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
87 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
88 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
90 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
91 --working-dir "${WORKSPACE}/downloads-yetus" \
92 --keys 'https://www.apache.org/dist/yetus/KEYS' \
93 "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
94 "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
95 mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
97 echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
100 YETUS_DIR="${WORKSPACE}/yetus-git"
101 rm -rf "${YETUS_DIR}"
102 echo "downloading from github"
103 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
105 if [ ! -d "${YETUS_DIR}" ]; then
106 echo "unpacking yetus into '${YETUS_DIR}'"
107 mkdir -p "${YETUS_DIR}"
108 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
111 // Set up the file we need at PERSONALITY_FILE location
113 sh """#!/usr/bin/env bash
115 echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
116 curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
119 stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
122 stage ('hadoop 2 cache') {
124 HADOOP2_VERSION="2.10.0"
127 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
128 dir('downloads-hadoop-2') {
129 sh '''#!/usr/bin/env bash
130 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
133 sh '''#!/usr/bin/env bash
135 echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
136 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
137 --working-dir "${WORKSPACE}/downloads-hadoop-2" \
138 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
139 "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
140 "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
141 for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
142 echo "Delete stale hadoop 2 cache ${stale}"
146 stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
149 stage ('hadoop 3 cache') {
151 HADOOP3_VERSION="3.1.1"
154 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
155 dir('downloads-hadoop-3') {
156 sh '''#!/usr/bin/env bash
157 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
160 sh '''#!/usr/bin/env bash
162 echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
163 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
164 --working-dir "${WORKSPACE}/downloads-hadoop-3" \
165 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
166 "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
167 "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
168 for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
169 echo "Delete stale hadoop 3 cache ${stale}"
173 stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
178 stage ('init health results') {
180 // stash with given name for all tests we might run, so that we can unstash all of them even if
181 // we skip some due to e.g. branch-specific JDK or Hadoop support
182 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
183 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
184 stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
185 stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
186 stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
187 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
190 stage ('health checks') {
192 stage ('yetus general check') {
199 BASEDIR = "${env.WORKSPACE}/component"
200 TESTS = "${env.SHALLOW_CHECKS}"
201 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
202 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
203 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
206 // Must do this prior to anything else, since if one of them times out we'll stash the commentfile
207 sh '''#!/usr/bin/env bash
209 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
210 echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
211 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
214 // since we have a new node definition we need to re-do the scm checkout
218 sh '''#!/usr/bin/env bash
220 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
221 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
222 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
223 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
225 // TODO roll this into the hbase_nightly_yetus script
226 sh '''#!/usr/bin/env bash
229 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
230 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
232 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
235 echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
241 stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
242 // Has to be relative to WORKSPACE.
243 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
244 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
245 publishHTML target: [
248 alwaysLinkToLastBuild: true,
249 // Has to be relative to WORKSPACE
250 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
251 reportFiles: 'console-report.html',
252 reportName: 'General Nightly Build Report'
257 stage ('yetus jdk7 checks') {
267 BASEDIR = "${env.WORKSPACE}/component"
268 TESTS = "${env.DEEP_CHECKS}"
269 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
270 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
271 SET_JAVA_HOME = "/usr/lib/jvm/java-7"
274 // Must do this prior to anything else, since if one of them times out we'll stash the commentfile
275 sh '''#!/usr/bin/env bash
277 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
278 echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
279 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
285 sh '''#!/usr/bin/env bash
287 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
288 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
289 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
290 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
292 sh '''#!/usr/bin/env bash
295 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
296 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
298 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
301 echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
307 stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
308 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
309 // zip surefire reports.
311 if [ -d "${OUTPUT_DIR}/archiver" ]; then
312 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
313 if [[ 0 -ne ${count} ]]; then
314 echo "zipping ${count} archived files"
315 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
317 echo "No archived files, skipping compressing."
320 echo "No archiver directory, skipping compressing."
323 // Has to be relative to WORKSPACE.
324 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
325 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
326 publishHTML target: [
329 alwaysLinkToLastBuild: true,
330 // Has to be relative to WORKSPACE.
331 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
332 reportFiles : 'console-report.html',
333 reportName : 'JDK7 Nightly Build Report'
338 stage ('yetus jdk8 hadoop2 checks') {
345 BASEDIR = "${env.WORKSPACE}/component"
346 TESTS = "${env.DEEP_CHECKS}"
347 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
348 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
349 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
352 // Must do this prior to anything else, since if one of them times out we'll stash the commentfile
353 sh '''#!/usr/bin/env bash
355 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
356 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
357 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
363 sh '''#!/usr/bin/env bash
365 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
366 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
367 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
368 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
370 sh '''#!/usr/bin/env bash
373 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
374 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
376 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
379 echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
385 stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
386 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
387 // zip surefire reports.
389 if [ -d "${OUTPUT_DIR}/archiver" ]; then
390 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
391 if [[ 0 -ne ${count} ]]; then
392 echo "zipping ${count} archived files"
393 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
395 echo "No archived files, skipping compressing."
398 echo "No archiver directory, skipping compressing."
401 // Has to be relative to WORKSPACE.
402 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
403 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
404 publishHTML target: [
407 alwaysLinkToLastBuild: true,
408 // Has to be relative to WORKSPACE.
409 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
410 reportFiles : 'console-report.html',
411 reportName : 'JDK8 Nightly Build Report (Hadoop2)'
416 stage ('yetus jdk8 hadoop3 checks') {
428 BASEDIR = "${env.WORKSPACE}/component"
429 TESTS = "${env.DEEP_CHECKS}"
430 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
431 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
432 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
433 // Activates hadoop 3.0 profile in maven runs.
434 HADOOP_PROFILE = '3.0'
437 // Must do this prior to anything else, since if one of them times out we'll stash the commentfile
438 sh '''#!/usr/bin/env bash
440 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
441 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
442 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
448 sh '''#!/usr/bin/env bash
450 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
451 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
452 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
453 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
455 sh '''#!/usr/bin/env bash
458 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
459 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
461 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
464 echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
470 stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
471 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
472 // zip surefire reports.
474 if [ -d "${OUTPUT_DIR}/archiver" ]; then
475 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
476 if [[ 0 -ne ${count} ]]; then
477 echo "zipping ${count} archived files"
478 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
480 echo "No archived files, skipping compressing."
483 echo "No archiver directory, skipping compressing."
486 // Has to be relative to WORKSPACE.
487 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
488 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
489 publishHTML target: [
492 alwaysLinkToLastBuild: true,
493 // Has to be relative to WORKSPACE.
494 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
495 reportFiles : 'console-report.html',
496 reportName : 'JDK8 Nightly Build Report (Hadoop3)'
501 stage ('yetus jdk11 hadoop3 checks') {
513 BASEDIR = "${env.WORKSPACE}/component"
514 TESTS = "${env.DEEP_CHECKS}"
515 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
516 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
517 SET_JAVA_HOME = "/usr/lib/jvm/java-11"
518 // Activates hadoop 3.0 profile in maven runs.
519 HADOOP_PROFILE = '3.0'
520 // ErrorProne is broken on JDK11, see HBASE-23894
521 SKIP_ERROR_PRONE = 'true'
524 // Must do this prior to anything else, since if one of them times out we'll stash the commentfile
525 sh '''#!/usr/bin/env bash
527 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
528 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
529 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
535 sh '''#!/usr/bin/env bash
537 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
538 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
539 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
540 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
542 sh '''#!/usr/bin/env bash
545 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
546 echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
548 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
551 echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
557 stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
558 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
559 // zip surefire reports.
561 if [ -d "${OUTPUT_DIR}/archiver" ]; then
562 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
563 if [[ 0 -ne ${count} ]]; then
564 echo "zipping ${count} archived files"
565 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
567 echo "No archived files, skipping compressing."
570 echo "No archiver directory, skipping compressing."
573 // Has to be relative to WORKSPACE.
574 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
575 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
576 publishHTML target: [
579 alwaysLinkToLastBuild: true,
580 // Has to be relative to WORKSPACE.
581 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
582 reportFiles : 'console-report.html',
583 reportName : 'JDK11 Nightly Build Report (Hadoop3)'
588 // This is meant to mimic what a release manager will do to create RCs.
589 // See http://hbase.apache.org/book.html#maven.release
590 // TODO (HBASE-23870): replace this with invocation of the release tool
591 stage ('packaging and integration') {
594 // This needs to be set to the JDK that ought to be used to build releases on the branch this Jenkinsfile is stored in.
598 BASEDIR = "${env.WORKSPACE}/component"
599 BRANCH = "${env.BRANCH_NAME}"
603 echo "Setting up directories"
604 rm -rf "output-srctarball" && mkdir "output-srctarball"
605 rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
606 rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
607 rm -rf "hbase-install" && mkdir "hbase-install"
608 rm -rf "hbase-client" && mkdir "hbase-client"
609 rm -rf "hadoop-2" && mkdir "hadoop-2"
610 rm -rf "hadoop-3" && mkdir "hadoop-3"
611 rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
612 rm -rf ".m2-for-src" && mkdir ".m2-for-src"
613 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
614 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
616 sh '''#!/usr/bin/env bash
618 rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
619 "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
620 echo "got the following saved stats in 'output-srctarball/machine'"
621 ls -lh "output-srctarball/machine"
624 echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
625 if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
626 --intermediate-file-dir output-srctarball \
627 --unpack-temp-dir unpacked_src_tarball \
628 --maven-m2-initial .m2-for-repo \
629 --maven-m2-src-build .m2-for-src \
630 --clean-source-checkout \
631 "${env.BASEDIR}" ; then
632 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
634 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
638 echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
640 if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
641 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
644 install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
645 tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
646 client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
647 tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
650 sh '''#!/bin/bash -xe
651 if [[ "${BRANCH}" = branch-2* ]] || [[ "${BRANCH}" = branch-1* ]]; then
652 echo "Attempting to use run an instance on top of Hadoop 2."
653 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
654 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
655 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
657 --working-dir output-integration/hadoop-2 \
658 --hbase-client-install "hbase-client" \
660 "hadoop-2/bin/hadoop" \
661 hadoop-2/share/hadoop/yarn/timelineservice \
662 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
663 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
664 hadoop-2/bin/mapred \
665 >output-integration/hadoop-2.log 2>&1 ; then
666 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
670 echo "Skipping to run against Hadoop 2 for branch ${BRANCH}"
675 if [[ "${BRANCH}" = branch-1* ]]; then
676 echo "Skipping to run against Hadoop 3 for branch ${BRANCH}"
678 echo "Attempting to use run an instance on top of Hadoop 3."
679 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
680 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
681 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
683 --working-dir output-integration/hadoop-3 \
684 --hbase-client-install hbase-client \
686 hadoop-3/bin/hadoop \
687 hadoop-3/share/hadoop/yarn/timelineservice \
688 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
689 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
690 hadoop-3/bin/mapred \
691 >output-integration/hadoop-3.log 2>&1 ; then
692 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
695 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
696 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
698 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
699 --working-dir output-integration/hadoop-3-shaded \
700 --hbase-client-install hbase-client \
702 hadoop-3/bin/hadoop \
703 hadoop-3/share/hadoop/yarn/timelineservice \
704 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
705 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
706 hadoop-3/bin/mapred \
707 >output-integration/hadoop-3-shaded.log 2>&1 ; then
708 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
711 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
719 stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
720 archiveArtifacts artifacts: 'output-srctarball/*'
721 archiveArtifacts artifacts: 'output-srctarball/**/*'
722 archiveArtifacts artifacts: 'output-integration/*'
723 archiveArtifacts artifacts: 'output-integration/**/*'
734 unstash 'general-result'
735 unstash 'jdk7-result'
736 unstash 'jdk8-hadoop2-result'
737 unstash 'jdk8-hadoop3-result'
738 unstash 'jdk11-hadoop3-result'
739 unstash 'srctarball-result'
741 def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
742 "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
743 "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
744 "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
745 "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
746 'output-srctarball/commentfile',
747 'output-integration/commentfile']
750 echo currentBuild.result
751 echo currentBuild.durationString
752 def comment = "Results for branch ${env.BRANCH_NAME}\n"
753 comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
754 if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
755 comment += "(/) *{color:green}+1 overall{color}*\n"
757 comment += "(x) *{color:red}-1 overall{color}*\n"
758 // Ideally get the committer out of the change and @-mention them in the per-jira comment
760 comment += "----\ndetails (if available):\n\n"
762 echo "[DEBUG] trying to aggregate step-wise results"
763 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
764 echo "[INFO] Comment:"
767 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
768 getJirasToComment(currentBuild).each { currentIssue ->
769 jiraComment issueKey: currentIssue, body: comment
771 } catch (Exception exception) {
772 echo "Got exception: ${exception}"
773 echo " ${exception.getStackTrace()}"
779 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
781 List<String> getJirasToComment(RunWrapper thisBuild) {
783 thisBuild.changeSets.each { cs ->
784 cs.getItems().each { change ->
785 CharSequence msg = change.msg
786 echo "change: ${change}"
788 echo " ${change.commitId}"
789 echo " ${change.author}"
791 msg.eachMatch("HBASE-[0-9]+") { currentIssue ->
792 echo "[DEBUG] found jira key: ${currentIssue}"
793 if (currentIssue in seenJiras) {
794 echo "[DEBUG] already commented on ${currentIssue}."
796 echo "[INFO] commenting on ${currentIssue}."
797 seenJiras << currentIssue