1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements. See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership. The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License. You may obtain a copy of the License at
9 // http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied. See the License for the
15 // specific language governing permissions and limitations
27 buildDiscarder(logRotator(numToKeepStr: '15'))
28 timeout (time: 9, unit: 'HOURS')
31 disableConcurrentBuilds()
34 YETUS_RELEASE = '0.11.1'
35 // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
36 OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
37 OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
38 OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
39 OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
40 OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
43 PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
44 PERSONALITY_FILE = 'tools/personality.sh'
45 // This section of the docs tells folks not to use the javadoc tag. Older branches have our old version of the check for said tag.
46 AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
47 WHITESPACE_IGNORE_LIST = '.*/generated/.*'
48 // output from surefire; sadly the archive function in yetus only works on file names.
49 ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
50 // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
51 TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
52 EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
53 // TODO does hadoopcheck need to be jdk specific?
54 SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
55 DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
58 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
60 Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
61 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
64 stage ('scm-checkout') {
71 stage ('thirdparty installs') {
73 stage ('yetus install') {
75 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
76 dir('downloads-yetus') {
77 // can't just do a simple echo or the directory won't be created. :(
78 sh '''#!/usr/bin/env bash
79 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
82 sh '''#!/usr/bin/env bash
84 echo "Ensure we have a copy of Apache Yetus."
85 if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
86 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
87 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
88 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
90 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
91 --working-dir "${WORKSPACE}/downloads-yetus" \
92 --keys 'https://www.apache.org/dist/yetus/KEYS' \
93 "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
94 "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
95 mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
97 echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
100 YETUS_DIR="${WORKSPACE}/yetus-git"
101 rm -rf "${YETUS_DIR}"
102 echo "downloading from github"
103 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
105 if [ ! -d "${YETUS_DIR}" ]; then
106 echo "unpacking yetus into '${YETUS_DIR}'"
107 mkdir -p "${YETUS_DIR}"
108 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
111 // Set up the file we need at PERSONALITY_FILE location
113 sh """#!/usr/bin/env bash
115 echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
116 curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
119 stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
122 stage ('hadoop 2 cache') {
124 HADOOP2_VERSION="2.8.5"
127 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
128 dir('downloads-hadoop-2') {
129 sh '''#!/usr/bin/env bash
130 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
133 sh '''#!/usr/bin/env bash
135 echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
136 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
137 --working-dir "${WORKSPACE}/downloads-hadoop-2" \
138 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
139 "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
140 "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
141 for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
142 echo "Delete stale hadoop 2 cache ${stale}"
146 stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
149 stage ('hadoop 3 cache') {
151 HADOOP3_VERSION="3.1.1"
154 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
155 dir('downloads-hadoop-3') {
156 sh '''#!/usr/bin/env bash
157 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
160 sh '''#!/usr/bin/env bash
162 echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
163 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
164 --working-dir "${WORKSPACE}/downloads-hadoop-3" \
165 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
166 "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
167 "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
168 for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
169 echo "Delete stale hadoop 3 cache ${stale}"
173 stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
178 stage ('init health results') {
180 // stash with given name for all tests we might run, so that we can unstash all of them even if
181 // we skip some due to e.g. branch-specific JDK or Hadoop support
182 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
183 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
184 stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
185 stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
186 stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
187 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
190 stage ('health checks') {
192 stage ('yetus general check') {
199 BASEDIR = "${env.WORKSPACE}/component"
200 TESTS = "${env.SHALLOW_CHECKS}"
201 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
202 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
203 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
206 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
207 sh '''#!/usr/bin/env bash
209 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
210 echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
211 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
214 // since we have a new node definition we need to re-do the scm checkout
218 sh '''#!/usr/bin/env bash
220 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
221 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
222 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
223 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
225 // TODO roll this into the hbase_nightly_yetus script
226 sh '''#!/usr/bin/env bash
229 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
230 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
232 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
235 echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
241 stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
242 // Has to be relative to WORKSPACE.
243 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
244 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
245 publishHTML target: [
248 alwaysLinkToLastBuild: true,
249 // Has to be relative to WORKSPACE
250 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
251 reportFiles: 'console-report.html',
252 reportName: 'General Nightly Build Report'
257 stage ('yetus jdk7 checks') {
267 BASEDIR = "${env.WORKSPACE}/component"
268 TESTS = "${env.DEEP_CHECKS}"
269 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
270 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
271 SET_JAVA_HOME = "/usr/lib/jvm/java-7"
274 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
275 sh '''#!/usr/bin/env bash
277 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
278 echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
279 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
285 sh '''#!/usr/bin/env bash
287 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
288 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
289 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
290 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
292 sh '''#!/usr/bin/env bash
295 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
296 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
298 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
301 echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
307 stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
308 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
309 // zip surefire reports.
311 if [ -d "${OUTPUT_DIR}/archiver" ]; then
312 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
313 if [[ 0 -ne ${count} ]]; then
314 echo "zipping ${count} archived files"
315 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
317 echo "No archived files, skipping compressing."
320 echo "No archiver directory, skipping compressing."
323 // Has to be relative to WORKSPACE.
324 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
325 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
326 publishHTML target: [
329 alwaysLinkToLastBuild: true,
330 // Has to be relative to WORKSPACE.
331 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
332 reportFiles : 'console-report.html',
333 reportName : 'JDK7 Nightly Build Report'
338 stage ('yetus jdk8 hadoop2 checks') {
345 BASEDIR = "${env.WORKSPACE}/component"
346 TESTS = "${env.DEEP_CHECKS}"
347 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
348 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
349 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
352 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
353 sh '''#!/usr/bin/env bash
355 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
356 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
357 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
363 sh '''#!/usr/bin/env bash
365 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
366 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
367 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
368 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
370 sh '''#!/usr/bin/env bash
373 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
374 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
376 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
379 echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
385 stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
386 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
387 // zip surefire reports.
389 if [ -d "${OUTPUT_DIR}/archiver" ]; then
390 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
391 if [[ 0 -ne ${count} ]]; then
392 echo "zipping ${count} archived files"
393 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
395 echo "No archived files, skipping compressing."
398 echo "No archiver directory, skipping compressing."
401 // Has to be relative to WORKSPACE.
402 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
403 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
404 publishHTML target: [
407 alwaysLinkToLastBuild: true,
408 // Has to be relative to WORKSPACE.
409 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
410 reportFiles : 'console-report.html',
411 reportName : 'JDK8 Nightly Build Report (Hadoop2)'
416 stage ('yetus jdk8 hadoop3 checks') {
428 BASEDIR = "${env.WORKSPACE}/component"
429 TESTS = "${env.DEEP_CHECKS}"
430 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
431 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
432 SET_JAVA_HOME = '/usr/lib/jvm/java-8'
433 // Activates hadoop 3.0 profile in maven runs.
434 HADOOP_PROFILE = '3.0'
437 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
438 sh '''#!/usr/bin/env bash
440 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
441 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
442 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
448 sh '''#!/usr/bin/env bash
450 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
451 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
452 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
453 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
455 sh '''#!/usr/bin/env bash
458 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
459 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
461 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
464 echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
470 stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
471 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
472 // zip surefire reports.
474 if [ -d "${OUTPUT_DIR}/archiver" ]; then
475 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
476 if [[ 0 -ne ${count} ]]; then
477 echo "zipping ${count} archived files"
478 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
480 echo "No archived files, skipping compressing."
483 echo "No archiver directory, skipping compressing."
486 // Has to be relative to WORKSPACE.
487 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
488 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
489 publishHTML target: [
492 alwaysLinkToLastBuild: true,
493 // Has to be relative to WORKSPACE.
494 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
495 reportFiles : 'console-report.html',
496 reportName : 'JDK8 Nightly Build Report (Hadoop3)'
501 stage ('yetus jdk11 hadoop3 checks') {
513 BASEDIR = "${env.WORKSPACE}/component"
514 TESTS = "${env.DEEP_CHECKS}"
515 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
516 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
517 SET_JAVA_HOME = "/usr/lib/jvm/java-11"
518 // Activates hadoop 3.0 profile in maven runs.
519 HADOOP_PROFILE = '3.0'
520 // ErrorProne is broken on JDK11, see HBASE-23894
521 SKIP_ERROR_PRONE = 'true'
522 // vote -0 on JDK11 unit failures until HBASE-22972
523 TESTS_FILTER = "${TESTS_FILTER},unit"
526 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
527 sh '''#!/usr/bin/env bash
529 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
530 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
531 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
537 sh '''#!/usr/bin/env bash
539 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
540 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
541 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
542 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
544 sh '''#!/usr/bin/env bash
547 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
548 echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
550 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
553 echo "-- For more information [see jdk11 report|${BUILD_URL}/JDK11_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
559 stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
560 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
561 // zip surefire reports.
563 if [ -d "${OUTPUT_DIR}/archiver" ]; then
564 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
565 if [[ 0 -ne ${count} ]]; then
566 echo "zipping ${count} archived files"
567 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
569 echo "No archived files, skipping compressing."
572 echo "No archiver directory, skipping compressing."
575 // Has to be relative to WORKSPACE.
576 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
577 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
578 publishHTML target: [
581 alwaysLinkToLastBuild: true,
582 // Has to be relative to WORKSPACE.
583 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
584 reportFiles : 'console-report.html',
585 reportName : 'JDK11 Nightly Build Report (Hadoop3)'
590 // This is meant to mimic what a release manager will do to create RCs.
591 // See http://hbase.apache.org/book.html#maven.release
592 // TODO (HBASE-23870): replace this with invocation of the release tool
593 stage ('packaging and integration') {
595 maven 'Maven (latest)'
596 // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
597 jdk "JDK 1.8 (latest)"
600 BASEDIR = "${env.WORKSPACE}/component"
604 echo "Setting up directories"
605 rm -rf "output-srctarball" && mkdir "output-srctarball"
606 rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
607 rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
608 rm -rf "hbase-install" && mkdir "hbase-install"
609 rm -rf "hbase-client" && mkdir "hbase-client"
610 rm -rf "hadoop-2" && mkdir "hadoop-2"
611 rm -rf "hadoop-3" && mkdir "hadoop-3"
612 rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
613 rm -rf ".m2-for-src" && mkdir ".m2-for-src"
614 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
615 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
617 sh '''#!/usr/bin/env bash
619 rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
620 "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
621 echo "got the following saved stats in 'output-srctarball/machine'"
622 ls -lh "output-srctarball/machine"
625 echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
626 if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
627 --intermediate-file-dir output-srctarball \
628 --unpack-temp-dir unpacked_src_tarball \
629 --maven-m2-initial .m2-for-repo \
630 --maven-m2-src-build .m2-for-src \
631 --clean-source-checkout \
632 "${env.BASEDIR}" ; then
633 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
635 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
639 echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
641 if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
642 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
645 install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
646 tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
647 client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
648 tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
651 echo "Attempting to use run an instance on top of Hadoop 2."
652 sh '''#!/bin/bash -xe
653 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
654 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
655 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
657 --working-dir output-integration/hadoop-2 \
658 --hbase-client-install "hbase-client" \
660 "hadoop-2/bin/hadoop" \
661 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
662 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
663 hadoop-2/bin/mapred \
664 >output-integration/hadoop-2.log 2>&1 ; then
665 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
670 echo "Attempting to use run an instance on top of Hadoop 3."
672 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
673 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
674 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
676 --working-dir output-integration/hadoop-3 \
677 --hbase-client-install hbase-client \
679 hadoop-3/bin/hadoop \
680 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
681 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
682 hadoop-3/bin/mapred \
683 >output-integration/hadoop-3.log 2>&1 ; then
684 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
687 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
688 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
690 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
691 --working-dir output-integration/hadoop-3-shaded \
692 --hbase-client-install hbase-client \
694 hadoop-3/bin/hadoop \
695 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
696 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
697 hadoop-3/bin/mapred \
698 >output-integration/hadoop-3-shaded.log 2>&1 ; then
699 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
702 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
709 stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
710 archiveArtifacts artifacts: 'output-srctarball/*'
711 archiveArtifacts artifacts: 'output-srctarball/**/*'
712 archiveArtifacts artifacts: 'output-integration/*'
713 archiveArtifacts artifacts: 'output-integration/**/*'
724 unstash 'general-result'
725 unstash 'jdk7-result'
726 unstash 'jdk8-hadoop2-result'
727 unstash 'jdk8-hadoop3-result'
728 unstash 'jdk11-hadoop3-result'
729 unstash 'srctarball-result'
731 def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
732 "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
733 "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
734 "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
735 "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
736 'output-srctarball/commentfile',
737 'output-integration/commentfile']
740 echo currentBuild.result
741 echo currentBuild.durationString
742 def comment = "Results for branch ${env.BRANCH_NAME}\n"
743 comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
744 if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
745 comment += "(/) *{color:green}+1 overall{color}*\n"
747 comment += "(x) *{color:red}-1 overall{color}*\n"
748 // Ideally get the committer out of the change and @-mention them in the per-jira comment
750 comment += "----\ndetails (if available):\n\n"
752 echo "[DEBUG] trying to aggregate step-wise results"
753 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
754 echo "[INFO] Comment:"
757 echo "[DEBUG] checking to see if feature branch"
758 def jiras = getJirasToComment(env.BRANCH_NAME, [])
759 if (jiras.isEmpty()) {
760 echo "[DEBUG] non-feature branch, checking change messages for jira keys."
761 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
762 jiras = getJirasToCommentFromChangesets(currentBuild)
764 jiras.each { currentIssue ->
765 jiraComment issueKey: currentIssue, body: comment
767 } catch (Exception exception) {
768 echo "Got exception: ${exception}"
769 echo " ${exception.getStackTrace()}"
775 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
777 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
779 thisBuild.changeSets.each { cs ->
780 cs.getItems().each { change ->
781 CharSequence msg = change.msg
782 echo "change: ${change}"
784 echo " ${change.commitId}"
785 echo " ${change.author}"
787 seenJiras = getJirasToComment(msg, seenJiras)
793 List<String> getJirasToComment(CharSequence source, List<String> seen) {
794 source.eachMatch("HBASE-[0-9]+") { currentIssue ->
795 echo "[DEBUG] found jira key: ${currentIssue}"
796 if (currentIssue in seen) {
797 echo "[DEBUG] already commented on ${currentIssue}."
799 echo "[INFO] commenting on ${currentIssue}."