// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
buildDiscarder(logRotator(numToKeepStr: '30'))
timeout (time: 9, unit: 'HOURS')
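// i.e. keep artifacts/logs for only the last 30 builds, and abort any run that exceeds 9 hours.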
YETUS_RELEASE = '0.7.0'
// where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
PERSONALITY_FILE = 'tools/personality.sh'
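// The personality is always fetched from the master branch (see PROJECT_PERSONALITY above), so every nightly branch runs the same copy; the 'yetus install' stage below downloads it to this path.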
// This section of the docs tells folks not to use the @author javadoc tag. Older branches have our old version of the check for said tag.
AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
WHITESPACE_IGNORE_LIST = '.*/generated/.*'
// output from surefire; sadly the archive function in yetus only works on file names.
ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
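// TEST-*.xml and org.apache.h*.txt are surefire reports and per-test output; *.dumpstream and *.dump are surefire's dumps from broken forked JVMs.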
// These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
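// Artifact published by the flaky-test finder job; the nightly yetus runs are expected to read it and skip the listed tests.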
booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
Should only be used manually when e.g. there is some issue in yetus that cannot be worked around and we are checking a fix for it.''')
booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
stage ('scm-checkout') {
stage ('thirdparty installs') {
stage ('yetus install') {
// directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
dir('downloads-yetus') {
// can't just do a simple echo or the directory won't be created. :(
sh '''#!/usr/bin/env bash
echo "Make sure we have a directory for downloading dependencies: $(pwd)"
sh '''#!/usr/bin/env bash
echo "Ensure we have a copy of Apache Yetus."
if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
if [ ! -d "${YETUS_DIR}" ]; then
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
--working-dir "${WORKSPACE}/downloads-yetus" \
--keys 'https://www.apache.org/dist/yetus/KEYS' \
"${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
"yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz"
mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
YETUS_DIR="${WORKSPACE}/yetus-git"
echo "Downloading from GitHub"
curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
if [ ! -d "${YETUS_DIR}" ]; then
echo "unpacking yetus into '${YETUS_DIR}'"
mkdir -p "${YETUS_DIR}"
gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
// Set up the file we need at PERSONALITY_FILE location
sh """#!/usr/bin/env bash
echo "Downloading Project personality."
curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
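// Stashed so later stages, which may land on different nodes, can unstash the yetus install and personality instead of downloading them again.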
stage ('hadoop 2 cache') {
HADOOP2_VERSION="2.7.1"
// directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
dir('downloads-hadoop-2') {
sh '''#!/usr/bin/env bash
echo "Make sure we have a directory for downloading dependencies: $(pwd)"
sh '''#!/usr/bin/env bash
echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
--working-dir "${WORKSPACE}/downloads-hadoop-2" \
--keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
"${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
"hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
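// Stashed so whichever node runs the packaging/integration stage can unstash the Hadoop 2 tarball rather than re-download it.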
stage ('hadoop 3 cache') {
HADOOP3_VERSION="3.0.0"
// directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
dir('downloads-hadoop-3') {
sh '''#!/usr/bin/env bash
echo "Make sure we have a directory for downloading dependencies: $(pwd)"
sh '''#!/usr/bin/env bash
echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
"${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
--working-dir "${WORKSPACE}/downloads-hadoop-3" \
--keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
"${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
"hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
stage ('init health results') {
// stash with given name for all tests we might run, so that we can unstash all of them even if
// we skip some due to e.g. branch-specific JDK or Hadoop support
stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
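// "doesn't-match" is a path that never exists, so each stash above is created empty (allowEmpty) but defined; the final stage can then unstash all of them unconditionally.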
stage ('health checks') {
stage ('yetus general check') {
BASEDIR = "${env.WORKSPACE}/component"
// TODO does hadoopcheck need to be jdk specific?
// Should be things that work with multijdk
TESTS = 'all,-unit,-findbugs'
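// i.e. every yetus plugin except unit tests and findbugs, which are exercised by the per-JDK/per-Hadoop stages below.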
// on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
// doing multijdk there.
MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
// Must do prior to anything else, since if one of them times out we'll stash the commentfile
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
// since we have a new node definition we need to re-do the scm checkout
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
// TODO roll this into the hbase_nightly_yetus script
sh '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
// Has to be relative to WORKSPACE.
archive "${env.OUTPUT_DIR_RELATIVE}/*"
archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE
reportDir: "${env.OUTPUT_DIR_RELATIVE}",
reportFiles: 'console-report.html',
reportName: 'General Nightly Build Report'
stage ('yetus jdk7 checks') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
// On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
// Must do prior to anything else, since if one of them times out we'll stash the commentfile
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
sh '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
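// allowEmptyResults keeps the junit step from failing the build on branches where this stage produced no surefire XML.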
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compressing."
echo "No archiver directory, skipping compressing."
// Has to be relative to WORKSPACE.
archive "${env.OUTPUT_DIR_RELATIVE}/*"
archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK7 Nightly Build Report'
stage ('yetus jdk8 hadoop2 checks') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
// This isn't strictly needed on branches that only support jdk8, but doesn't hurt
// and is needed on branches that do both jdk7 and jdk8
SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
// Must do prior to anything else, since if one of them times out we'll stash the commentfile
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
sh '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compressing."
echo "No archiver directory, skipping compressing."
// Has to be relative to WORKSPACE.
archive "${env.OUTPUT_DIR_RELATIVE}/*"
archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK8 Nightly Build Report (Hadoop2)'
stage ('yetus jdk8 hadoop3 checks') {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
// This isn't strictly needed on branches that only support jdk8, but doesn't hurt
// and is needed on branches that do both jdk7 and jdk8
SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
// Activates hadoop 3.0 profile in maven runs.
HADOOP_PROFILE = '3.0'
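// Presumably consumed by the personality/nightly script and turned into something like maven's -Dhadoop.profile=3.0; the exact mechanism is an assumption, not shown here.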
// Must do prior to anything else, since if one of them times out we'll stash the commentfile
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
sh '''#!/usr/bin/env bash
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
sh '''#!/usr/bin/env bash
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
echo "No archived files, skipping compressing."
echo "No archiver directory, skipping compressing."
// Has to be relative to WORKSPACE.
archive "${env.OUTPUT_DIR_RELATIVE}/*"
archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK8 Nightly Build Report (Hadoop3)'
// This is meant to mimic what a release manager will do to create RCs.
// See http://hbase.apache.org/book.html#maven.release
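// Rough flow: build the source tarball, rebuild the binary artifacts from it, then use the resulting install to run the pseudo-distributed client test against Hadoop 2 and Hadoop 3 (including Hadoop 3's shaded client).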
stage ('packaging and integration') {
maven 'Maven (latest)'
// this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
jdk "JDK 1.8 (latest)"
BASEDIR = "${env.WORKSPACE}/component"
echo "Setting up directories"
rm -rf "output-srctarball" && mkdir "output-srctarball"
rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
rm -rf "hbase-install" && mkdir "hbase-install"
rm -rf "hbase-client" && mkdir "hbase-client"
rm -rf "hadoop-2" && mkdir "hadoop-2"
rm -rf "hadoop-3" && mkdir "hadoop-3"
rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
rm -rf ".m2-for-src" && mkdir ".m2-for-src"
517 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
518 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
sh '''#!/usr/bin/env bash
rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
echo "got the following saved stats in 'output-srctarball/machine'"
ls -lh "output-srctarball/machine"
echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
--intermediate-file-dir output-srctarball \
--unpack-temp-dir unpacked_src_tarball \
--maven-m2-initial .m2-for-repo \
--maven-m2-src-build .m2-for-src \
--clean-source-checkout \
"${env.BASEDIR}" ; then
echo -e '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
echo -e '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
echo -e '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
tar --strip-components=1 -xzf "${install_artifact}" -C "hbase-install"
client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
tar --strip-components=1 -xzf "${client_artifact}" -C "hbase-client"
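# The count check above expects exactly two tarballs from the assembly; after sorting, the first is treated as the full install and the second as the client-only bundle (naming assumed from the head/tail selection).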
554 echo "Attempting to use run an instance on top of Hadoop 2."
555 sh '''#!/bin/bash -xe
556 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
557 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
558 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
560 --working-dir output-integration/hadoop-2 \
561 --hbase-client-install "hbase-client" \
563 "hadoop-2/bin/hadoop" \
564 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
565 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
566 >output-integration/hadoop-2.log 2>&1 ; then
567 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
572 echo "Attempting to use run an instance on top of Hadoop 3."
574 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
575 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
576 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
578 --working-dir output-integration/hadoop-3 \
579 --hbase-client-install hbase-client \
581 hadoop-3/bin/hadoop \
582 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
583 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
584 >output-integration/hadoop-3.log 2>&1 ; then
585 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
588 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
589 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
591 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
592 --working-dir output-integration/hadoop-3-shaded \
593 --hbase-client-install hbase-client \
595 hadoop-3/bin/hadoop \
596 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
597 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
598 >output-integration/hadoop-3-shaded.log 2>&1 ; then
599 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
602 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
archive 'output-srctarball/*'
archive 'output-srctarball/**/*'
archive 'output-integration/*'
archive 'output-integration/**/*'
unstash 'general-result'
unstash 'jdk7-result'
unstash 'hadoop2-result'
unstash 'hadoop3-result'
unstash 'srctarball-result'
def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
'output-srctarball/commentfile',
'output-integration/commentfile']
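// Each commentfile holds a fragment of JIRA wiki markup ((x)/(/) icons and {color} spans) written by its stage; they get folded into a single comment below.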
echo currentBuild.result
echo currentBuild.durationString
def comment = "Results for branch ${env.BRANCH_NAME}\n"
comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
comment += "(/) *{color:green}+1 overall{color}*\n"
comment += "(x) *{color:red}-1 overall{color}*\n"
// Ideally get the committer out of the change and @ mention them in the per-jira comment
comment += "----\ndetails (if available):\n\n"
650 echo "[DEBUG] trying to aggregate step-wise results"
651 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
652 echo "[INFO] Comment:"
655 echo "[DEBUG] checking to see if feature branch"
656 def jiras = getJirasToComment(env.BRANCH_NAME, [])
657 if (jiras.isEmpty()) {
658 echo "[DEBUG] non-feature branch, checking change messages for jira keys."
659 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
660 jiras = getJirasToCommentFromChangesets(currentBuild)
662 jiras.each { currentIssue ->
663 jiraComment issueKey: currentIssue, body: comment
665 } catch (Exception exception) {
666 echo "Got exception: ${exception}"
667 echo " ${exception.getStackTrace()}"
673 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
675 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
677 thisBuild.changeSets.each { cs ->
678 cs.getItems().each { change ->
679 CharSequence msg = change.msg
680 echo "change: ${change}"
682 echo " ${change.commitId}"
683 echo " ${change.author}"
685 seenJiras = getJirasToComment(msg, seenJiras)
691 List<String> getJirasToComment(CharSequence source, List<String> seen) {
692 source.eachMatch("HBASE-[0-9]+") { currentIssue ->
693 echo "[DEBUG] found jira key: ${currentIssue}"
694 if (currentIssue in seen) {
695 echo "[DEBUG] already commented on ${currentIssue}."
697 echo "[INFO] commenting on ${currentIssue}."