1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements. See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership. The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License. You may obtain a copy of the License at
9 // http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied. See the License for the
15 // specific language governing permissions and limitations
27 buildDiscarder(logRotator(numToKeepStr: '15'))
28 timeout (time: 9, unit: 'HOURS')
33 YETUS_RELEASE = '0.11.0'
34 // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
35 OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
36 OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
37 OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
38 OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
41 PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
42 PERSONALITY_FILE = 'tools/personality.sh'
43 // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
44 AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
45 WHITESPACE_IGNORE_LIST = '.*/generated/.*'
46 // output from surefire; sadly the archive function in yetus only works on file names.
47 ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
48 // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
49 TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
50 EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
53 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
55 Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
56 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
59 stage ('scm-checkout') {
66 stage ('thirdparty installs') {
68 stage ('yetus install') {
70 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
71 dir('downloads-yetus') {
72 // can't just do a simple echo or the directory won't be created. :(
73 sh '''#!/usr/bin/env bash
74 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
77 sh '''#!/usr/bin/env bash
79 echo "Ensure we have a copy of Apache Yetus."
80 if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
81 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
82 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
83 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
85 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
86 --working-dir "${WORKSPACE}/downloads-yetus" \
87 --keys 'https://www.apache.org/dist/yetus/KEYS' \
88 "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
89 "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
90 mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
92 echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
95 YETUS_DIR="${WORKSPACE}/yetus-git"
97 echo "downloading from github"
98 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
100 if [ ! -d "${YETUS_DIR}" ]; then
101 echo "unpacking yetus into '${YETUS_DIR}'"
102 mkdir -p "${YETUS_DIR}"
103 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
106 // Set up the file we need at PERSONALITY_FILE location
108 sh """#!/usr/bin/env bash
110 echo "Downloading Project personality."
111 curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
114 stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
117 stage ('hadoop 2 cache') {
119 HADOOP2_VERSION="2.8.5"
122 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
123 dir('downloads-hadoop-2') {
124 sh '''#!/usr/bin/env bash
125 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
128 sh '''#!/usr/bin/env bash
130 echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
131 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
132 --working-dir "${WORKSPACE}/downloads-hadoop-2" \
133 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
134 "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
135 "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
136 for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
137 echo "Delete stale hadoop 2 cache ${stale}"
141 stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
144 stage ('hadoop 3 cache') {
146 HADOOP3_VERSION="3.1.1"
149 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
150 dir('downloads-hadoop-3') {
151 sh '''#!/usr/bin/env bash
152 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
155 sh '''#!/usr/bin/env bash
157 echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
158 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
159 --working-dir "${WORKSPACE}/downloads-hadoop-3" \
160 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
161 "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
162 "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
163 for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
164 echo "Delete stale hadoop 3 cache ${stale}"
168 stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
173 stage ('init health results') {
175 // stash with given name for all tests we might run, so that we can unstash all of them even if
176 // we skip some due to e.g. branch-specific JDK or Hadoop support
177 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
178 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
179 stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
180 stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
181 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
184 stage ('health checks') {
186 stage ('yetus general check') {
193 BASEDIR = "${env.WORKSPACE}/component"
194 // TODO does hadoopcheck need to be jdk specific?
195 // Should be things that work with multijdk
196 TESTS = 'all,-unit,-findbugs'
197 // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
198 // doing multijdk there.
199 MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
200 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
201 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
204           // Must do prior to anything else, since if one of them times out we'll stash the commentfile
205 sh '''#!/usr/bin/env bash
207 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
208 echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
209 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
212 // since we have a new node definition we need to re-do the scm checkout
216 sh '''#!/usr/bin/env bash
218 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
219 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
220 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
221 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
223 // TODO roll this into the hbase_nightly_yetus script
224 sh '''#!/usr/bin/env bash
227 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
228 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
230 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
233 echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
239 stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
240 // Has to be relative to WORKSPACE.
241 archive "${env.OUTPUT_DIR_RELATIVE}/*"
242 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
243 publishHTML target: [
246 alwaysLinkToLastBuild: true,
247 // Has to be relative to WORKSPACE
248 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
249 reportFiles: 'console-report.html',
250 reportName: 'General Nightly Build Report'
255 stage ('yetus jdk7 checks') {
265 BASEDIR = "${env.WORKSPACE}/component"
266 TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
267 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
268 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
269 // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
272           // Must do prior to anything else, since if one of them times out we'll stash the commentfile
273 sh '''#!/usr/bin/env bash
275 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
276 echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
277 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
283 sh '''#!/usr/bin/env bash
285 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
286 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
287 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
288 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
290 sh '''#!/usr/bin/env bash
293 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
294 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
296 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
299 echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
305 stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
306 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
307 // zip surefire reports.
309 if [ -d "${OUTPUT_DIR}/archiver" ]; then
310 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
311 if [[ 0 -ne ${count} ]]; then
312 echo "zipping ${count} archived files"
313 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
315 echo "No archived files, skipping compressing."
318 echo "No archiver directory, skipping compressing."
321 // Has to be relative to WORKSPACE.
322 archive "${env.OUTPUT_DIR_RELATIVE}/*"
323 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
324 publishHTML target: [
327 alwaysLinkToLastBuild: true,
328 // Has to be relative to WORKSPACE.
329 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
330 reportFiles : 'console-report.html',
331 reportName : 'JDK7 Nightly Build Report'
336 stage ('yetus jdk8 hadoop2 checks') {
343 BASEDIR = "${env.WORKSPACE}/component"
344 TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
345 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
346 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
347 // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
348 // and is needed on branches that do both jdk7 and jdk8
349 SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
352           // Must do prior to anything else, since if one of them times out we'll stash the commentfile
353 sh '''#!/usr/bin/env bash
355 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
356 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
357 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
363 sh '''#!/usr/bin/env bash
365 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
366 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
367 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
368 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
370 sh '''#!/usr/bin/env bash
373 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
374 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
376 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
379 echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
385 stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
386 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
387 // zip surefire reports.
389 if [ -d "${OUTPUT_DIR}/archiver" ]; then
390 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
391 if [[ 0 -ne ${count} ]]; then
392 echo "zipping ${count} archived files"
393 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
395 echo "No archived files, skipping compressing."
398 echo "No archiver directory, skipping compressing."
401 // Has to be relative to WORKSPACE.
402 archive "${env.OUTPUT_DIR_RELATIVE}/*"
403 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
404 publishHTML target: [
407 alwaysLinkToLastBuild: true,
408 // Has to be relative to WORKSPACE.
409 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
410 reportFiles : 'console-report.html',
411 reportName : 'JDK8 Nightly Build Report (Hadoop2)'
416 stage ('yetus jdk8 hadoop3 checks') {
428 BASEDIR = "${env.WORKSPACE}/component"
429 TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
430 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
431 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
432 // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
433 // and is needed on branches that do both jdk7 and jdk8
434 SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
435 // Activates hadoop 3.0 profile in maven runs.
436 HADOOP_PROFILE = '3.0'
440           // Must do prior to anything else, since if one of them times out we'll stash the commentfile
440 sh '''#!/usr/bin/env bash
442 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
443 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
444 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
450 sh '''#!/usr/bin/env bash
452 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
453 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
454 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
455 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
457 sh '''#!/usr/bin/env bash
460 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
461 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
463 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
466 echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
472 stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
473 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
474 // zip surefire reports.
476 if [ -d "${OUTPUT_DIR}/archiver" ]; then
477 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
478 if [[ 0 -ne ${count} ]]; then
479 echo "zipping ${count} archived files"
480 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
482 echo "No archived files, skipping compressing."
485 echo "No archiver directory, skipping compressing."
488 // Has to be relative to WORKSPACE.
489 archive "${env.OUTPUT_DIR_RELATIVE}/*"
490 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
491 publishHTML target: [
494 alwaysLinkToLastBuild: true,
495 // Has to be relative to WORKSPACE.
496 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
497 reportFiles : 'console-report.html',
498 reportName : 'JDK8 Nightly Build Report (Hadoop3)'
503 // This is meant to mimic what a release manager will do to create RCs.
504 // See http://hbase.apache.org/book.html#maven.release
505 stage ('packaging and integration') {
507 maven 'Maven (latest)'
508 // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
509 jdk "JDK 1.8 (latest)"
512 BASEDIR = "${env.WORKSPACE}/component"
516 echo "Setting up directories"
517 rm -rf "output-srctarball" && mkdir "output-srctarball"
518 rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
519 rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
520 rm -rf "hbase-install" && mkdir "hbase-install"
521 rm -rf "hbase-client" && mkdir "hbase-client"
522 rm -rf "hadoop-2" && mkdir "hadoop-2"
523 rm -rf "hadoop-3" && mkdir "hadoop-3"
524 rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
525 rm -rf ".m2-for-src" && mkdir ".m2-for-src"
526 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
527 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
529 sh '''#!/usr/bin/env bash
531 rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
532 "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
533 echo "got the following saved stats in 'output-srctarball/machine'"
534 ls -lh "output-srctarball/machine"
537 echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
538 if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
539 --intermediate-file-dir output-srctarball \
540 --unpack-temp-dir unpacked_src_tarball \
541 --maven-m2-initial .m2-for-repo \
542 --maven-m2-src-build .m2-for-src \
543 --clean-source-checkout \
544 "${env.BASEDIR}" ; then
545 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
547 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
551 echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
553 if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
554 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
557 install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
558 tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
559 client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
560 tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
563 echo "Attempting to use run an instance on top of Hadoop 2."
564 sh '''#!/bin/bash -xe
565 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
566 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
567 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
569 --working-dir output-integration/hadoop-2 \
570 --hbase-client-install "hbase-client" \
572 "hadoop-2/bin/hadoop" \
573 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
574 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
575 hadoop-2/bin/mapred \
576 >output-integration/hadoop-2.log 2>&1 ; then
577 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
582 echo "Attempting to use run an instance on top of Hadoop 3."
584 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
585 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
586 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
588 --working-dir output-integration/hadoop-3 \
589 --hbase-client-install hbase-client \
591 hadoop-3/bin/hadoop \
592 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
593 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
594 hadoop-3/bin/mapred \
595 >output-integration/hadoop-3.log 2>&1 ; then
596 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
599 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
600 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
602 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
603 --working-dir output-integration/hadoop-3-shaded \
604 --hbase-client-install hbase-client \
606 hadoop-3/bin/hadoop \
607 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
608 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
609 hadoop-3/bin/mapred \
610 >output-integration/hadoop-3-shaded.log 2>&1 ; then
611 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
614 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
621 stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
622 archive 'output-srctarball/*'
623 archive 'output-srctarball/**/*'
624 archive 'output-integration/*'
625 archive 'output-integration/**/*'
636 unstash 'general-result'
637 unstash 'jdk7-result'
638 unstash 'hadoop2-result'
639 unstash 'hadoop3-result'
640 unstash 'srctarball-result'
642 def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
643 "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
644 "${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
645 "${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
646 'output-srctarball/commentfile',
647 'output-integration/commentfile']
650 echo currentBuild.result
651 echo currentBuild.durationString
652 def comment = "Results for branch ${env.BRANCH_NAME}\n"
653 comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
654 if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
655 comment += "(/) *{color:green}+1 overall{color}*\n"
657 comment += "(x) *{color:red}-1 overall{color}*\n"
658          // Ideally, get the committer out of the change and @-mention them in the per-jira comment
660 comment += "----\ndetails (if available):\n\n"
662 echo "[DEBUG] trying to aggregate step-wise results"
663 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
664 echo "[INFO] Comment:"
667 echo "[DEBUG] checking to see if feature branch"
668 def jiras = getJirasToComment(env.BRANCH_NAME, [])
669 if (jiras.isEmpty()) {
670 echo "[DEBUG] non-feature branch, checking change messages for jira keys."
671 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
672 jiras = getJirasToCommentFromChangesets(currentBuild)
674 jiras.each { currentIssue ->
675 jiraComment issueKey: currentIssue, body: comment
677 } catch (Exception exception) {
678 echo "Got exception: ${exception}"
679 echo " ${exception.getStackTrace()}"
685 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
687 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
689 thisBuild.changeSets.each { cs ->
690 cs.getItems().each { change ->
691 CharSequence msg = change.msg
692 echo "change: ${change}"
694 echo " ${change.commitId}"
695 echo " ${change.author}"
697 seenJiras = getJirasToComment(msg, seenJiras)
703 List<String> getJirasToComment(CharSequence source, List<String> seen) {
704 source.eachMatch("HBASE-[0-9]+") { currentIssue ->
705 echo "[DEBUG] found jira key: ${currentIssue}"
706 if (currentIssue in seen) {
707 echo "[DEBUG] already commented on ${currentIssue}."
709 echo "[INFO] commenting on ${currentIssue}."