1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements. See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership. The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License. You may obtain a copy of the License at
9 // http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied. See the License for the
15 // specific language governing permissions and limitations
27 buildDiscarder(logRotator(numToKeepStr: '30'))
28 timeout (time: 9, unit: 'HOURS')
33 YETUS_RELEASE = '0.7.0'
34 // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
35 OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
36 OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
37 OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
38 OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
41 PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
42 PERSONALITY_FILE = 'tools/personality.sh'
43 // This section of the docs tells folks not to use the javadoc tag. Older branches have our old version of the check for said tag.
44 AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
45 WHITESPACE_IGNORE_LIST = '.*/generated/.*'
46 // output from surefire; sadly the archive function in yetus only works on file names.
47 ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
48 // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
49 TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
50 // Flaky urls for different branches. Replace '-' and '.' in branch name by '_' because those
51 // characters are not allowed in bash variable name.
52 // Not excluding flakies from the nightly build for now.
53 // EXCLUDE_TESTS_URL_master = 'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
54 // EXCLUDE_TESTS_URL_branch_2 = 'https://builds.apache.org/job/HBase-Find-Flaky-Tests-branch2.0/lastSuccessfulBuild/artifact/excludes/'
57 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
59 Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
60 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
63 stage ('scm-checkout') {
70 stage ('thirdparty installs') {
72 stage ('yetus install') {
74 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
75 dir('downloads-yetus') {
76 // can't just do a simple echo or the directory won't be created. :(
77 sh '''#!/usr/bin/env bash
78 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
81 sh '''#!/usr/bin/env bash
83 echo "Ensure we have a copy of Apache Yetus."
84 if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
85 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
86 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
87 if [ ! -d "${YETUS_DIR}" ]; then
88 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
89 --working-dir "${WORKSPACE}/downloads-yetus" \
90 --keys 'https://www.apache.org/dist/yetus/KEYS' \
91 "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
92 "yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz"
93 mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
95 echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
98 YETUS_DIR="${WORKSPACE}/yetus-git"
100 echo "downloading from github"
101 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
103 if [ ! -d "${YETUS_DIR}" ]; then
104 echo "unpacking yetus into '${YETUS_DIR}'"
105 mkdir -p "${YETUS_DIR}"
106 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
109 // Set up the file we need at PERSONALITY_FILE location
111 sh """#!/usr/bin/env bash
113 echo "Downloading Project personality."
114 curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
117 stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
120 stage ('hadoop 2 cache') {
122 HADOOP2_VERSION="2.7.1"
125 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
126 dir('downloads-hadoop-2') {
127 sh '''#!/usr/bin/env bash
128 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
131 sh '''#!/usr/bin/env bash
133 echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
134 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
135 --working-dir "${WORKSPACE}/downloads-hadoop-2" \
136 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
137 "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
138 "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
140 stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
143 stage ('hadoop 3 cache') {
145 HADOOP3_VERSION="3.0.0"
148 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
149 dir('downloads-hadoop-3') {
150 sh '''#!/usr/bin/env bash
151 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
154 sh '''#!/usr/bin/env bash
156 echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
157 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
158 --working-dir "${WORKSPACE}/downloads-hadoop-3" \
159 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
160 "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
161 "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
163 stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
168 stage ('init health results') {
170 // stash with given name for all tests we might run, so that we can unstash all of them even if
171 // we skip some due to e.g. branch-specific JDK or Hadoop support
172 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
173 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
174 stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
175 stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
176 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
179 stage ('health checks') {
181 stage ('yetus general check') {
188 BASEDIR = "${env.WORKSPACE}/component"
189 // TODO does hadoopcheck need to be jdk specific?
190 // Should be things that work with multijdk
191 TESTS = 'all,-unit,-findbugs'
192 // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
193 // doing multijdk there.
194 MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
195 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
196 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
199 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
200 sh '''#!/usr/bin/env bash
202 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
203 echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
204 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
207 // since we have a new node definition we need to re-do the scm checkout
211 sh '''#!/usr/bin/env bash
213 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
214 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
215 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
216 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
218 // TODO roll this into the hbase_nightly_yetus script
219 sh '''#!/usr/bin/env bash
222 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
223 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
225 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
228 echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
234 stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
235 // Has to be relative to WORKSPACE.
236 archive "${env.OUTPUT_DIR_RELATIVE}/*"
237 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
238 publishHTML target: [
241 alwaysLinkToLastBuild: true,
242 // Has to be relative to WORKSPACE
243 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
244 reportFiles: 'console-report.html',
245 reportName: 'General Nightly Build Report'
250 stage ('yetus jdk7 checks') {
260 BASEDIR = "${env.WORKSPACE}/component"
261 TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
262 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
263 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
264 // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
267 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
268 sh '''#!/usr/bin/env bash
270 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
271 echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
272 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
278 sh '''#!/usr/bin/env bash
280 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
281 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
282 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
283 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
285 sh '''#!/usr/bin/env bash
288 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
289 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
291 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
294 echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
300 stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
301 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
302 // zip surefire reports.
304 if [ -d "${OUTPUT_DIR}/archiver" ]; then
305 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
306 if [[ 0 -ne ${count} ]]; then
307 echo "zipping ${count} archived files"
308 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
310 echo "No archived files, skipping compressing."
313 echo "No archiver directory, skipping compressing."
316 // Has to be relative to WORKSPACE.
317 archive "${env.OUTPUT_DIR_RELATIVE}/*"
318 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
319 publishHTML target: [
322 alwaysLinkToLastBuild: true,
323 // Has to be relative to WORKSPACE.
324 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
325 reportFiles : 'console-report.html',
326 reportName : 'JDK7 Nightly Build Report'
331 stage ('yetus jdk8 hadoop2 checks') {
338 BASEDIR = "${env.WORKSPACE}/component"
339 TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
340 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
341 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
342 // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
343 // and is needed on branches that do both jdk7 and jdk8
344 SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
347 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
348 sh '''#!/usr/bin/env bash
350 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
351 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
352 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
358 sh '''#!/usr/bin/env bash
360 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
361 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
362 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
363 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
365 sh '''#!/usr/bin/env bash
368 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
369 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
371 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
374 echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
380 stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
381 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
382 // zip surefire reports.
384 if [ -d "${OUTPUT_DIR}/archiver" ]; then
385 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
386 if [[ 0 -ne ${count} ]]; then
387 echo "zipping ${count} archived files"
388 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
390 echo "No archived files, skipping compressing."
393 echo "No archiver directory, skipping compressing."
396 // Has to be relative to WORKSPACE.
397 archive "${env.OUTPUT_DIR_RELATIVE}/*"
398 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
399 publishHTML target: [
402 alwaysLinkToLastBuild: true,
403 // Has to be relative to WORKSPACE.
404 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
405 reportFiles : 'console-report.html',
406 reportName : 'JDK8 Nightly Build Report (Hadoop2)'
411 stage ('yetus jdk8 hadoop3 checks') {
423 BASEDIR = "${env.WORKSPACE}/component"
424 TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
425 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
426 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
427 // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
428 // and is needed on branches that do both jdk7 and jdk8
429 SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
430 // Activates hadoop 3.0 profile in maven runs.
431 HADOOP_PROFILE = '3.0'
434 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
435 sh '''#!/usr/bin/env bash
437 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
438 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
439 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
445 sh '''#!/usr/bin/env bash
447 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
448 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
449 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
450 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
452 sh '''#!/usr/bin/env bash
455 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
456 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
458 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
461 echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
467 stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
468 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
469 // zip surefire reports.
471 if [ -d "${OUTPUT_DIR}/archiver" ]; then
472 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
473 if [[ 0 -ne ${count} ]]; then
474 echo "zipping ${count} archived files"
475 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
477 echo "No archived files, skipping compressing."
480 echo "No archiver directory, skipping compressing."
483 // Has to be relative to WORKSPACE.
484 archive "${env.OUTPUT_DIR_RELATIVE}/*"
485 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
486 publishHTML target: [
489 alwaysLinkToLastBuild: true,
490 // Has to be relative to WORKSPACE.
491 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
492 reportFiles : 'console-report.html',
493 reportName : 'JDK8 Nightly Build Report (Hadoop3)'
498 // This is meant to mimic what a release manager will do to create RCs.
499 // See http://hbase.apache.org/book.html#maven.release
500 stage ('packaging and integration') {
502 maven 'Maven (latest)'
503 // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
504 jdk "JDK 1.8 (latest)"
507 BASEDIR = "${env.WORKSPACE}/component"
511 echo "Setting up directories"
512 rm -rf "output-srctarball" && mkdir "output-srctarball"
513 rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
514 rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
515 rm -rf "hbase-install" && mkdir "hbase-install"
516 rm -rf "hbase-client" && mkdir "hbase-client"
517 rm -rf "hadoop-2" && mkdir "hadoop-2"
518 rm -rf "hadoop-3" && mkdir "hadoop-3"
519 rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
520 rm -rf ".m2-for-src" && mkdir ".m2-for-src"
521 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
522 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
524 sh '''#!/usr/bin/env bash
526 rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
527 "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
528 echo "got the following saved stats in 'output-srctarball/machine'"
529 ls -lh "output-srctarball/machine"
532 echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
533 if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
534 --intermediate-file-dir output-srctarball \
535 --unpack-temp-dir unpacked_src_tarball \
536 --maven-m2-initial .m2-for-repo \
537 --maven-m2-src-build .m2-for-src \
538 --clean-source-checkout \
539 "${env.BASEDIR}" ; then
540 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
542 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
546 echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
548 if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
549 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
552 install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
553 tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
554 client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
555 tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
558 echo "Attempting to use run an instance on top of Hadoop 2."
559 sh '''#!/bin/bash -xe
560 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
561 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
562 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
564 --working-dir output-integration/hadoop-2 \
565 --hbase-client-install "hbase-client" \
567 "hadoop-2/bin/hadoop" \
568 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
569 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
570 >output-integration/hadoop-2.log 2>&1 ; then
571 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
576 echo "Attempting to use run an instance on top of Hadoop 3."
578 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
579 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
580 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
582 --working-dir output-integration/hadoop-3 \
583 --hbase-client-install hbase-client \
585 hadoop-3/bin/hadoop \
586 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
587 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
588 >output-integration/hadoop-3.log 2>&1 ; then
589 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
592 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
593 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
595 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
596 --working-dir output-integration/hadoop-3-shaded \
597 --hbase-client-install hbase-client \
599 hadoop-3/bin/hadoop \
600 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
601 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
602 >output-integration/hadoop-3-shaded.log 2>&1 ; then
603 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
606 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
613 stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
614 archive 'output-srctarball/*'
615 archive 'output-srctarball/**/*'
616 archive 'output-integration/*'
617 archive 'output-integration/**/*'
628 unstash 'general-result'
629 unstash 'jdk7-result'
630 unstash 'hadoop2-result'
631 unstash 'hadoop3-result'
632 unstash 'srctarball-result'
634 def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
635 "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
636 "${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
637 "${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
638 'output-srctarball/commentfile',
639 'output-integration/commentfile']
642 echo currentBuild.result
643 echo currentBuild.durationString
644 def comment = "Results for branch ${env.BRANCH_NAME}\n"
645 comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
646 if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
647 comment += "(/) *{color:green}+1 overall{color}*\n"
649 comment += "(x) *{color:red}-1 overall{color}*\n"
650 // Ideally get the committer out of the change and @ mention them in the per-jira comment
652 comment += "----\ndetails (if available):\n\n"
654 echo "[DEBUG] trying to aggregate step-wise results"
655 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
656 echo "[INFO] Comment:"
659 echo "[DEBUG] checking to see if feature branch"
660 def jiras = getJirasToComment(env.BRANCH_NAME, [])
661 if (jiras.isEmpty()) {
662 echo "[DEBUG] non-feature branch, checking change messages for jira keys."
663 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
664 jiras = getJirasToCommentFromChangesets(currentBuild)
666 jiras.each { currentIssue ->
667 jiraComment issueKey: currentIssue, body: comment
669 } catch (Exception exception) {
670 echo "Got exception: ${exception}"
671 echo " ${exception.getStackTrace()}"
677 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
679 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
681 thisBuild.changeSets.each { cs ->
682 cs.getItems().each { change ->
683 CharSequence msg = change.msg
684 echo "change: ${change}"
686 echo " ${change.commitId}"
687 echo " ${change.author}"
689 seenJiras = getJirasToComment(msg, seenJiras)
695 List<String> getJirasToComment(CharSequence source, List<String> seen) {
696 source.eachMatch("HBASE-[0-9]+") { currentIssue ->
697 echo "[DEBUG] found jira key: ${currentIssue}"
698 if (currentIssue in seen) {
699 echo "[DEBUG] already commented on ${currentIssue}."
701 echo "[INFO] commenting on ${currentIssue}."