1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements. See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership. The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License. You may obtain a copy of the License at
9 // http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied. See the License for the
15 // specific language governing permissions and limitations
27 buildDiscarder(logRotator(numToKeepStr: '15'))
28 timeout (time: 9, unit: 'HOURS')
31 disableConcurrentBuilds()
34 YETUS_RELEASE = '0.11.1'
35 // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
36 OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
37 OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
38 OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
39 OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
42 PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
43 PERSONALITY_FILE = 'tools/personality.sh'
44 // This section of the docs tells folks not to use the @author javadoc tag. Older branches have our old version of the check for said tag.
45 AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
46 WHITESPACE_IGNORE_LIST = '.*/generated/.*'
47 // output from surefire; sadly the archive function in yetus only works on file names.
48 ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
49 // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
50 TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
51 EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
54 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
56 Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
57 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
60 stage ('scm-checkout') {
67 stage ('thirdparty installs') {
69 stage ('yetus install') {
71 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
72 dir('downloads-yetus') {
73 // can't just do a simple echo or the directory won't be created. :(
74 sh '''#!/usr/bin/env bash
75 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
78 sh '''#!/usr/bin/env bash
80 echo "Ensure we have a copy of Apache Yetus."
81 if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
82 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
83 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
84 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
86 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
87 --working-dir "${WORKSPACE}/downloads-yetus" \
88 --keys 'https://www.apache.org/dist/yetus/KEYS' \
89 "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
90 "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
91 mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
93 echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
96 YETUS_DIR="${WORKSPACE}/yetus-git"
98 echo "downloading from github"
99 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
101 if [ ! -d "${YETUS_DIR}" ]; then
102 echo "unpacking yetus into '${YETUS_DIR}'"
103 mkdir -p "${YETUS_DIR}"
104 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
107 // Set up the file we need at PERSONALITY_FILE location
109 sh """#!/usr/bin/env bash
111 echo "Downloading Project personality."
112 curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
115 stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
118 stage ('hadoop 2 cache') {
120 HADOOP2_VERSION="2.8.5"
123 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
124 dir('downloads-hadoop-2') {
125 sh '''#!/usr/bin/env bash
126 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
129 sh '''#!/usr/bin/env bash
131 echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
132 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
133 --working-dir "${WORKSPACE}/downloads-hadoop-2" \
134 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
135 "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
136 "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
137 for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
138 echo "Delete stale hadoop 2 cache ${stale}"
142 stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
145 stage ('hadoop 3 cache') {
147 HADOOP3_VERSION="3.1.1"
150 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
151 dir('downloads-hadoop-3') {
152 sh '''#!/usr/bin/env bash
153 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
156 sh '''#!/usr/bin/env bash
158 echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
159 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
160 --working-dir "${WORKSPACE}/downloads-hadoop-3" \
161 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
162 "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
163 "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
164 for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
165 echo "Delete stale hadoop 3 cache ${stale}"
169 stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
174 stage ('init health results') {
176 // stash with given name for all tests we might run, so that we can unstash all of them even if
177 // we skip some due to e.g. branch-specific JDK or Hadoop support
178 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
179 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
180 stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
181 stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
182 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
185 stage ('health checks') {
187 stage ('yetus general check') {
194 BASEDIR = "${env.WORKSPACE}/component"
195 // TODO does hadoopcheck need to be jdk specific?
196 // Should be things that work with multijdk
197 TESTS = 'all,-unit,-findbugs'
198 // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
199 // doing multijdk there.
200 MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
201 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
202 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
205 // Must do this prior to anything else, since if one of them times out we'll stash the commentfile
206 sh '''#!/usr/bin/env bash
208 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
209 echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
210 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
213 // Since we have a new node definition, we need to re-do the SCM checkout
217 sh '''#!/usr/bin/env bash
219 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
220 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
221 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
222 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
224 // TODO roll this into the hbase_nightly_yetus script
225 sh '''#!/usr/bin/env bash
228 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
229 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
231 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
234 echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
240 stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
241 // Has to be relative to WORKSPACE.
242 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
243 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
244 publishHTML target: [
247 alwaysLinkToLastBuild: true,
248 // Has to be relative to WORKSPACE
249 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
250 reportFiles: 'console-report.html',
251 reportName: 'General Nightly Build Report'
256 stage ('yetus jdk7 checks') {
266 BASEDIR = "${env.WORKSPACE}/component"
267 TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
268 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
269 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
270 // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
273 // Must do this prior to anything else, since if one of them times out we'll stash the commentfile
274 sh '''#!/usr/bin/env bash
276 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
277 echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
278 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
284 sh '''#!/usr/bin/env bash
286 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
287 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
288 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
289 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
291 sh '''#!/usr/bin/env bash
294 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
295 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
297 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
300 echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
306 stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
307 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
308 // zip surefire reports.
310 if [ -d "${OUTPUT_DIR}/archiver" ]; then
311 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
312 if [[ 0 -ne ${count} ]]; then
313 echo "zipping ${count} archived files"
314 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
316 echo "No archived files, skipping compressing."
319 echo "No archiver directory, skipping compressing."
322 // Has to be relative to WORKSPACE.
323 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
324 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
325 publishHTML target: [
328 alwaysLinkToLastBuild: true,
329 // Has to be relative to WORKSPACE.
330 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
331 reportFiles : 'console-report.html',
332 reportName : 'JDK7 Nightly Build Report'
337 stage ('yetus jdk8 hadoop2 checks') {
344 BASEDIR = "${env.WORKSPACE}/component"
345 TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
346 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
347 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
348 // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
349 // and is needed on branches that do both jdk7 and jdk8
350 SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
353 // Must do this prior to anything else, since if one of them times out we'll stash the commentfile
354 sh '''#!/usr/bin/env bash
356 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
357 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
358 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
364 sh '''#!/usr/bin/env bash
366 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
367 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
368 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
369 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
371 sh '''#!/usr/bin/env bash
374 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
375 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
377 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
380 echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
386 stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
387 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
388 // zip surefire reports.
390 if [ -d "${OUTPUT_DIR}/archiver" ]; then
391 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
392 if [[ 0 -ne ${count} ]]; then
393 echo "zipping ${count} archived files"
394 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
396 echo "No archived files, skipping compressing."
399 echo "No archiver directory, skipping compressing."
402 // Has to be relative to WORKSPACE.
403 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
404 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
405 publishHTML target: [
408 alwaysLinkToLastBuild: true,
409 // Has to be relative to WORKSPACE.
410 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
411 reportFiles : 'console-report.html',
412 reportName : 'JDK8 Nightly Build Report (Hadoop2)'
417 stage ('yetus jdk8 hadoop3 checks') {
429 BASEDIR = "${env.WORKSPACE}/component"
430 TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
431 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
432 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
433 // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
434 // and is needed on branches that do both jdk7 and jdk8
435 SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
436 // Activates hadoop 3.0 profile in maven runs.
437 HADOOP_PROFILE = '3.0'
440 // Must do this prior to anything else, since if one of them times out we'll stash the commentfile
441 sh '''#!/usr/bin/env bash
443 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
444 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
445 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
451 sh '''#!/usr/bin/env bash
453 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
454 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
455 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
456 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
458 sh '''#!/usr/bin/env bash
461 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
462 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
464 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
467 echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
473 stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
474 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
475 // zip surefire reports.
477 if [ -d "${OUTPUT_DIR}/archiver" ]; then
478 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
479 if [[ 0 -ne ${count} ]]; then
480 echo "zipping ${count} archived files"
481 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
483 echo "No archived files, skipping compressing."
486 echo "No archiver directory, skipping compressing."
489 // Has to be relative to WORKSPACE.
490 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
491 archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
492 publishHTML target: [
495 alwaysLinkToLastBuild: true,
496 // Has to be relative to WORKSPACE.
497 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
498 reportFiles : 'console-report.html',
499 reportName : 'JDK8 Nightly Build Report (Hadoop3)'
504 // This is meant to mimic what a release manager will do to create RCs.
505 // See http://hbase.apache.org/book.html#maven.release
506 stage ('packaging and integration') {
508 maven 'Maven (latest)'
509 // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
510 jdk "JDK 1.8 (latest)"
513 BASEDIR = "${env.WORKSPACE}/component"
517 echo "Setting up directories"
518 rm -rf "output-srctarball" && mkdir "output-srctarball"
519 rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
520 rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
521 rm -rf "hbase-install" && mkdir "hbase-install"
522 rm -rf "hbase-client" && mkdir "hbase-client"
523 rm -rf "hadoop-2" && mkdir "hadoop-2"
524 rm -rf "hadoop-3" && mkdir "hadoop-3"
525 rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
526 rm -rf ".m2-for-src" && mkdir ".m2-for-src"
527 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
528 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
530 sh '''#!/usr/bin/env bash
532 rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
533 "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
534 echo "got the following saved stats in 'output-srctarball/machine'"
535 ls -lh "output-srctarball/machine"
538 echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
539 if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
540 --intermediate-file-dir output-srctarball \
541 --unpack-temp-dir unpacked_src_tarball \
542 --maven-m2-initial .m2-for-repo \
543 --maven-m2-src-build .m2-for-src \
544 --clean-source-checkout \
545 "${env.BASEDIR}" ; then
546 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
548 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
552 echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
554 if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
555 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
558 install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
559 tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
560 client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
561 tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
564 echo "Attempting to use run an instance on top of Hadoop 2."
565 sh '''#!/bin/bash -xe
566 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
567 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
568 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
570 --working-dir output-integration/hadoop-2 \
571 --hbase-client-install "hbase-client" \
573 "hadoop-2/bin/hadoop" \
574 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
575 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
576 hadoop-2/bin/mapred \
577 >output-integration/hadoop-2.log 2>&1 ; then
578 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
583 echo "Attempting to use run an instance on top of Hadoop 3."
585 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
586 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
587 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
589 --working-dir output-integration/hadoop-3 \
590 --hbase-client-install hbase-client \
592 hadoop-3/bin/hadoop \
593 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
594 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
595 hadoop-3/bin/mapred \
596 >output-integration/hadoop-3.log 2>&1 ; then
597 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
600 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
601 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
603 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
604 --working-dir output-integration/hadoop-3-shaded \
605 --hbase-client-install hbase-client \
607 hadoop-3/bin/hadoop \
608 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
609 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
610 hadoop-3/bin/mapred \
611 >output-integration/hadoop-3-shaded.log 2>&1 ; then
612 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
615 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
622 stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
623 archiveArtifacts artifacts: 'output-srctarball/*'
624 archiveArtifacts artifacts: 'output-srctarball/**/*'
625 archiveArtifacts artifacts: 'output-integration/*'
626 archiveArtifacts artifacts: 'output-integration/**/*'
637 unstash 'general-result'
638 unstash 'jdk7-result'
639 unstash 'hadoop2-result'
640 unstash 'hadoop3-result'
641 unstash 'srctarball-result'
643 def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
644 "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
645 "${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
646 "${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
647 'output-srctarball/commentfile',
648 'output-integration/commentfile']
651 echo currentBuild.result
652 echo currentBuild.durationString
653 def comment = "Results for branch ${env.BRANCH_NAME}\n"
654 comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
655 if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
656 comment += "(/) *{color:green}+1 overall{color}*\n"
658 comment += "(x) *{color:red}-1 overall{color}*\n"
659 // Ideally, get the committer out of the change and @-mention them in the per-JIRA comment
661 comment += "----\ndetails (if available):\n\n"
663 echo "[DEBUG] trying to aggregate step-wise results"
664 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
665 echo "[INFO] Comment:"
668 echo "[DEBUG] checking to see if feature branch"
669 def jiras = getJirasToComment(env.BRANCH_NAME, [])
670 if (jiras.isEmpty()) {
671 echo "[DEBUG] non-feature branch, checking change messages for jira keys."
672 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
673 jiras = getJirasToCommentFromChangesets(currentBuild)
675 jiras.each { currentIssue ->
676 jiraComment issueKey: currentIssue, body: comment
678 } catch (Exception exception) {
679 echo "Got exception: ${exception}"
680 echo " ${exception.getStackTrace()}"
686 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
688 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
690 thisBuild.changeSets.each { cs ->
691 cs.getItems().each { change ->
692 CharSequence msg = change.msg
693 echo "change: ${change}"
695 echo " ${change.commitId}"
696 echo " ${change.author}"
698 seenJiras = getJirasToComment(msg, seenJiras)
704 List<String> getJirasToComment(CharSequence source, List<String> seen) {
705 source.eachMatch("HBASE-[0-9]+") { currentIssue ->
706 echo "[DEBUG] found jira key: ${currentIssue}"
707 if (currentIssue in seen) {
708 echo "[DEBUG] already commented on ${currentIssue}."
710 echo "[INFO] commenting on ${currentIssue}."