1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements. See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership. The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License. You may obtain a copy of the License at
9 // http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied. See the License for the
15 // specific language governing permissions and limitations
27 buildDiscarder(logRotator(numToKeepStr: '30'))
28 timeout (time: 9, unit: 'HOURS')
33 YETUS_RELEASE = '0.7.0'
34 // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
35 OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
36 OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
37 OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
38 OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
41 PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
42 PERSONALITY_FILE = 'tools/personality.sh'
43 // This section of the docs tells folks not to use the javadoc tag. Older branches have our old version of the check for said tag.
44 AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
45 WHITESPACE_IGNORE_LIST = '.*/generated/.*'
46 // output from surefire; sadly the archive function in yetus only works on file names.
47 ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
48 // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
49 TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
50 EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
53 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
55 Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
56 booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
59 stage ('scm-checkout') {
66 stage ('thirdparty installs') {
68 stage ('yetus install') {
70 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
71 dir('downloads-yetus') {
72 // can't just do a simple echo or the directory won't be created. :(
73 sh '''#!/usr/bin/env bash
74 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
77 sh '''#!/usr/bin/env bash
79 echo "Ensure we have a copy of Apache Yetus."
80 if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
81 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
82 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
83 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
85 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
86 --working-dir "${WORKSPACE}/downloads-yetus" \
87 --keys 'https://www.apache.org/dist/yetus/KEYS' \
88 "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
89 "yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz"
90 mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
92 echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
95 YETUS_DIR="${WORKSPACE}/yetus-git"
97 echo "downloading from github"
98 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
100 if [ ! -d "${YETUS_DIR}" ]; then
101 echo "unpacking yetus into '${YETUS_DIR}'"
102 mkdir -p "${YETUS_DIR}"
103 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
106 // Set up the file we need at PERSONALITY_FILE location
108 sh """#!/usr/bin/env bash
110 echo "Downloading Project personality."
111 curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
114 stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
117 stage ('hadoop 2 cache') {
119 HADOOP2_VERSION="2.7.1"
122 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
123 dir('downloads-hadoop-2') {
124 sh '''#!/usr/bin/env bash
125 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
128 sh '''#!/usr/bin/env bash
130 echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
131 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
132 --working-dir "${WORKSPACE}/downloads-hadoop-2" \
133 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
134 "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
135 "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
137 stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
140 stage ('hadoop 3 cache') {
142 HADOOP3_VERSION="3.0.0"
145 // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
146 dir('downloads-hadoop-3') {
147 sh '''#!/usr/bin/env bash
148 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
151 sh '''#!/usr/bin/env bash
153 echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
154 "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
155 --working-dir "${WORKSPACE}/downloads-hadoop-3" \
156 --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
157 "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
158 "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
160 stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
165 stage ('init health results') {
167 // stash with given name for all tests we might run, so that we can unstash all of them even if
168 // we skip some due to e.g. branch-specific JDK or Hadoop support
169 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
170 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
171 stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
172 stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
173 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
176 stage ('health checks') {
178 stage ('yetus general check') {
185 BASEDIR = "${env.WORKSPACE}/component"
186 // TODO does hadoopcheck need to be jdk specific?
187 // Should be things that work with multijdk
188 TESTS = 'all,-unit,-findbugs'
189 // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
190 // doing multijdk there.
191 MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
192 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
193 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
196 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
197 sh '''#!/usr/bin/env bash
199 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
200 echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
201 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
204 // since we have a new node definition we need to re-do the scm checkout
208 sh '''#!/usr/bin/env bash
210 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
211 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
212 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
213 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
215 // TODO roll this into the hbase_nightly_yetus script
216 sh '''#!/usr/bin/env bash
219 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
220 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
222 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
225 echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
231 stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
232 // Has to be relative to WORKSPACE.
233 archive "${env.OUTPUT_DIR_RELATIVE}/*"
234 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
235 publishHTML target: [
238 alwaysLinkToLastBuild: true,
239 // Has to be relative to WORKSPACE
240 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
241 reportFiles: 'console-report.html',
242 reportName: 'General Nightly Build Report'
247 stage ('yetus jdk7 checks') {
257 BASEDIR = "${env.WORKSPACE}/component"
258 TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
259 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
260 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
261 // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
264 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
265 sh '''#!/usr/bin/env bash
267 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
268 echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
269 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
275 sh '''#!/usr/bin/env bash
277 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
278 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
279 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
280 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
282 sh '''#!/usr/bin/env bash
285 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
286 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
288 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
291 echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
297 stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
298 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
299 // zip surefire reports.
301 if [ -d "${OUTPUT_DIR}/archiver" ]; then
302 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
303 if [[ 0 -ne ${count} ]]; then
304 echo "zipping ${count} archived files"
305 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
307 echo "No archived files, skipping compressing."
310 echo "No archiver directory, skipping compressing."
313 // Has to be relative to WORKSPACE.
314 archive "${env.OUTPUT_DIR_RELATIVE}/*"
315 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
316 publishHTML target: [
319 alwaysLinkToLastBuild: true,
320 // Has to be relative to WORKSPACE.
321 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
322 reportFiles : 'console-report.html',
323 reportName : 'JDK7 Nightly Build Report'
328 stage ('yetus jdk8 hadoop2 checks') {
335 BASEDIR = "${env.WORKSPACE}/component"
336 TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
337 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
338 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
339 // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
340 // and is needed on branches that do both jdk7 and jdk8
341 SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
344 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
345 sh '''#!/usr/bin/env bash
347 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
348 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
349 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
355 sh '''#!/usr/bin/env bash
357 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
358 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
359 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
360 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
362 sh '''#!/usr/bin/env bash
365 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
366 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
368 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
371 echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
377 stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
378 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
379 // zip surefire reports.
381 if [ -d "${OUTPUT_DIR}/archiver" ]; then
382 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
383 if [[ 0 -ne ${count} ]]; then
384 echo "zipping ${count} archived files"
385 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
387 echo "No archived files, skipping compressing."
390 echo "No archiver directory, skipping compressing."
393 // Has to be relative to WORKSPACE.
394 archive "${env.OUTPUT_DIR_RELATIVE}/*"
395 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
396 publishHTML target: [
399 alwaysLinkToLastBuild: true,
400 // Has to be relative to WORKSPACE.
401 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
402 reportFiles : 'console-report.html',
403 reportName : 'JDK8 Nightly Build Report (Hadoop2)'
408 stage ('yetus jdk8 hadoop3 checks') {
420 BASEDIR = "${env.WORKSPACE}/component"
421 TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
422 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
423 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
424 // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
425 // and is needed on branches that do both jdk7 and jdk8
426 SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
427 // Activates hadoop 3.0 profile in maven runs.
428 HADOOP_PROFILE = '3.0'
431 // Must do prior to anything else, since if one of them times out we'll stash the commentfile
432 sh '''#!/usr/bin/env bash
434 rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
435 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
436 echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
442 sh '''#!/usr/bin/env bash
444 rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
445 "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
446 echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
447 ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
449 sh '''#!/usr/bin/env bash
452 if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
453 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
455 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
458 echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
464 stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
465 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
466 // zip surefire reports.
468 if [ -d "${OUTPUT_DIR}/archiver" ]; then
469 count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
470 if [[ 0 -ne ${count} ]]; then
471 echo "zipping ${count} archived files"
472 zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
474 echo "No archived files, skipping compressing."
477 echo "No archiver directory, skipping compressing."
480 // Has to be relative to WORKSPACE.
481 archive "${env.OUTPUT_DIR_RELATIVE}/*"
482 archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
483 publishHTML target: [
486 alwaysLinkToLastBuild: true,
487 // Has to be relative to WORKSPACE.
488 reportDir : "${env.OUTPUT_DIR_RELATIVE}",
489 reportFiles : 'console-report.html',
490 reportName : 'JDK8 Nightly Build Report (Hadoop3)'
495 // This is meant to mimic what a release manager will do to create RCs.
496 // See http://hbase.apache.org/book.html#maven.release
497 stage ('packaging and integration') {
499 maven 'Maven (latest)'
500 // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
501 jdk "JDK 1.8 (latest)"
504 BASEDIR = "${env.WORKSPACE}/component"
508 echo "Setting up directories"
509 rm -rf "output-srctarball" && mkdir "output-srctarball"
510 rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
511 rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
512 rm -rf "hbase-install" && mkdir "hbase-install"
513 rm -rf "hbase-client" && mkdir "hbase-client"
514 rm -rf "hadoop-2" && mkdir "hadoop-2"
515 rm -rf "hadoop-3" && mkdir "hadoop-3"
516 rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
517 rm -rf ".m2-for-src" && mkdir ".m2-for-src"
518 echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
519 echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
521 sh '''#!/usr/bin/env bash
523 rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
524 "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
525 echo "got the following saved stats in 'output-srctarball/machine'"
526 ls -lh "output-srctarball/machine"
529 echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
530 if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
531 --intermediate-file-dir output-srctarball \
532 --unpack-temp-dir unpacked_src_tarball \
533 --maven-m2-initial .m2-for-repo \
534 --maven-m2-src-build .m2-for-src \
535 --clean-source-checkout \
536 "${env.BASEDIR}" ; then
537 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
539 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
543 echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
545 if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
546 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
549 install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
550 tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
551 client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
552 tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
555 echo "Attempting to use run an instance on top of Hadoop 2."
556 sh '''#!/bin/bash -xe
557 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
558 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
559 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
561 --working-dir output-integration/hadoop-2 \
562 --hbase-client-install "hbase-client" \
564 "hadoop-2/bin/hadoop" \
565 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
566 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
567 >output-integration/hadoop-2.log 2>&1 ; then
568 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
573 echo "Attempting to use run an instance on top of Hadoop 3."
575 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
576 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
577 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
579 --working-dir output-integration/hadoop-3 \
580 --hbase-client-install hbase-client \
582 hadoop-3/bin/hadoop \
583 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
584 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
585 >output-integration/hadoop-3.log 2>&1 ; then
586 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
589 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
590 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
592 --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
593 --working-dir output-integration/hadoop-3-shaded \
594 --hbase-client-install hbase-client \
596 hadoop-3/bin/hadoop \
597 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
598 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
599 >output-integration/hadoop-3-shaded.log 2>&1 ; then
600 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
603 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
610 stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
611 archive 'output-srctarball/*'
612 archive 'output-srctarball/**/*'
613 archive 'output-integration/*'
614 archive 'output-integration/**/*'
625 unstash 'general-result'
626 unstash 'jdk7-result'
627 unstash 'hadoop2-result'
628 unstash 'hadoop3-result'
629 unstash 'srctarball-result'
631 def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
632 "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
633 "${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
634 "${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
635 'output-srctarball/commentfile',
636 'output-integration/commentfile']
639 echo currentBuild.result
640 echo currentBuild.durationString
641 def comment = "Results for branch ${env.BRANCH_NAME}\n"
642 comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
643 if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
644 comment += "(/) *{color:green}+1 overall{color}*\n"
646 comment += "(x) *{color:red}-1 overall{color}*\n"
647 // Ideally get the committer out of the change and @ mention them in the per-jira comment
649 comment += "----\ndetails (if available):\n\n"
651 echo "[DEBUG] trying to aggregate step-wise results"
652 comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
653 echo "[INFO] Comment:"
656 echo "[DEBUG] checking to see if feature branch"
657 def jiras = getJirasToComment(env.BRANCH_NAME, [])
658 if (jiras.isEmpty()) {
659 echo "[DEBUG] non-feature branch, checking change messages for jira keys."
660 echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
661 jiras = getJirasToCommentFromChangesets(currentBuild)
663 jiras.each { currentIssue ->
664 jiraComment issueKey: currentIssue, body: comment
666 } catch (Exception exception) {
667 echo "Got exception: ${exception}"
668 echo " ${exception.getStackTrace()}"
674 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
676 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
678 thisBuild.changeSets.each { cs ->
679 cs.getItems().each { change ->
680 CharSequence msg = change.msg
681 echo "change: ${change}"
683 echo " ${change.commitId}"
684 echo " ${change.author}"
686 seenJiras = getJirasToComment(msg, seenJiras)
692 List<String> getJirasToComment(CharSequence source, List<String> seen) {
693 source.eachMatch("HBASE-[0-9]+") { currentIssue ->
694 echo "[DEBUG] found jira key: ${currentIssue}"
695 if (currentIssue in seen) {
696 echo "[DEBUG] already commented on ${currentIssue}."
698 echo "[INFO] commenting on ${currentIssue}."