// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
pipeline {
  agent {
    node {
      label 'ubuntu'
    }
  }
  triggers {
    cron('@daily')
  }
  options {
    buildDiscarder(logRotator(numToKeepStr: '30'))
    timeout (time: 9, unit: 'HOURS')
    timestamps()
    skipDefaultCheckout()
  }
  environment {
    YETUS_RELEASE = '0.9.0'
    // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
    OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
    OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
    OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'

    PROJECT = 'hbase'
    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
    PERSONALITY_FILE = 'tools/personality.sh'
    // The docs chapter tells folks not to use the @author javadoc tag, and older branches still carry
    // our old version of the check for said tag, so both files are ignored by the author check.
    AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
    WHITESPACE_IGNORE_LIST = '.*/generated/.*'
    // output from surefire; sadly the archive function in yetus only works on file names.
    ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
    // These tests currently have known failures. Once they burn down to 0, remove them from here so that new problems will cause a failure.
    TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
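    // Points at the 'excludes' artifact published by the flaky test tracking job, so known-flaky
    // tests can be excluded from the unit test runs.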
    EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
  }
  parameters {
    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.

    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
  stages {
    stage ('scm-checkout') {
      steps {
        dir('component') {
          checkout scm
        }
      }
    }
    stage ('thirdparty installs') {
      parallel {
        stage ('yetus install') {
          steps {
            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
            dir('downloads-yetus') {
              // can't just do a simple echo or the directory won't be created. :(
              sh '''#!/usr/bin/env bash
                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
            }
            sh '''#!/usr/bin/env bash
              set -e
              echo "Ensure we have a copy of Apache Yetus."
              if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
                YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
                echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
                if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
                  rm -rf "${YETUS_DIR}"
                  "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                      --working-dir "${WORKSPACE}/downloads-yetus" \
                      --keys 'https://www.apache.org/dist/yetus/KEYS' \
                      "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
                      "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
                  mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
                else
                  echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
                fi
              else
                YETUS_DIR="${WORKSPACE}/yetus-git"
                rm -rf "${YETUS_DIR}"
                echo "downloading from github"
                curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
              fi
              if [ ! -d "${YETUS_DIR}" ]; then
                echo "unpacking yetus into '${YETUS_DIR}'"
                mkdir -p "${YETUS_DIR}"
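                # the tarball wraps everything in a single top-level directory; strip it so the
                # contents land directly under YETUS_DIR.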
                gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
              fi
            '''
            // Set up the file we need at PERSONALITY_FILE location
            dir ("tools") {
              sh """#!/usr/bin/env bash
                set -e
                echo "Downloading Project personality."
                curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
              """
            }
            stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
          }
        }
        stage ('hadoop 2 cache') {
          environment {
            HADOOP2_VERSION="2.8.5"
          }
          steps {
            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
            dir('downloads-hadoop-2') {
              sh '''#!/usr/bin/env bash
                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
            }
            sh '''#!/usr/bin/env bash
              set -e
              echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
              "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                  --working-dir "${WORKSPACE}/downloads-hadoop-2" \
                  --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
                  "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
                  "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
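              # drop cached tarballs for any other Hadoop 2 version so only the current one stays in the workspace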
              for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
                echo "Delete stale hadoop 2 cache ${stale}"
                rm -rf "${stale}"
              done
            '''
            stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
          }
        }
        stage ('hadoop 3 cache') {
          environment {
            HADOOP3_VERSION="3.1.1"
          }
          steps {
            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
            dir('downloads-hadoop-3') {
              sh '''#!/usr/bin/env bash
                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
            }
            sh '''#!/usr/bin/env bash
              set -e
              echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
              "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                  --working-dir "${WORKSPACE}/downloads-hadoop-3" \
                  --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
                  "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
                  "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
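              # as above, drop cached tarballs for any other Hadoop 3 version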
              for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
                echo "Delete stale hadoop 3 cache ${stale}"
                rm -rf "${stale}"
              done
            '''
            stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
          }
        }
      }
    }
    stage ('init health results') {
      steps {
        // stash with given name for all tests we might run, so that we can unstash all of them even if
        // we skip some due to e.g. branch-specific JDK or Hadoop support
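        // the "doesn't-match" include deliberately matches no files; together with allowEmpty this
        // creates empty placeholder stashes that the final post block can always unstash.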
        stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
        stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
        stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
        stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
        stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
      }
    }
    stage ('health checks') {
      parallel {
        stage ('yetus general check') {
          agent {
            node {
              label 'Hadoop'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            // TODO does hadoopcheck need to be jdk specific?
            // Should be things that work with multijdk
            TESTS = 'all,-unit,-findbugs'
            // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
            // doing multijdk there.
            MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            // since we have a new node definition we need to re-do the scm checkout
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            // TODO roll this into the hbase_nightly_yetus script
            sh '''#!/usr/bin/env bash
              set -e
              declare -i status=0
              if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
              else
                echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
                status=1
              fi
              echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
              exit "${status}"
            '''
          }
          post {
            always {
              stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              // Has to be relative to WORKSPACE.
              archive "${env.OUTPUT_DIR_RELATIVE}/*"
              archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing: true,
                keepAll: true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE
                reportDir: "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles: 'console-report.html',
                reportName: 'General Nightly Build Report'
              ]
            }
          }
        }
        stage ('yetus jdk7 checks') {
          agent {
            node {
              label 'Hadoop'
            }
          }
          when {
            branch 'branch-1*'
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
            // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            sh '''#!/usr/bin/env bash
              set -e
              declare -i status=0
              if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
              else
                echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
                status=1
              fi
              echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
              exit "${status}"
            '''
          }
          post {
            always {
              stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
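              // note that 'zip -m' deletes the originals once they're added, so the raw logs only get
              // archived via test_logs.zip rather than twice.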
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              // Has to be relative to WORKSPACE.
              archive "${env.OUTPUT_DIR_RELATIVE}/*"
              archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK7 Nightly Build Report'
              ]
            }
          }
        }
        stage ('yetus jdk8 hadoop2 checks') {
          agent {
            node {
              label 'Hadoop'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
            // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
            // and is needed on branches that do both jdk7 and jdk8
            SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            sh '''#!/usr/bin/env bash
              set -e
              declare -i status=0
              if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
              else
                echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
                status=1
              fi
              echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
              exit "${status}"
            '''
          }
          post {
            always {
              stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              // Has to be relative to WORKSPACE.
              archive "${env.OUTPUT_DIR_RELATIVE}/*"
              archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
              ]
            }
          }
        }
        stage ('yetus jdk8 hadoop3 checks') {
          agent {
            node {
              label 'Hadoop'
            }
          }
          when {
            not {
              branch 'branch-1*'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
            // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
            // and is needed on branches that do both jdk7 and jdk8
            SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
            // Activates hadoop 3.0 profile in maven runs.
            HADOOP_PROFILE = '3.0'
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            sh '''#!/usr/bin/env bash
              set -e
              declare -i status=0
              if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
              else
                echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                status=1
              fi
              echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
              exit "${status}"
            '''
          }
          post {
            always {
              stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              // Has to be relative to WORKSPACE.
              archive "${env.OUTPUT_DIR_RELATIVE}/*"
              archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
              ]
            }
          }
        }
        // This is meant to mimic what a release manager will do to create RCs.
        // See http://hbase.apache.org/book.html#maven.release
        stage ('packaging and integration') {
          tools {
            maven 'Maven (latest)'
            // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
            jdk "JDK 1.8 (latest)"
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
          }
          steps {
            sh '''#!/bin/bash -e
              echo "Setting up directories"
              rm -rf "output-srctarball" && mkdir "output-srctarball"
              rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
              rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
              rm -rf "hbase-install" && mkdir "hbase-install"
              rm -rf "hbase-client" && mkdir "hbase-client"
              rm -rf "hadoop-2" && mkdir "hadoop-2"
              rm -rf "hadoop-3" && mkdir "hadoop-3"
              rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
              rm -rf ".m2-for-src" && mkdir ".m2-for-src"
              echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
              echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
            '''
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
              echo "got the following saved stats in 'output-srctarball/machine'"
              ls -lh "output-srctarball/machine"
            '''
            sh """#!/bin/bash -e
              echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
              if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
                  --intermediate-file-dir output-srctarball \
                  --unpack-temp-dir unpacked_src_tarball \
                  --maven-m2-initial .m2-for-repo \
                  --maven-m2-src-build .m2-for-src \
                  --clean-source-checkout \
                  "${env.BASEDIR}" ; then
                echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
              else
                echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
                exit 1
              fi
            """
            echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
            sh '''#!/bin/bash -e
              if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
                echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
                exit 1
              fi
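              # two tarballs are expected: hbase-<version>-bin and hbase-<version>-client-bin; a
              # lexicographic sort puts the plain -bin (install) tarball first and the client one last.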
              install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
              tar --strip-components=1 -xzf "${install_artifact}" -C "hbase-install"
              client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
              tar --strip-components=1 -xzf "${client_artifact}" -C "hbase-client"
            '''
            unstash 'hadoop-2'
            echo "Attempting to run an instance on top of Hadoop 2."
            sh '''#!/bin/bash -xe
              artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
              tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
                  --single-process \
                  --working-dir output-integration/hadoop-2 \
                  --hbase-client-install "hbase-client" \
                  "hbase-install" \
                  "hadoop-2/bin/hadoop" \
                  hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                  hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                  hadoop-2/bin/mapred \
                  >output-integration/hadoop-2.log 2>&1 ; then
                echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
                exit 2
              fi
            '''
            unstash 'hadoop-3'
            echo "Attempting to run an instance on top of Hadoop 3."
            sh '''#!/bin/bash -e
              artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
              tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
                  --single-process \
                  --working-dir output-integration/hadoop-3 \
                  --hbase-client-install hbase-client \
                  hbase-install \
                  hadoop-3/bin/hadoop \
                  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                  hadoop-3/bin/mapred \
                  >output-integration/hadoop-3.log 2>&1 ; then
                echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
                exit 2
              fi
              echo "Attempting to run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
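              # hadoop-client-api and hadoop-client-runtime are Hadoop 3's shaded client artifacts;
              # putting them on the classpath exercises the downstream-facing client jars rather than
              # the full Hadoop classpath.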
              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
                  --single-process \
                  --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
                  --working-dir output-integration/hadoop-3-shaded \
                  --hbase-client-install hbase-client \
                  hbase-install \
                  hadoop-3/bin/hadoop \
                  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                  hadoop-3/bin/mapred \
                  >output-integration/hadoop-3-shaded.log 2>&1 ; then
                echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
                exit 2
              fi
              echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
            '''
          }
          post {
            always {
              stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
              archive 'output-srctarball/*'
              archive 'output-srctarball/**/*'
              archive 'output-integration/*'
              archive 'output-integration/**/*'
            }
          }
        }
      }
    }
  }
  post {
    always {
      script {
        try {
          unstash 'general-result'
          unstash 'jdk7-result'
          unstash 'hadoop2-result'
          unstash 'hadoop3-result'
          unstash 'srctarball-result'
          sh "printenv"
          def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
                         "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
                         "${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
                         "${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
                         'output-srctarball/commentfile',
                         'output-integration/commentfile']
          echo env.BRANCH_NAME
          echo env.BUILD_URL
          echo currentBuild.result
          echo currentBuild.durationString
          def comment = "Results for branch ${env.BRANCH_NAME}\n"
          comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
          if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
            comment += "(/) *{color:green}+1 overall{color}*\n"
          } else {
            comment += "(x) *{color:red}-1 overall{color}*\n"
            // Ideally get the committer out of the change and @ mention them in the per-jira comment
          }
          comment += "----\ndetails (if available):\n\n"
          echo ""
          echo "[DEBUG] trying to aggregate step-wise results"
          comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
          echo "[INFO] Comment:"
          echo comment
          echo ""
          echo "[DEBUG] checking to see if feature branch"
          def jiras = getJirasToComment(env.BRANCH_NAME, [])
          if (jiras.isEmpty()) {
            echo "[DEBUG] non-feature branch, checking change messages for jira keys."
            echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
            jiras = getJirasToCommentFromChangesets(currentBuild)
          }
          jiras.each { currentIssue ->
            jiraComment issueKey: currentIssue, body: comment
          }
        } catch (Exception exception) {
          echo "Got exception: ${exception}"
          echo "    ${exception.getStackTrace()}"
        }
      }
    }
  }
}
import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
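// Walks every changeset in the given build and collects the distinct JIRA keys mentioned in
// commit messages, so each issue can get a results comment.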
@NonCPS
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
  def seenJiras = []
  thisBuild.changeSets.each { cs ->
    cs.getItems().each { change ->
      CharSequence msg = change.msg
      echo "change: ${change}"
      echo "     ${msg}"
      echo "     ${change.commitId}"
      echo "     ${change.author}"
      echo ""
      seenJiras = getJirasToComment(msg, seenJiras)
    }
  }
  return seenJiras
}
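// Scans the given text for HBASE-NNNN issue keys and appends any key not already present in 'seen'.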
@NonCPS
List<String> getJirasToComment(CharSequence source, List<String> seen) {
  source.eachMatch("HBASE-[0-9]+") { currentIssue ->
    echo "[DEBUG] found jira key: ${currentIssue}"
    if (currentIssue in seen) {
      echo "[DEBUG] already commented on ${currentIssue}."
    } else {
      echo "[INFO] commenting on ${currentIssue}."
      seen << currentIssue
    }
  }
  return seen
}