// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
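// Nightly build pipeline: runs Apache Yetus checks across the JDK/Hadoop combinations
// supported by the current branch, exercises the release packaging and client
// integration steps, and posts an aggregate result comment to the relevant JIRA issues.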
pipeline {
  agent {
    node {
      label 'hbase'
    }
  }
  triggers {
    pollSCM('@daily')
  }
  options {
    buildDiscarder(logRotator(numToKeepStr: '20'))
    timeout (time: 16, unit: 'HOURS')
    timestamps()
    skipDefaultCheckout()
    disableConcurrentBuilds()
  }
  environment {
    YETUS_RELEASE = '0.12.0'
    // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
    OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
    OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
    OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
    OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'

    PROJECT = 'hbase'
    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
    PERSONALITY_FILE = 'tools/personality.sh'
    // This section of the docs tells folks not to use the @author javadoc tag. Older branches have our old version of the check for said tag.
    AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
    WHITESPACE_IGNORE_LIST = '.*/generated/.*'
    // Output from surefire; sadly the archive function in yetus only works on file names.
    ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
    // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
    TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
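    // Pull the list of flaky tests to exclude from the flaky-test finder job for this branch.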
    EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
    // TODO does hadoopcheck need to be jdk specific?
    SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
    DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
    ASF_NIGHTLIES = 'https://nightlies.apache.org'
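    // JOB_NAME can contain spaces, so percent-encode them before using the value in URLs.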
    ASF_NIGHTLIES_BASE_ORI = "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}"
    ASF_NIGHTLIES_BASE = "${ASF_NIGHTLIES_BASE_ORI.replaceAll(' ', '%20')}"
  }
  parameters {
    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.

    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
  stages {
    stage ('scm-checkout') {
      steps {
        dir('component') {
          checkout scm
        }
      }
    }
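    // Fetch Yetus and the Hadoop tarballs once up front and stash them, so the
    // parallel check stages can unstash instead of re-downloading.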
    stage ('thirdparty installs') {
      parallel {
        stage ('yetus install') {
          steps {
            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
            dir('downloads-yetus') {
              // can't just do a simple echo or the directory won't be created. :(
              sh '''#!/usr/bin/env bash
                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
            }
            sh '''#!/usr/bin/env bash
              set -e
              echo "Ensure we have a copy of Apache Yetus."
              if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
                YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
                echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
                if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
                  rm -rf "${YETUS_DIR}"
                  "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                      --working-dir "${WORKSPACE}/downloads-yetus" \
                      --keys 'https://www.apache.org/dist/yetus/KEYS' \
                      --verify-tar-gz \
                      "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
                      "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
                  mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
                else
                  echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
                fi
              else
                YETUS_DIR="${WORKSPACE}/yetus-git"
                rm -rf "${YETUS_DIR}"
                echo "downloading from github"
                curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
              fi
              if [ ! -d "${YETUS_DIR}" ]; then
                echo "unpacking yetus into '${YETUS_DIR}'"
                mkdir -p "${YETUS_DIR}"
                gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
              fi
            '''
            // Set up the file we need at PERSONALITY_FILE location
            dir ("tools") {
              sh """#!/usr/bin/env bash
                set -e
                echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
                curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
              """
            }
            stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
          }
        }
        stage ('hadoop 2 cache') {
          environment {
            HADOOP2_VERSION="2.10.0"
          }
          steps {
            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
            dir('downloads-hadoop-2') {
              sh '''#!/usr/bin/env bash
                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
            }
            sh '''#!/usr/bin/env bash
              set -e
              echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
              "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                  --working-dir "${WORKSPACE}/downloads-hadoop-2" \
                  --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
                  --verify-tar-gz \
                  "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
                  "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
              for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
                echo "Delete stale hadoop 2 cache ${stale}"
                rm -rf $stale
              done
            '''
            stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
          }
        }
        stage ('hadoop 3 cache') {
          environment {
            HADOOP3_VERSION="3.1.1"
          }
          steps {
            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
            dir('downloads-hadoop-3') {
              sh '''#!/usr/bin/env bash
                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
            }
            sh '''#!/usr/bin/env bash
              set -e
              echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
              "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                  --working-dir "${WORKSPACE}/downloads-hadoop-3" \
                  --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
                  --verify-tar-gz \
                  "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
                  "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
              for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
                echo "Delete stale hadoop 3 cache ${stale}"
                rm -rf $stale
              done
            '''
            stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
          }
        }
      }
    }
    stage ('init health results') {
      steps {
        // stash with given name for all tests we might run, so that we can unstash all of them even if
        // we skip some due to e.g. branch-specific JDK or Hadoop support
        stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
        stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
        stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
        stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
        stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
        stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
      }
    }
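    // Each stage below runs on its own executor, re-checks out the source, runs the
    // appropriate Yetus checks, and stashes a commentfile that the final post step
    // aggregates into the JIRA comment.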
    stage ('health checks') {
      parallel {
        stage ('yetus general check') {
          agent {
            node {
              label 'hbase'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = "${env.SHALLOW_CHECKS}"
            SET_JAVA_HOME = '/usr/lib/jvm/java-8'
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
            ASF_NIGHTLIES_GENERAL_CHECK_BASE="${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}"
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            // since we have a new node definition we need to re-do the scm checkout
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            // TODO roll this into the hbase_nightly_yetus script
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/*-site/*,${env.OUTPUT_DIR_RELATIVE}/*-site/**/*"
                    )
                  ]
                )
              ])
              sh '''#!/bin/bash -e
              if [ -d "${OUTPUT_DIR}/branch-site" ]; then
                echo "Remove ${OUTPUT_DIR}/branch-site for saving space"
                rm -rf "${OUTPUT_DIR}/branch-site"
                python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/branch-site" > "${OUTPUT_DIR}/branch-site.html"
              else
                echo "No branch-site, skipping"
              fi
              if [ -d "${OUTPUT_DIR}/patch-site" ]; then
                echo "Remove ${OUTPUT_DIR}/patch-site for saving space"
                rm -rf "${OUTPUT_DIR}/patch-site"
                python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/patch-site" > "${OUTPUT_DIR}/patch-site.html"
              else
                echo "No patch-site, skipping"
              fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing: true,
                keepAll: true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE
                reportDir: "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles: 'console-report.html',
                reportName: 'General Nightly Build Report'
              ]
            }
          }
        }
        stage ('yetus jdk7 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
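          // JDK7 checks only apply to branch-1.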
          when {
            branch 'branch-1*'
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = "${env.DEEP_CHECKS}"
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
            SET_JAVA_HOME = "/usr/lib/jvm/java-7"
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
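                    # "zip -m" moves files into the archive as it adds them, reclaiming workspace disk space.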
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK7 Nightly Build Report'
              ]
            }
          }
        }
        stage ('yetus jdk8 hadoop2 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
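          // Hadoop 2 checks only apply to branch-1 and branch-2.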
          when {
            anyOf { branch 'branch-1*'; branch 'branch-2*' }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = "${env.DEEP_CHECKS}"
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
            SET_JAVA_HOME = '/usr/lib/jvm/java-8'
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
              ]
            }
          }
        }
        stage ('yetus jdk8 hadoop3 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
          when {
            not {
              branch 'branch-1*'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = "${env.DEEP_CHECKS}"
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
            SET_JAVA_HOME = '/usr/lib/jvm/java-8'
            // Activates hadoop 3.0 profile in maven runs.
            HADOOP_PROFILE = '3.0'
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
              ]
            }
          }
        }
        stage ('yetus jdk11 hadoop3 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
          when {
            not {
              branch 'branch-1*'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = "${env.DEEP_CHECKS}"
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
            SET_JAVA_HOME = "/usr/lib/jvm/java-11"
            // Activates hadoop 3.0 profile in maven runs.
            HADOOP_PROFILE = '3.0'
            // ErrorProne is broken on JDK11, see HBASE-23894
            SKIP_ERROR_PRONE = 'true'
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK11 Nightly Build Report (Hadoop3)'
              ]
            }
          }
        }
        // This is meant to mimic what a release manager will do to create RCs.
        // See http://hbase.apache.org/book.html#maven.release
        // TODO (HBASE-23870): replace this with invocation of the release tool
        stage ('packaging and integration') {
          tools {
            maven 'maven_latest'
            // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
            jdk "jdk_1.8_latest"
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            BRANCH = "${env.BRANCH_NAME}"
          }
          steps {
            sh '''#!/bin/bash -e
              echo "Setting up directories"
              rm -rf "output-srctarball" && mkdir "output-srctarball"
              rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
              rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
              rm -rf "hbase-install" && mkdir "hbase-install"
              rm -rf "hbase-client" && mkdir "hbase-client"
              rm -rf "hadoop-2" && mkdir "hadoop-2"
              rm -rf "hadoop-3" && mkdir "hadoop-3"
              rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
              rm -rf ".m2-for-src" && mkdir ".m2-for-src"
              echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
              echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
            '''
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
              echo "got the following saved stats in 'output-srctarball/machine'"
              ls -lh "output-srctarball/machine"
            '''
            sh """#!/bin/bash -e
              echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
              if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
                  --intermediate-file-dir output-srctarball \
                  --unpack-temp-dir unpacked_src_tarball \
                  --maven-m2-initial .m2-for-repo \
                  --maven-m2-src-build .m2-for-src \
                  --clean-source-checkout \
                  "${env.BASEDIR}" ; then
                echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
              else
                echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
                exit 1
              fi
            """
            echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
            sh '''#!/bin/bash -e
              if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
                echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
                exit 1
              fi
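              # Two tarballs are expected (the full install and the client); sorted
              # lexicographically, the install tarball lists first and the client tarball last.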
              install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
              tar --strip-components=1 -xzf "${install_artifact}" -C "hbase-install"
              client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
              tar --strip-components=1 -xzf "${client_artifact}" -C "hbase-client"
            '''
            unstash 'hadoop-2'
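            // Client integration test against Hadoop 2; only branch-1 and branch-2 run it.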
            sh '''#!/bin/bash -xe
              if [[ "${BRANCH}" = branch-2* ]] || [[ "${BRANCH}" = branch-1* ]]; then
                echo "Attempting to run an instance on top of Hadoop 2."
                artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
                tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
                if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
                    --single-process \
                    --working-dir output-integration/hadoop-2 \
                    --hbase-client-install "hbase-client" \
                    "hbase-install" \
                    "hadoop-2/bin/hadoop" \
                    hadoop-2/share/hadoop/yarn/timelineservice \
                    hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                    hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                    hadoop-2/bin/mapred \
                    >output-integration/hadoop-2.log 2>&1 ; then
                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
                  exit 2
                fi
              else
                echo "Skipping running against Hadoop 2 for branch ${BRANCH}"
              fi
            '''
            unstash 'hadoop-3'
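            // Client integration test against Hadoop 3 (skipped on branch-1), first with the
            // classpath from the Hadoop install, then again with Hadoop's shaded client jars.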
            sh '''#!/bin/bash -e
              if [[ "${BRANCH}" = branch-1* ]]; then
                echo "Skipping running against Hadoop 3 for branch ${BRANCH}"
              else
                echo "Attempting to run an instance on top of Hadoop 3."
                artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
                tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
                if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
                    --single-process \
                    --working-dir output-integration/hadoop-3 \
                    --hbase-client-install hbase-client \
                    hbase-install \
                    hadoop-3/bin/hadoop \
                    hadoop-3/share/hadoop/yarn/timelineservice \
                    hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                    hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                    hadoop-3/bin/mapred \
                    >output-integration/hadoop-3.log 2>&1 ; then
                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
                  exit 2
                fi
                echo "Attempting to run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
                if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
                    --single-process \
                    --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
                    --working-dir output-integration/hadoop-3-shaded \
                    --hbase-client-install hbase-client \
                    hbase-install \
                    hadoop-3/bin/hadoop \
                    hadoop-3/share/hadoop/yarn/timelineservice \
                    hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                    hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                    hadoop-3/bin/mapred \
                    >output-integration/hadoop-3-shaded.log 2>&1 ; then
                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
                  exit 2
                fi
                echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
              fi
            '''
          }
          post {
            always {
              stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "output-srctarball/hbase-src.tar.gz"
                    )
                  ]
                )
              ])
              // remove the big src tarball, store the nightlies url in hbase-src.html
              sh '''#!/bin/bash -e
                SRC_TAR="${WORKSPACE}/output-srctarball/hbase-src.tar.gz"
                if [ -f "${SRC_TAR}" ]; then
                  echo "Remove ${SRC_TAR} for saving space"
                  rm -rf "${SRC_TAR}"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/output-srctarball" > "${WORKSPACE}/output-srctarball/hbase-src.html"
                else
                  echo "No hbase-src.tar.gz, skipping"
                fi
              '''
              archiveArtifacts artifacts: 'output-srctarball/*'
              archiveArtifacts artifacts: 'output-srctarball/**/*'
              archiveArtifacts artifacts: 'output-integration/*'
              archiveArtifacts artifacts: 'output-integration/**/*'
            }
          }
        }
      }
    }
  }
  post {
    always {
      script {
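        // Aggregate the per-stage commentfiles into a single summary and post it to every
        // JIRA key found for this branch (feature branch name or changeset messages).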
        try {
          unstash 'general-result'
          unstash 'jdk7-result'
          unstash 'jdk8-hadoop2-result'
          unstash 'jdk8-hadoop3-result'
          unstash 'jdk11-hadoop3-result'
          unstash 'srctarball-result'
          sh "printenv"
          def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
                         "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
                         "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
                         "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
                         "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
                         'output-srctarball/commentfile',
                         'output-integration/commentfile']
          echo env.BRANCH_NAME
          echo env.BUILD_URL
          echo currentBuild.result
          echo currentBuild.durationString
          def comment = "Results for branch ${env.BRANCH_NAME}\n"
          comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
          if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
            comment += "(/) *{color:green}+1 overall{color}*\n"
          } else {
            comment += "(x) *{color:red}-1 overall{color}*\n"
            // Ideally get the committer out of the change and @ mention them in the per-jira comment
          }
          comment += "----\ndetails (if available):\n\n"
          echo ""
          echo "[DEBUG] trying to aggregate step-wise results"
          comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
          echo "[INFO] Comment:"
          echo comment
          echo ""
          echo "[DEBUG] checking to see if feature branch"
          def jiras = getJirasToComment(env.BRANCH_NAME, [])
          if (jiras.isEmpty()) {
            echo "[DEBUG] non-feature branch, checking change messages for jira keys."
            echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
            jiras = getJirasToCommentFromChangesets(currentBuild)
          }
          jiras.each { currentIssue ->
            jiraComment issueKey: currentIssue, body: comment
          }
        } catch (Exception exception) {
          echo "Got exception: ${exception}"
          echo "    ${exception.getStackTrace()}"
        }
      }
    }
  }
}
import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
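
// Walk this build's changesets and collect the HBASE-NNNN keys mentioned in commit messages.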
@NonCPS
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
  def seenJiras = []
  thisBuild.changeSets.each { cs ->
    cs.getItems().each { change ->
      CharSequence msg = change.msg
      echo "change: ${change}"
      echo "     ${msg}"
      echo "     ${change.commitId}"
      echo "     ${change.author}"
      echo ""
      seenJiras = getJirasToComment(msg, seenJiras)
    }
  }
  return seenJiras
}
@NonCPS
List<String> getJirasToComment(CharSequence source, List<String> seen) {
  source.eachMatch("HBASE-[0-9]+") { currentIssue ->
    echo "[DEBUG] found jira key: ${currentIssue}"
    if (currentIssue in seen) {
      echo "[DEBUG] already commented on ${currentIssue}."
    } else {
      echo "[INFO] commenting on ${currentIssue}."
      seen << currentIssue
    }
  }
  return seen
}