// dev-support/Jenkinsfile — Apache HBase nightly build pipeline
1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements.  See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership.  The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License.  You may obtain a copy of the License at
8 //
9 //   http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied.  See the License for the
15 // specific language governing permissions and limitations
16 // under the License.
// Declarative pipeline for the HBase nightly build: static checks plus
// per-JDK/per-Hadoop deep checks, run in parallel on ASF 'Hadoop' nodes.
pipeline {
  agent {
    node {
      label 'Hadoop'
    }
  }
  triggers {
    // Poll source control daily; a new commit since the last poll triggers a run.
    pollSCM('@daily')
  }
  options {
    // Keep only the last 15 builds' artifacts/logs on the Jenkins master.
    buildDiscarder(logRotator(numToKeepStr: '15'))
    timeout (time: 9, unit: 'HOURS')
    timestamps()
    // We do explicit per-stage checkouts into 'component' instead of the default.
    skipDefaultCheckout()
    disableConcurrentBuilds()
  }
  environment {
    YETUS_RELEASE = '0.11.1'
    // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
    OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
    OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
    OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
    OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
    PROJECT = 'hbase'
    // Personality script that teaches Yetus about HBase-specific checks.
    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
    PERSONALITY_FILE = 'tools/personality.sh'
    // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
    AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
    WHITESPACE_IGNORE_LIST = '.*/generated/.*'
    // output from surefire; sadly the archive function in yetus only works on file names.
    ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
    // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
    TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
    // Flaky tests to exclude, published by a companion job for this branch.
    EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
      // TODO does hadoopcheck need to be jdk specific?
    SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
    DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
  }
  parameters {
    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
63   stages {
64     stage ('scm-checkout') {
65       steps {
66             dir('component') {
67               checkout scm
68             }
69       }
70     }
71     stage ('thirdparty installs') {
72       parallel {
        stage ('yetus install') {
          steps {
            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
            dir('downloads-yetus') {
              // can't just do a simple echo or the directory won't be created. :(
              sh '''#!/usr/bin/env bash
                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
            }
            // Obtain Apache Yetus: either the pinned YETUS_RELEASE (cached in the
            // workspace and signature-verified by the cache script) or, when the
            // USE_YETUS_PRERELEASE parameter is set, a fresh tarball of git HEAD.
            sh  '''#!/usr/bin/env bash
              set -e
              echo "Ensure we have a copy of Apache Yetus."
              if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
                YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
                echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
                if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
                  rm -rf "${YETUS_DIR}"
                  "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                      --working-dir "${WORKSPACE}/downloads-yetus" \
                      --keys 'https://www.apache.org/dist/yetus/KEYS' \
                      "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
                      "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
                  mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
                else
                  echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
                fi
              else
                YETUS_DIR="${WORKSPACE}/yetus-git"
                rm -rf "${YETUS_DIR}"
                echo "downloading from github"
                curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
              fi
              if [ ! -d "${YETUS_DIR}" ]; then
                echo "unpacking yetus into '${YETUS_DIR}'"
                mkdir -p "${YETUS_DIR}"
                gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
              fi
            '''
            // Set up the file we need at PERSONALITY_FILE location
            dir ("tools") {
              sh """#!/usr/bin/env bash
                set -e
                echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
                curl -L  -o personality.sh "${env.PROJECT_PERSONALITY}"
              """
            }
            // Make the yetus install and personality available to the parallel
            // check stages (which run on other nodes) via stash/unstash.
            stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
          }
        }
122         stage ('hadoop 2 cache') {
123           environment {
124             HADOOP2_VERSION="2.10.0"
125           }
126           steps {
127             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
128             dir('downloads-hadoop-2') {
129               sh '''#!/usr/bin/env bash
130                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
132             }
133             sh '''#!/usr/bin/env bash
134               set -e
135               echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
136               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
137                   --working-dir "${WORKSPACE}/downloads-hadoop-2" \
138                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
139                   "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
140                   "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
141               for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
142                 echo "Delete stale hadoop 2 cache ${stale}"
143                 rm -rf $stale
144               done
145             '''
146             stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
147           }
148         }
149         stage ('hadoop 3 cache') {
150           environment {
151             HADOOP3_VERSION="3.1.1"
152           }
153           steps {
154             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
155             dir('downloads-hadoop-3') {
156               sh '''#!/usr/bin/env bash
157                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
159             }
160             sh '''#!/usr/bin/env bash
161               set -e
162               echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
163               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
164                   --working-dir "${WORKSPACE}/downloads-hadoop-3" \
165                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
166                   "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
167                   "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
168               for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
169                 echo "Delete stale hadoop 3 cache ${stale}"
170                 rm -rf $stale
171               done
172             '''
173             stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
174           }
175         }
176       }
177     }
    stage ('init health results') {
      steps {
        // stash with given name for all tests we might run, so that we can unstash all of them even if
        // we skip some due to e.g. branch-specific JDK or Hadoop support
        // The "doesn't-match" include plus allowEmpty makes each stash validly
        // empty until (unless) its stage overwrites it with a real commentfile.
        stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
        stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
        stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
        stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
        stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
        stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
      }
    }
190     stage ('health checks') {
191       parallel {
192         stage ('yetus general check') {
193           agent {
194             node {
195               label 'Hadoop'
196             }
197           }
198           environment {
199             BASEDIR = "${env.WORKSPACE}/component"
200             TESTS = "${env.SHALLOW_CHECKS}"
201             SET_JAVA_HOME = '/usr/lib/jvm/java-8'
202             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
203             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
204           }
205           steps {
206             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
207             sh '''#!/usr/bin/env bash
208               set -e
209               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
210               echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
211               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
213             unstash 'yetus'
214             // since we have a new node definition we need to re-do the scm checkout
215             dir('component') {
216               checkout scm
217             }
218             sh '''#!/usr/bin/env bash
219               set -e
220               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
221               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
222               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
223               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
225             // TODO roll this into the hbase_nightly_yetus script
226             sh '''#!/usr/bin/env bash
227               set -e
228               declare -i status=0
229               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
230                 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
231               else
232                 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
233                 status=1
234               fi
235               echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
236               exit "${status}"
237             '''
238           }
239           post {
240             always {
241               stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
242               // Has to be relative to WORKSPACE.
243               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
244               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
245               publishHTML target: [
246                 allowMissing: true,
247                 keepAll: true,
248                 alwaysLinkToLastBuild: true,
249                 // Has to be relative to WORKSPACE
250                 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
251                 reportFiles: 'console-report.html',
252                 reportName: 'General Nightly Build Report'
253               ]
254             }
255           }
256         }
257         stage ('yetus jdk7 checks') {
258           agent {
259             node {
260               label 'Hadoop'
261             }
262           }
263           when {
264             branch 'branch-1*'
265           }
266           environment {
267             BASEDIR = "${env.WORKSPACE}/component"
268             TESTS = "${env.DEEP_CHECKS}"
269             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
270             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
271             SET_JAVA_HOME = "/usr/lib/jvm/java-7"
272           }
273           steps {
274             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
275             sh '''#!/usr/bin/env bash
276               set -e
277               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
278               echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
279               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
281             unstash 'yetus'
282             dir('component') {
283               checkout scm
284             }
285             sh '''#!/usr/bin/env bash
286               set -e
287               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
288               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
289               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
290               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
292             sh '''#!/usr/bin/env bash
293               set -e
294               declare -i status=0
295               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
296                 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
297               else
298                 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
299                 status=1
300               fi
301               echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
302               exit "${status}"
303             '''
304           }
305           post {
306             always {
307               stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
308               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
309               // zip surefire reports.
310               sh '''#!/bin/bash -e
311                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
312                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
313                   if [[ 0 -ne ${count} ]]; then
314                     echo "zipping ${count} archived files"
315                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
316                   else
317                     echo "No archived files, skipping compressing."
318                   fi
319                 else
320                   echo "No archiver directory, skipping compressing."
321                 fi
323               // Has to be relative to WORKSPACE.
324               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
325               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
326               publishHTML target: [
327                 allowMissing         : true,
328                 keepAll              : true,
329                 alwaysLinkToLastBuild: true,
330                 // Has to be relative to WORKSPACE.
331                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
332                 reportFiles          : 'console-report.html',
333                 reportName           : 'JDK7 Nightly Build Report'
334               ]
335             }
336           }
337         }
338         stage ('yetus jdk8 hadoop2 checks') {
339           agent {
340             node {
341               label 'Hadoop'
342             }
343           }
344           when {
345             anyOf { branch 'branch-1*'; branch 'branch-2*' }
346           }
347           environment {
348             BASEDIR = "${env.WORKSPACE}/component"
349             TESTS = "${env.DEEP_CHECKS}"
350             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
351             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
352             SET_JAVA_HOME = '/usr/lib/jvm/java-8'
353           }
354           steps {
355             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
356             sh '''#!/usr/bin/env bash
357               set -e
358               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
359               echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
360               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
362             unstash 'yetus'
363             dir('component') {
364               checkout scm
365             }
366             sh '''#!/usr/bin/env bash
367               set -e
368               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
369               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
370               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
371               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
373             sh '''#!/usr/bin/env bash
374               set -e
375               declare -i status=0
376               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
377                 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
378               else
379                 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
380                 status=1
381               fi
382               echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
383               exit "${status}"
384             '''
385           }
386           post {
387             always {
388               stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
389               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
390               // zip surefire reports.
391               sh '''#!/bin/bash -e
392                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
393                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
394                   if [[ 0 -ne ${count} ]]; then
395                     echo "zipping ${count} archived files"
396                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
397                   else
398                     echo "No archived files, skipping compressing."
399                   fi
400                 else
401                   echo "No archiver directory, skipping compressing."
402                 fi
404               // Has to be relative to WORKSPACE.
405               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
406               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
407               publishHTML target: [
408                 allowMissing         : true,
409                 keepAll              : true,
410                 alwaysLinkToLastBuild: true,
411                 // Has to be relative to WORKSPACE.
412                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
413                 reportFiles          : 'console-report.html',
414                 reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
415               ]
416             }
417           }
418         }
419         stage ('yetus jdk8 hadoop3 checks') {
420           agent {
421             node {
422               label 'Hadoop'
423             }
424           }
425           when {
426             not {
427               branch 'branch-1*'
428             }
429           }
430           environment {
431             BASEDIR = "${env.WORKSPACE}/component"
432             TESTS = "${env.DEEP_CHECKS}"
433             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
434             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
435             SET_JAVA_HOME = '/usr/lib/jvm/java-8'
436             // Activates hadoop 3.0 profile in maven runs.
437             HADOOP_PROFILE = '3.0'
438           }
439           steps {
440             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
441             sh '''#!/usr/bin/env bash
442               set -e
443               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
444               echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
445               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
447             unstash 'yetus'
448             dir('component') {
449               checkout scm
450             }
451             sh '''#!/usr/bin/env bash
452               set -e
453               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
454               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
455               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
456               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
458             sh '''#!/usr/bin/env bash
459               set -e
460               declare -i status=0
461               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
462                 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
463               else
464                 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
465                 status=1
466               fi
467               echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
468               exit "${status}"
469             '''
470           }
471           post {
472             always {
473               stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
474               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
475               // zip surefire reports.
476               sh '''#!/bin/bash -e
477                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
478                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
479                   if [[ 0 -ne ${count} ]]; then
480                     echo "zipping ${count} archived files"
481                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
482                   else
483                     echo "No archived files, skipping compressing."
484                   fi
485                 else
486                   echo "No archiver directory, skipping compressing."
487                 fi
489               // Has to be relative to WORKSPACE.
490               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
491               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
492               publishHTML target: [
493                 allowMissing         : true,
494                 keepAll              : true,
495                 alwaysLinkToLastBuild: true,
496                 // Has to be relative to WORKSPACE.
497                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
498                 reportFiles          : 'console-report.html',
499                 reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
500               ]
501             }
502           }
503         }
504         stage ('yetus jdk11 hadoop3 checks') {
505           agent {
506             node {
507               label 'Hadoop'
508             }
509           }
510           when {
511             not {
512               branch 'branch-1*'
513             }
514           }
515           environment {
516             BASEDIR = "${env.WORKSPACE}/component"
517             TESTS = "${env.DEEP_CHECKS}"
518             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
519             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
520             SET_JAVA_HOME = "/usr/lib/jvm/java-11"
521             // Activates hadoop 3.0 profile in maven runs.
522             HADOOP_PROFILE = '3.0'
523             // ErrorProne is broken on JDK11, see HBASE-23894
524             SKIP_ERROR_PRONE = 'true'
525           }
526           steps {
527             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
528             sh '''#!/usr/bin/env bash
529               set -e
530               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
531               echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
532               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
534             unstash 'yetus'
535             dir('component') {
536               checkout scm
537             }
538             sh '''#!/usr/bin/env bash
539               set -e
540               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
541               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
542               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
543               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
545             sh '''#!/usr/bin/env bash
546               set -e
547               declare -i status=0
548               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
549                 echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
550               else
551                 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
552                 status=1
553               fi
554               echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
555               exit "${status}"
556             '''
557           }
558           post {
559             always {
560               stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
561               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
562               // zip surefire reports.
563               sh '''#!/bin/bash -e
564                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
565                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
566                   if [[ 0 -ne ${count} ]]; then
567                     echo "zipping ${count} archived files"
568                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
569                   else
570                     echo "No archived files, skipping compressing."
571                   fi
572                 else
573                   echo "No archiver directory, skipping compressing."
574                 fi
576               // Has to be relative to WORKSPACE.
577               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
578               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
579               publishHTML target: [
580                 allowMissing         : true,
581                 keepAll              : true,
582                 alwaysLinkToLastBuild: true,
583                 // Has to be relative to WORKSPACE.
584                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
585                 reportFiles          : 'console-report.html',
586                 reportName           : 'JDK11 Nightly Build Report (Hadoop3)'
587               ]
588             }
589           }
590         }
591         // This is meant to mimic what a release manager will do to create RCs.
592         // See http://hbase.apache.org/book.html#maven.release
593         // TODO (HBASE-23870): replace this with invocation of the release tool
        stage ('packaging and integration') {
          // Tooling for the release-manager dry run: build the source tarball with
          // Maven and the release JDK, exactly as an RM would.
          tools {
            maven 'Maven (latest)'
            // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
            jdk "JDK 1.8 (latest)"
          }
          // BASEDIR points at the checked-out component under the workspace; the
          // helper scripts below are invoked out of its dev-support directory.
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
          }
          // Steps: (1) set up scratch dirs and pessimistic commentfiles (overwritten on
          // success), (2) record machine stats, (3) build source + binary artifacts via
          // hbase_nightly_source-artifact.sh, (4) unpack the bin/client tarballs, then
          // (5) run pseudo-distributed client tests on Hadoop 2, Hadoop 3, and Hadoop 3
          // with its shaded client. Each failure exits nonzero after writing its -1 comment.
          steps {
            sh '''#!/bin/bash -e
              echo "Setting up directories"
              rm -rf "output-srctarball" && mkdir "output-srctarball"
              rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
              rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
              rm -rf "hbase-install" && mkdir "hbase-install"
              rm -rf "hbase-client" && mkdir "hbase-client"
              rm -rf "hadoop-2" && mkdir "hadoop-2"
              rm -rf "hadoop-3" && mkdir "hadoop-3"
              rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
              rm -rf ".m2-for-src" && mkdir ".m2-for-src"
              echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
              echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
              echo "got the following saved stats in 'output-srctarball/machine'"
              ls -lh "output-srctarball/machine"
            sh """#!/bin/bash -e
              echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
              if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
                  --intermediate-file-dir output-srctarball \
                  --unpack-temp-dir unpacked_src_tarball \
                  --maven-m2-initial .m2-for-repo \
                  --maven-m2-src-build .m2-for-src \
                  --clean-source-checkout \
                  "${env.BASEDIR}" ; then
                echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
              else
                echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
                exit 1
              fi
            echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
            sh '''#!/bin/bash -e
              if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
                echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
                exit 1
              fi
              install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
              tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
              client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
              tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
            unstash 'hadoop-2'
            echo "Attempting to use run an instance on top of Hadoop 2."
            sh '''#!/bin/bash -xe
              artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
              tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
                  --single-process \
                  --working-dir output-integration/hadoop-2 \
                  --hbase-client-install "hbase-client" \
                  "hbase-install" \
                  "hadoop-2/bin/hadoop" \
                  hadoop-2/share/hadoop/yarn/timelineservice \
                  hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                  hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                  hadoop-2/bin/mapred \
                  >output-integration/hadoop-2.log 2>&1 ; then
                echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
                exit 2
              fi
            unstash 'hadoop-3'
            echo "Attempting to use run an instance on top of Hadoop 3."
            sh '''#!/bin/bash -e
              artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
              tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
                  --single-process \
                  --working-dir output-integration/hadoop-3 \
                  --hbase-client-install hbase-client \
                  hbase-install \
                  hadoop-3/bin/hadoop \
                  hadoop-3/share/hadoop/yarn/timelineservice \
                  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                  hadoop-3/bin/mapred \
                  >output-integration/hadoop-3.log 2>&1 ; then
                echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
                exit 2
              fi
              echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
                  --single-process \
                  --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
                  --working-dir output-integration/hadoop-3-shaded \
                  --hbase-client-install hbase-client \
                  hbase-install \
                  hadoop-3/bin/hadoop \
                  hadoop-3/share/hadoop/yarn/timelineservice \
                  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                  hadoop-3/bin/mapred \
                  >output-integration/hadoop-3-shaded.log 2>&1 ; then
                echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
                exit 2
              fi
              echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
          }
          // Always stash the two commentfiles for the pipeline-level post{} aggregation
          // step, and archive everything the stage produced for later inspection.
          post {
            always {
              stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
              archiveArtifacts artifacts: 'output-srctarball/*'
              archiveArtifacts artifacts: 'output-srctarball/**/*'
              archiveArtifacts artifacts: 'output-integration/*'
              archiveArtifacts artifacts: 'output-integration/**/*'
            }
          }
        }
721       }
722     }
723   }
  post {
    always {
      // Aggregate the per-stage commentfiles (stashed by each stage's post{} block)
      // into a single JIRA-markup comment and post it to every JIRA issue referenced
      // by this branch name or by the build's commit messages. Failures here are
      // swallowed so reporting problems never change the build result.
      script {
         try {
           // Pull every stage's stashed results into this node's workspace.
           unstash 'general-result'
           unstash 'jdk7-result'
           unstash 'jdk8-hadoop2-result'
           unstash 'jdk8-hadoop3-result'
           unstash 'jdk11-hadoop3-result'
           unstash 'srctarball-result'
           sh "printenv"
           // One commentfile per stage; missing files are tolerated below.
           def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
                          'output-srctarball/commentfile',
                          'output-integration/commentfile']
           echo env.BRANCH_NAME
           echo env.BUILD_URL
           echo currentBuild.result
           echo currentBuild.durationString
           // Build the JIRA comment: overall verdict first, then per-stage details.
           def comment = "Results for branch ${env.BRANCH_NAME}\n"
           comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
           // A null result means the build is (so far) successful.
           if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
              comment += "(/) *{color:green}+1 overall{color}*\n"
           } else {
              comment += "(x) *{color:red}-1 overall{color}*\n"
              // Ideally get the committer out of the change and @ mention them in the per-jira comment
           }
           comment += "----\ndetails (if available):\n\n"
           echo ""
           echo "[DEBUG] trying to aggregate step-wise results"
           // Stages that never ran leave no commentfile; substitute an empty string.
           comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
           echo "[INFO] Comment:"
           echo comment
           echo ""
           echo "[DEBUG] checking to see if feature branch"
           // Feature branches (HBASE-NNNN-based names) yield their jira key directly;
           // otherwise fall back to scanning this build's changesets for keys.
           def jiras = getJirasToComment(env.BRANCH_NAME, [])
           if (jiras.isEmpty()) {
             echo "[DEBUG] non-feature branch, checking change messages for jira keys."
             echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
             jiras = getJirasToCommentFromChangesets(currentBuild)
           }
           jiras.each { currentIssue ->
             jiraComment issueKey: currentIssue, body: comment
           }
        } catch (Exception exception) {
          // Best-effort reporting: log and move on rather than failing the build.
          echo "Got exception: ${exception}"
          echo "    ${exception.getStackTrace()}"
        }
      }
    }
  }
779 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
// Scans every changeset of the given build for JIRA issue keys (HBASE-NNNN)
// mentioned in commit messages, and returns the de-duplicated list of keys
// to comment on. Marked @NonCPS because it iterates the SCM change objects,
// which are not Serializable and so cannot cross a CPS continuation boundary.
@NonCPS
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
  def seenJiras = []
  thisBuild.changeSets.each { cs ->
    cs.getItems().each { change ->
      CharSequence msg = change.msg
      echo "change: ${change}"
      echo "     ${msg}"
      echo "     ${change.commitId}"
      echo "     ${change.author}"
      echo ""
      // Accumulate keys from this commit message, skipping any already seen.
      seenJiras = getJirasToComment(msg, seenJiras)
    }
  }
  return seenJiras
// Appends to 'seen' every HBASE-NNNN issue key found in 'source' (a branch
// name or commit message) that is not already present, and returns the list.
// Note: mutates and returns the 'seen' argument rather than copying it.
@NonCPS
List<String> getJirasToComment(CharSequence source, List<String> seen) {
  source.eachMatch("HBASE-[0-9]+") { currentIssue ->
    echo "[DEBUG] found jira key: ${currentIssue}"
    if (currentIssue in seen) {
      // Already recorded (or commented on) this issue; avoid duplicates.
      echo "[DEBUG] already commented on ${currentIssue}."
    } else {
      echo "[INFO] commenting on ${currentIssue}."
      seen << currentIssue
    }
  }
  return seen