// Source: dev-support/Jenkinsfile (hbase.git)
// Commit: HBASE-22923 Consider minVersionToMoveSysTables while moving region and creating region
// Blob: f3de8edffcbec01a25372229f47c7b034f519e37
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
// Nightly build pipeline: runs yetus checks per-JDK/per-Hadoop plus RM-style packaging.
pipeline {
  agent {
    node {
      label 'hbase'
    }
  }
  triggers {
    pollSCM('@daily')
  }
  options {
    buildDiscarder(logRotator(numToKeepStr: '15'))
    timeout (time: 16, unit: 'HOURS')
    timestamps()
    skipDefaultCheckout()
    disableConcurrentBuilds()
  }
  environment {
    YETUS_RELEASE = '0.12.0'
    // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
    OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
    OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
    OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
    OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'

    PROJECT = 'hbase'
    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
    PERSONALITY_FILE = 'tools/personality.sh'
    // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
    AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
    WHITESPACE_IGNORE_LIST = '.*/generated/.*'
    // output from surefire; sadly the archive function in yetus only works on file names.
    ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
    // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
    TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
    EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
    // TODO does hadoopcheck need to be jdk specific?
    SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
    DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
  }
  parameters {
    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.

    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
  stages {
64     stage ('scm-checkout') {
65       steps {
66             dir('component') {
67               checkout scm
68             }
69       }
70     }
71     stage ('thirdparty installs') {
72       parallel {
73         stage ('yetus install') {
74           steps {
75             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
76             dir('downloads-yetus') {
77               // can't just do a simple echo or the directory won't be created. :(
78               sh '''#!/usr/bin/env bash
79                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
80 '''
81             }
82             sh  '''#!/usr/bin/env bash
83               set -e
84               echo "Ensure we have a copy of Apache Yetus."
85               if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
86                 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
87                 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
88                 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
89                   rm -rf "${YETUS_DIR}"
90                   "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
91                       --working-dir "${WORKSPACE}/downloads-yetus" \
92                       --keys 'https://www.apache.org/dist/yetus/KEYS' \
93                       "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
94                       "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
95                   mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
96                 else
97                   echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
98                 fi
99               else
100                 YETUS_DIR="${WORKSPACE}/yetus-git"
101                 rm -rf "${YETUS_DIR}"
102                 echo "downloading from github"
103                 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
104               fi
105               if [ ! -d "${YETUS_DIR}" ]; then
106                 echo "unpacking yetus into '${YETUS_DIR}'"
107                 mkdir -p "${YETUS_DIR}"
108                 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
109               fi
110             '''
111             // Set up the file we need at PERSONALITY_FILE location
112             dir ("tools") {
113               sh """#!/usr/bin/env bash
114                 set -e
115                 echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
116                 curl -L  -o personality.sh "${env.PROJECT_PERSONALITY}"
117               """
118             }
119             stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
120           }
121         }
122         stage ('hadoop 2 cache') {
123           environment {
124             HADOOP2_VERSION="2.10.0"
125           }
126           steps {
127             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
128             dir('downloads-hadoop-2') {
129               sh '''#!/usr/bin/env bash
130                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
132             }
133             sh '''#!/usr/bin/env bash
134               set -e
135               echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
136               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
137                   --working-dir "${WORKSPACE}/downloads-hadoop-2" \
138                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
139                   "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
140                   "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
141               for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
142                 echo "Delete stale hadoop 2 cache ${stale}"
143                 rm -rf $stale
144               done
145             '''
146             stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
147           }
148         }
149         stage ('hadoop 3 cache') {
150           environment {
151             HADOOP3_VERSION="3.1.1"
152           }
153           steps {
154             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
155             dir('downloads-hadoop-3') {
156               sh '''#!/usr/bin/env bash
157                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
159             }
160             sh '''#!/usr/bin/env bash
161               set -e
162               echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
163               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
164                   --working-dir "${WORKSPACE}/downloads-hadoop-3" \
165                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
166                   "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
167                   "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
168               for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
169                 echo "Delete stale hadoop 3 cache ${stale}"
170                 rm -rf $stale
171               done
172             '''
173             stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
174           }
175         }
176       }
177     }
178     stage ('init health results') {
179       steps {
180         // stash with given name for all tests we might run, so that we can unstash all of them even if
181         // we skip some due to e.g. branch-specific JDK or Hadoop support
182         stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
183         stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
184         stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
185         stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
186         stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
187         stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
188       }
189     }
190     stage ('health checks') {
191       parallel {
192         stage ('yetus general check') {
193           agent {
194             node {
195               label 'hbase'
196             }
197           }
198           environment {
199             BASEDIR = "${env.WORKSPACE}/component"
200             TESTS = "${env.SHALLOW_CHECKS}"
201             SET_JAVA_HOME = '/usr/lib/jvm/java-8'
202             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
203             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
204           }
205           steps {
206             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
207             sh '''#!/usr/bin/env bash
208               set -e
209               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
210               echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
211               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
213             unstash 'yetus'
214             // since we have a new node definition we need to re-do the scm checkout
215             dir('component') {
216               checkout scm
217             }
218             sh '''#!/usr/bin/env bash
219               set -e
220               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
221               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
222               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
223               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
225             // TODO roll this into the hbase_nightly_yetus script
226             sh '''#!/usr/bin/env bash
227               set -e
228               declare -i status=0
229               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
230                 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
231               else
232                 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
233                 status=1
234               fi
235               echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
236               exit "${status}"
237             '''
238           }
239           post {
240             always {
241               stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
242               // Has to be relative to WORKSPACE.
243               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
244               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
245               publishHTML target: [
246                 allowMissing: true,
247                 keepAll: true,
248                 alwaysLinkToLastBuild: true,
249                 // Has to be relative to WORKSPACE
250                 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
251                 reportFiles: 'console-report.html',
252                 reportName: 'General Nightly Build Report'
253               ]
254             }
255           }
256         }
257         stage ('yetus jdk7 checks') {
258           agent {
259             node {
260               label 'hbase'
261             }
262           }
263           when {
264             branch 'branch-1*'
265           }
266           environment {
267             BASEDIR = "${env.WORKSPACE}/component"
268             TESTS = "${env.DEEP_CHECKS}"
269             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
270             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
271             SET_JAVA_HOME = "/usr/lib/jvm/java-7"
272           }
273           steps {
274             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
275             sh '''#!/usr/bin/env bash
276               set -e
277               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
278               echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
279               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
281             unstash 'yetus'
282             dir('component') {
283               checkout scm
284             }
285             sh '''#!/usr/bin/env bash
286               set -e
287               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
288               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
289               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
290               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
292             sh '''#!/usr/bin/env bash
293               set -e
294               declare -i status=0
295               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
296                 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
297               else
298                 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
299                 status=1
300               fi
301               echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
302               exit "${status}"
303             '''
304           }
305           post {
306             always {
307               stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
308               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
309               // zip surefire reports.
310               sh '''#!/bin/bash -e
311                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
312                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
313                   if [[ 0 -ne ${count} ]]; then
314                     echo "zipping ${count} archived files"
315                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
316                   else
317                     echo "No archived files, skipping compressing."
318                   fi
319                 else
320                   echo "No archiver directory, skipping compressing."
321                 fi
323               // Has to be relative to WORKSPACE.
324               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
325               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
326               publishHTML target: [
327                 allowMissing         : true,
328                 keepAll              : true,
329                 alwaysLinkToLastBuild: true,
330                 // Has to be relative to WORKSPACE.
331                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
332                 reportFiles          : 'console-report.html',
333                 reportName           : 'JDK7 Nightly Build Report'
334               ]
335             }
336           }
337         }
338         stage ('yetus jdk8 hadoop2 checks') {
339           agent {
340             node {
341               label 'hbase'
342             }
343           }
344           when {
345             anyOf { branch 'branch-1*'; branch 'branch-2*' }
346           }
347           environment {
348             BASEDIR = "${env.WORKSPACE}/component"
349             TESTS = "${env.DEEP_CHECKS}"
350             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
351             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
352             SET_JAVA_HOME = '/usr/lib/jvm/java-8'
353           }
354           steps {
355             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
356             sh '''#!/usr/bin/env bash
357               set -e
358               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
359               echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
360               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
362             unstash 'yetus'
363             dir('component') {
364               checkout scm
365             }
366             sh '''#!/usr/bin/env bash
367               set -e
368               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
369               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
370               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
371               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
373             sh '''#!/usr/bin/env bash
374               set -e
375               declare -i status=0
376               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
377                 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
378               else
379                 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
380                 status=1
381               fi
382               echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
383               exit "${status}"
384             '''
385           }
386           post {
387             always {
388               stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
389               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
390               // zip surefire reports.
391               sh '''#!/bin/bash -e
392                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
393                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
394                   if [[ 0 -ne ${count} ]]; then
395                     echo "zipping ${count} archived files"
396                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
397                   else
398                     echo "No archived files, skipping compressing."
399                   fi
400                 else
401                   echo "No archiver directory, skipping compressing."
402                 fi
404               // Has to be relative to WORKSPACE.
405               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
406               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
407               publishHTML target: [
408                 allowMissing         : true,
409                 keepAll              : true,
410                 alwaysLinkToLastBuild: true,
411                 // Has to be relative to WORKSPACE.
412                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
413                 reportFiles          : 'console-report.html',
414                 reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
415               ]
416             }
417           }
418         }
419         stage ('yetus jdk8 hadoop3 checks') {
420           agent {
421             node {
422               label 'hbase'
423             }
424           }
425           when {
426             not {
427               branch 'branch-1*'
428             }
429           }
430           environment {
431             BASEDIR = "${env.WORKSPACE}/component"
432             TESTS = "${env.DEEP_CHECKS}"
433             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
434             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
435             SET_JAVA_HOME = '/usr/lib/jvm/java-8'
436             // Activates hadoop 3.0 profile in maven runs.
437             HADOOP_PROFILE = '3.0'
438           }
439           steps {
440             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
441             sh '''#!/usr/bin/env bash
442               set -e
443               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
444               echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
445               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
447             unstash 'yetus'
448             dir('component') {
449               checkout scm
450             }
451             sh '''#!/usr/bin/env bash
452               set -e
453               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
454               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
455               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
456               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
458             sh '''#!/usr/bin/env bash
459               set -e
460               declare -i status=0
461               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
462                 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
463               else
464                 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
465                 status=1
466               fi
467               echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
468               exit "${status}"
469             '''
470           }
471           post {
472             always {
473               stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
474               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
475               // zip surefire reports.
476               sh '''#!/bin/bash -e
477                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
478                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
479                   if [[ 0 -ne ${count} ]]; then
480                     echo "zipping ${count} archived files"
481                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
482                   else
483                     echo "No archived files, skipping compressing."
484                   fi
485                 else
486                   echo "No archiver directory, skipping compressing."
487                 fi
489               // Has to be relative to WORKSPACE.
490               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
491               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
492               publishHTML target: [
493                 allowMissing         : true,
494                 keepAll              : true,
495                 alwaysLinkToLastBuild: true,
496                 // Has to be relative to WORKSPACE.
497                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
498                 reportFiles          : 'console-report.html',
499                 reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
500               ]
501             }
502           }
503         }
504         stage ('yetus jdk11 hadoop3 checks') {
505           agent {
506             node {
507               label 'hbase'
508             }
509           }
510           when {
511             not {
512               branch 'branch-1*'
513             }
514           }
515           environment {
516             BASEDIR = "${env.WORKSPACE}/component"
517             TESTS = "${env.DEEP_CHECKS}"
518             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
519             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
520             SET_JAVA_HOME = "/usr/lib/jvm/java-11"
521             // Activates hadoop 3.0 profile in maven runs.
522             HADOOP_PROFILE = '3.0'
523             // ErrorProne is broken on JDK11, see HBASE-23894
524             SKIP_ERROR_PRONE = 'true'
525           }
526           steps {
527             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
528             sh '''#!/usr/bin/env bash
529               set -e
530               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
531               echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
532               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
534             unstash 'yetus'
535             dir('component') {
536               checkout scm
537             }
538             sh '''#!/usr/bin/env bash
539               set -e
540               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
541               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
542               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
543               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
545             sh '''#!/usr/bin/env bash
546               set -e
547               declare -i status=0
548               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
549                 echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
550               else
551                 echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
552                 status=1
553               fi
554               echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
555               exit "${status}"
556             '''
557           }
558           post {
559             always {
560               stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
561               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
562               // zip surefire reports.
563               sh '''#!/bin/bash -e
564                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
565                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
566                   if [[ 0 -ne ${count} ]]; then
567                     echo "zipping ${count} archived files"
568                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
569                   else
570                     echo "No archived files, skipping compressing."
571                   fi
572                 else
573                   echo "No archiver directory, skipping compressing."
574                 fi
576               // Has to be relative to WORKSPACE.
577               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
578               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
579               publishHTML target: [
580                 allowMissing         : true,
581                 keepAll              : true,
582                 alwaysLinkToLastBuild: true,
583                 // Has to be relative to WORKSPACE.
584                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
585                 reportFiles          : 'console-report.html',
586                 reportName           : 'JDK11 Nightly Build Report (Hadoop3)'
587               ]
588             }
589           }
590         }
        // This is meant to mimic what a release manager will do to create RCs.
        // See http://hbase.apache.org/book.html#maven.release
        // TODO (HBASE-23870): replace this with invocation of the release tool
594         stage ('packaging and integration') {
595           tools {
596             maven 'maven_latest'
597             // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
598             jdk "jdk_1.8_latest"
599           }
600           environment {
601             BASEDIR = "${env.WORKSPACE}/component"
602             BRANCH = "${env.BRANCH_NAME}"
603           }
604           steps {
605             sh '''#!/bin/bash -e
606               echo "Setting up directories"
607               rm -rf "output-srctarball" && mkdir "output-srctarball"
608               rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
609               rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
610               rm -rf "hbase-install" && mkdir "hbase-install"
611               rm -rf "hbase-client" && mkdir "hbase-client"
612               rm -rf "hadoop-2" && mkdir "hadoop-2"
613               rm -rf "hadoop-3" && mkdir "hadoop-3"
614               rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
615               rm -rf ".m2-for-src" && mkdir ".m2-for-src"
616               echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
617               echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
619             sh '''#!/usr/bin/env bash
620               set -e
621               rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
622               "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
623               echo "got the following saved stats in 'output-srctarball/machine'"
624               ls -lh "output-srctarball/machine"
626             sh """#!/bin/bash -e
627               echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
628               if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
629                   --intermediate-file-dir output-srctarball \
630                   --unpack-temp-dir unpacked_src_tarball \
631                   --maven-m2-initial .m2-for-repo \
632                   --maven-m2-src-build .m2-for-src \
633                   --clean-source-checkout \
634                   "${env.BASEDIR}" ; then
635                 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
636               else
637                 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
638                 exit 1
639               fi
641             echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
642             sh '''#!/bin/bash -e
643               if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
644                 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
645                 exit 1
646               fi
647               install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
648               tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
649               client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
650               tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
652             unstash 'hadoop-2'
653             sh '''#!/bin/bash -xe
654               if [[ "${BRANCH}" = branch-2* ]] || [[ "${BRANCH}" = branch-1* ]]; then
655                 echo "Attempting to use run an instance on top of Hadoop 2."
656                 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
657                 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
658                 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
659                     --single-process \
660                     --working-dir output-integration/hadoop-2 \
661                     --hbase-client-install "hbase-client" \
662                     "hbase-install" \
663                     "hadoop-2/bin/hadoop" \
664                     hadoop-2/share/hadoop/yarn/timelineservice \
665                     hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
666                     hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
667                     hadoop-2/bin/mapred \
668                     >output-integration/hadoop-2.log 2>&1 ; then
669                   echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
670                   exit 2
671                 fi
672               else
673                 echo "Skipping to run against Hadoop 2 for branch ${BRANCH}"
674               fi
676             unstash 'hadoop-3'
677             sh '''#!/bin/bash -e
678               if [[ "${BRANCH}" = branch-1* ]]; then
679                 echo "Skipping to run against Hadoop 3 for branch ${BRANCH}"
680               else
681                 echo "Attempting to use run an instance on top of Hadoop 3."
682                 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
683                 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
684                 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
685                     --single-process \
686                     --working-dir output-integration/hadoop-3 \
687                     --hbase-client-install hbase-client \
688                     hbase-install \
689                     hadoop-3/bin/hadoop \
690                     hadoop-3/share/hadoop/yarn/timelineservice \
691                     hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
692                     hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
693                     hadoop-3/bin/mapred \
694                     >output-integration/hadoop-3.log 2>&1 ; then
695                   echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
696                   exit 2
697                 fi
698                 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
699                 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
700                     --single-process \
701                     --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
702                     --working-dir output-integration/hadoop-3-shaded \
703                     --hbase-client-install hbase-client \
704                     hbase-install \
705                     hadoop-3/bin/hadoop \
706                     hadoop-3/share/hadoop/yarn/timelineservice \
707                     hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
708                     hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
709                     hadoop-3/bin/mapred \
710                     >output-integration/hadoop-3-shaded.log 2>&1 ; then
711                   echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
712                   exit 2
713                 fi
714                 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
715               fi
719           }
720           post {
721             always {
722               stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
723               archiveArtifacts artifacts: 'output-srctarball/*'
724               archiveArtifacts artifacts: 'output-srctarball/**/*'
725               archiveArtifacts artifacts: 'output-integration/*'
726               archiveArtifacts artifacts: 'output-integration/**/*'
727             }
728           }
729         }
730       }
731     }
732   }
733   post {
734     always {
735       script {
736          try {
737            unstash 'general-result'
738            unstash 'jdk7-result'
739            unstash 'jdk8-hadoop2-result'
740            unstash 'jdk8-hadoop3-result'
741            unstash 'jdk11-hadoop3-result'
742            unstash 'srctarball-result'
743            sh "printenv"
744            def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
745                           "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
746                           "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
747                           "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
748                           "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
749                           'output-srctarball/commentfile',
750                           'output-integration/commentfile']
751            echo env.BRANCH_NAME
752            echo env.BUILD_URL
753            echo currentBuild.result
754            echo currentBuild.durationString
755            def comment = "Results for branch ${env.BRANCH_NAME}\n"
756            comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
757            if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
758               comment += "(/) *{color:green}+1 overall{color}*\n"
759            } else {
760               comment += "(x) *{color:red}-1 overall{color}*\n"
761               // Ideally get the committer our of the change and @ mention them in the per-jira comment
762            }
763            comment += "----\ndetails (if available):\n\n"
764            echo ""
765            echo "[DEBUG] trying to aggregate step-wise results"
766            comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
767            echo "[INFO] Comment:"
768            echo comment
769            echo ""
770            echo "[DEBUG] checking to see if feature branch"
771            def jiras = getJirasToComment(env.BRANCH_NAME, [])
772            if (jiras.isEmpty()) {
773              echo "[DEBUG] non-feature branch, checking change messages for jira keys."
774              echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
775              jiras = getJirasToCommentFromChangesets(currentBuild)
776            }
777            jiras.each { currentIssue ->
778              jiraComment issueKey: currentIssue, body: comment
779            }
780         } catch (Exception exception) {
781           echo "Got exception: ${exception}"
782           echo "    ${exception.getStackTrace()}"
783         }
784       }
785     }
786   }
import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
// Collect the JIRA keys (HBASE-NNNN) mentioned in the commit messages of every
// changeset in the given build, de-duplicated across commits.
// @param thisBuild the pipeline's currentBuild wrapper
// @return list of unique JIRA issue keys seen in the build's change messages
@NonCPS
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
  def seenJiras = []
  thisBuild.changeSets.each { cs ->
    cs.getItems().each { change ->
      CharSequence msg = change.msg
      // NOTE(review): 'echo' is a pipeline step; invoking pipeline steps from
      // @NonCPS code is unsupported by Jenkins — confirm this logging actually works.
      echo "change: ${change}"
      echo "     ${msg}"
      echo "     ${change.commitId}"
      echo "     ${change.author}"
      echo ""
      seenJiras = getJirasToComment(msg, seenJiras)
    }
  }
  return seenJiras
}
805 @NonCPS
806 List<String> getJirasToComment(CharSequence source, List<String> seen) {
807   source.eachMatch("HBASE-[0-9]+") { currentIssue ->
808     echo "[DEBUG] found jira key: ${currentIssue}"
809     if (currentIssue in seen) {
810       echo "[DEBUG] already commented on ${currentIssue}."
811     } else {
812       echo "[INFO] commenting on ${currentIssue}."
813       seen << currentIssue
814     }
815   }
816   return seen