// HBASE-26312 Shell scan fails with timestamp (#3734)
// dev-support/Jenkinsfile
// blob 85d2c2a4bb114e8da73f683230a7ae95d0f94ddd
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
17 pipeline {
18   agent {
19     node {
20       label 'hbase'
21     }
22   }
23   triggers {
24     pollSCM('@daily')
25   }
26   options {
27     buildDiscarder(logRotator(numToKeepStr: '15'))
28     timeout (time: 16, unit: 'HOURS')
29     timestamps()
30     skipDefaultCheckout()
31     disableConcurrentBuilds()
32   }
33   environment {
34     YETUS_RELEASE = '0.12.0'
35     // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
36     OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
37     OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
38     OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
39     OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
40     OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
42     PROJECT = 'hbase'
43     PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
44     PERSONALITY_FILE = 'tools/personality.sh'
45     // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
46     AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
47     WHITESPACE_IGNORE_LIST = '.*/generated/.*'
48     // output from surefire; sadly the archive function in yetus only works on file names.
49     ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
50     // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
51     TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
52     EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
53       // TODO does hadoopcheck need to be jdk specific?
54     SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
55     DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
56     ASF_NIGHTLIES = 'https://nightlies.apache.org'
57   }
58   parameters {
59     booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
61     Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
62     booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
63   }
64   stages {
65     stage ('scm-checkout') {
66       steps {
67             dir('component') {
68               checkout scm
69             }
70       }
71     }
72     stage ('thirdparty installs') {
73       parallel {
74         stage ('yetus install') {
75           steps {
76             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
77             dir('downloads-yetus') {
78               // can't just do a simple echo or the directory won't be created. :(
79               sh '''#!/usr/bin/env bash
80                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
81 '''
82             }
83             sh  '''#!/usr/bin/env bash
84               set -e
85               echo "Ensure we have a copy of Apache Yetus."
86               if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
87                 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
88                 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
89                 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
90                   rm -rf "${YETUS_DIR}"
91                   "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
92                       --working-dir "${WORKSPACE}/downloads-yetus" \
93                       --keys 'https://www.apache.org/dist/yetus/KEYS' \
94                       "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
95                       "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
96                   mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
97                 else
98                   echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
99                 fi
100               else
101                 YETUS_DIR="${WORKSPACE}/yetus-git"
102                 rm -rf "${YETUS_DIR}"
103                 echo "downloading from github"
104                 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
105               fi
106               if [ ! -d "${YETUS_DIR}" ]; then
107                 echo "unpacking yetus into '${YETUS_DIR}'"
108                 mkdir -p "${YETUS_DIR}"
109                 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
110               fi
111             '''
112             // Set up the file we need at PERSONALITY_FILE location
113             dir ("tools") {
114               sh """#!/usr/bin/env bash
115                 set -e
116                 echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
117                 curl -L  -o personality.sh "${env.PROJECT_PERSONALITY}"
118               """
119             }
120             stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
121           }
122         }
123         stage ('hadoop 2 cache') {
124           environment {
125             HADOOP2_VERSION="2.10.0"
126           }
127           steps {
128             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
129             dir('downloads-hadoop-2') {
130               sh '''#!/usr/bin/env bash
131                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
133             }
134             sh '''#!/usr/bin/env bash
135               set -e
136               echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
137               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
138                   --working-dir "${WORKSPACE}/downloads-hadoop-2" \
139                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
140                   "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
141                   "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
142               for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
143                 echo "Delete stale hadoop 2 cache ${stale}"
144                 rm -rf $stale
145               done
146             '''
147             stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
148           }
149         }
150         stage ('hadoop 3 cache') {
151           environment {
152             HADOOP3_VERSION="3.1.1"
153           }
154           steps {
155             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
156             dir('downloads-hadoop-3') {
157               sh '''#!/usr/bin/env bash
158                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
160             }
161             sh '''#!/usr/bin/env bash
162               set -e
163               echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
164               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
165                   --working-dir "${WORKSPACE}/downloads-hadoop-3" \
166                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
167                   "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
168                   "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
169               for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
170                 echo "Delete stale hadoop 3 cache ${stale}"
171                 rm -rf $stale
172               done
173             '''
174             stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
175           }
176         }
177       }
178     }
179     stage ('init health results') {
180       steps {
181         // stash with given name for all tests we might run, so that we can unstash all of them even if
182         // we skip some due to e.g. branch-specific JDK or Hadoop support
183         stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
184         stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
185         stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
186         stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
187         stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
188         stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
189       }
190     }
191     stage ('health checks') {
192       parallel {
193         stage ('yetus general check') {
194           agent {
195             node {
196               label 'hbase'
197             }
198           }
199           environment {
200             BASEDIR = "${env.WORKSPACE}/component"
201             TESTS = "${env.SHALLOW_CHECKS}"
202             SET_JAVA_HOME = '/usr/lib/jvm/java-8'
203             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
204             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
205           }
206           steps {
207             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
208             sh '''#!/usr/bin/env bash
209               set -e
210               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
211               echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
212               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
213             '''
214             unstash 'yetus'
215             // since we have a new node definition we need to re-do the scm checkout
216             dir('component') {
217               checkout scm
218             }
219             sh '''#!/usr/bin/env bash
220               set -e
221               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
222               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
223               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
224               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
225             '''
226             // TODO roll this into the hbase_nightly_yetus script
227             script {
228               def ret = sh(
229                 returnStatus: true,
230                 script: '''#!/usr/bin/env bash
231                   set -e
232                   declare -i status=0
233                   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
234                     echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
235                   else
236                     echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
237                     status=1
238                   fi
239                   echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
240                   exit "${status}"
241                 '''
242               )
243               if (ret != 0) {
244                 // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
245                 // test output. See HBASE-26339 for more details.
246                 currentBuild.result = 'UNSTABLE'
247               }
248             }
249           }
250           post {
251             always {
252               stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
253               // Has to be relative to WORKSPACE.
254               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
255               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
256               publishHTML target: [
257                 allowMissing: true,
258                 keepAll: true,
259                 alwaysLinkToLastBuild: true,
260                 // Has to be relative to WORKSPACE
261                 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
262                 reportFiles: 'console-report.html',
263                 reportName: 'General Nightly Build Report'
264               ]
265             }
266           }
267         }
268         stage ('yetus jdk7 checks') {
269           agent {
270             node {
271               label 'hbase'
272             }
273           }
274           when {
275             branch 'branch-1*'
276           }
277           environment {
278             BASEDIR = "${env.WORKSPACE}/component"
279             TESTS = "${env.DEEP_CHECKS}"
280             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
281             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
282             SET_JAVA_HOME = "/usr/lib/jvm/java-7"
283           }
284           steps {
285             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
286             sh '''#!/usr/bin/env bash
287               set -e
288               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
289               echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
290               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
291             '''
292             unstash 'yetus'
293             dir('component') {
294               checkout scm
295             }
296             sh '''#!/usr/bin/env bash
297               set -e
298               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
299               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
300               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
301               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
302             '''
303             script {
304               def ret = sh(
305                 returnStatus: true,
306                 script: '''#!/usr/bin/env bash
307                   set -e
308                   declare -i status=0
309                   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
310                     echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
311                   else
312                     echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
313                     status=1
314                   fi
315                   echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
316                   exit "${status}"
317                 '''
318               )
319               if (ret != 0) {
320                 // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
321                 // test output. See HBASE-26339 for more details.
322                 currentBuild.result = 'UNSTABLE'
323               }
324             }
325           }
326           post {
327             always {
328               stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
329               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
330               // zip surefire reports.
331               sh '''#!/bin/bash -e
332                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
333                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
334                   if [[ 0 -ne ${count} ]]; then
335                     echo "zipping ${count} archived files"
336                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
337                   else
338                     echo "No archived files, skipping compressing."
339                   fi
340                 else
341                   echo "No archiver directory, skipping compressing."
342                 fi
344               sshPublisher(publishers: [
345                 sshPublisherDesc(configName: 'Nightlies',
346                   transfers: [
347                     sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
348                       sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
349                     )
350                   ]
351                 )
352               ])
353               // remove the big test logs zip file, store the nightlies url in test_logs.txt
354               sh '''#!/bin/bash -e
355                 if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
356                   echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
357                   rm -rf "${OUTPUT_DIR}/test_logs.zip"
358                   echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
359                 else
360                   echo "No test_logs.zip, skipping"
361                 fi
363               // Has to be relative to WORKSPACE.
364               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
365               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
366               publishHTML target: [
367                 allowMissing         : true,
368                 keepAll              : true,
369                 alwaysLinkToLastBuild: true,
370                 // Has to be relative to WORKSPACE.
371                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
372                 reportFiles          : 'console-report.html',
373                 reportName           : 'JDK7 Nightly Build Report'
374               ]
375             }
376           }
377         }
378         stage ('yetus jdk8 hadoop2 checks') {
379           agent {
380             node {
381               label 'hbase'
382             }
383           }
384           when {
385             anyOf { branch 'branch-1*'; branch 'branch-2*' }
386           }
387           environment {
388             BASEDIR = "${env.WORKSPACE}/component"
389             TESTS = "${env.DEEP_CHECKS}"
390             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
391             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
392             SET_JAVA_HOME = '/usr/lib/jvm/java-8'
393           }
394           steps {
395             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
396             sh '''#!/usr/bin/env bash
397               set -e
398               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
399               echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
400               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
401             '''
402             unstash 'yetus'
403             dir('component') {
404               checkout scm
405             }
406             sh '''#!/usr/bin/env bash
407               set -e
408               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
409               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
410               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
411               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
412             '''
413             script {
414               def ret = sh(
415                 returnStatus: true,
416                 script: '''#!/usr/bin/env bash
417                   set -e
418                   declare -i status=0
419                   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
420                     echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
421                   else
422                     echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
423                     status=1
424                   fi
425                   echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
426                   exit "${status}"
427                 '''
428               )
429               if (ret != 0) {
430                 // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
431                 // test output. See HBASE-26339 for more details.
432                 currentBuild.result = 'UNSTABLE'
433               }
434             }
435           }
436           post {
437             always {
438               stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
439               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
440               // zip surefire reports.
441               sh '''#!/bin/bash -e
442                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
443                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
444                   if [[ 0 -ne ${count} ]]; then
445                     echo "zipping ${count} archived files"
446                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
447                   else
448                     echo "No archived files, skipping compressing."
449                   fi
450                 else
451                   echo "No archiver directory, skipping compressing."
452                 fi
454               sshPublisher(publishers: [
455                 sshPublisherDesc(configName: 'Nightlies',
456                   transfers: [
457                     sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
458                       sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
459                     )
460                   ]
461                 )
462               ])
463               // remove the big test logs zip file, store the nightlies url in test_logs.txt
464               sh '''#!/bin/bash -e
465                 if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
466                   echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
467                   rm -rf "${OUTPUT_DIR}/test_logs.zip"
468                   echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
469                 else
470                   echo "No test_logs.zip, skipping"
471                 fi
473               // Has to be relative to WORKSPACE.
474               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
475               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
476               publishHTML target: [
477                 allowMissing         : true,
478                 keepAll              : true,
479                 alwaysLinkToLastBuild: true,
480                 // Has to be relative to WORKSPACE.
481                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
482                 reportFiles          : 'console-report.html',
483                 reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
484               ]
485             }
486           }
487         }
488         stage ('yetus jdk8 hadoop3 checks') {
489           agent {
490             node {
491               label 'hbase'
492             }
493           }
494           when {
495             not {
496               branch 'branch-1*'
497             }
498           }
499           environment {
500             BASEDIR = "${env.WORKSPACE}/component"
501             TESTS = "${env.DEEP_CHECKS}"
502             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
503             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
504             SET_JAVA_HOME = '/usr/lib/jvm/java-8'
505             // Activates hadoop 3.0 profile in maven runs.
506             HADOOP_PROFILE = '3.0'
507           }
508           steps {
509             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
510             sh '''#!/usr/bin/env bash
511               set -e
512               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
513               echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
514               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
515             '''
516             unstash 'yetus'
517             dir('component') {
518               checkout scm
519             }
520             sh '''#!/usr/bin/env bash
521               set -e
522               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
523               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
524               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
525               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
526             '''
527             script {
528               def ret = sh(
529                 returnStatus: true,
530                 script: '''#!/usr/bin/env bash
531                   set -e
532                   declare -i status=0
533                   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
534                     echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
535                   else
536                     echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
537                     status=1
538                   fi
539                   echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
540                   exit "${status}"
541                 '''
542               )
543               if (ret != 0) {
544                 // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
545                 // test output. See HBASE-26339 for more details.
546                 currentBuild.result = 'UNSTABLE'
547               }
548             }
549           }
550           post {
551             always {
552               stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
553               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
554               // zip surefire reports.
555               sh '''#!/bin/bash -e
556                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
557                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
558                   if [[ 0 -ne ${count} ]]; then
559                     echo "zipping ${count} archived files"
560                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
561                   else
562                     echo "No archived files, skipping compressing."
563                   fi
564                 else
565                   echo "No archiver directory, skipping compressing."
566                 fi
568               sshPublisher(publishers: [
569                 sshPublisherDesc(configName: 'Nightlies',
570                   transfers: [
571                     sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
572                       sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
573                     )
574                   ]
575                 )
576               ])
577               // remove the big test logs zip file, store the nightlies url in test_logs.txt
578               sh '''#!/bin/bash -e
579                 if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
580                   echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
581                   rm -rf "${OUTPUT_DIR}/test_logs.zip"
582                   echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
583                 else
584                   echo "No test_logs.zip, skipping"
585                 fi
587               // Has to be relative to WORKSPACE.
588               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
589               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
590               publishHTML target: [
591                 allowMissing         : true,
592                 keepAll              : true,
593                 alwaysLinkToLastBuild: true,
594                 // Has to be relative to WORKSPACE.
595                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
596                 reportFiles          : 'console-report.html',
597                 reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
598               ]
599             }
600           }
601         }
602         stage ('yetus jdk11 hadoop3 checks') {
603           agent {
604             node {
605               label 'hbase'
606             }
607           }
608           when {
609             not {
610               branch 'branch-1*'
611             }
612           }
613           environment {
614             BASEDIR = "${env.WORKSPACE}/component"
615             TESTS = "${env.DEEP_CHECKS}"
616             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
617             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
618             SET_JAVA_HOME = "/usr/lib/jvm/java-11"
619             // Activates hadoop 3.0 profile in maven runs.
620             HADOOP_PROFILE = '3.0'
621             // ErrorProne is broken on JDK11, see HBASE-23894
622             SKIP_ERROR_PRONE = 'true'
623           }
624           steps {
625             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
626             sh '''#!/usr/bin/env bash
627               set -e
628               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
629               echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
630               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
631             '''
632             unstash 'yetus'
633             dir('component') {
634               checkout scm
635             }
636             sh '''#!/usr/bin/env bash
637               set -e
638               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
639               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
640               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
641               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
642             '''
643             script {
644               def ret = sh(
645                 returnStatus: true,
646                 script: '''#!/usr/bin/env bash
647                   set -e
648                   declare -i status=0
649                   if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
650                     echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
651                   else
652                     echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
653                     status=1
654                   fi
655                   echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
656                   exit "${status}"
657                 '''
658               )
659               if (ret != 0) {
660                 // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
661                 // test output. See HBASE-26339 for more details.
662                 currentBuild.result = 'UNSTABLE'
663               }
664             }
665           }
666           post {
667             always {
668               stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
669               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
670               // zip surefire reports.
671               sh '''#!/bin/bash -e
672                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
673                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
674                   if [[ 0 -ne ${count} ]]; then
675                     echo "zipping ${count} archived files"
676                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
677                   else
678                     echo "No archived files, skipping compressing."
679                   fi
680                 else
681                   echo "No archiver directory, skipping compressing."
682                 fi
684               sshPublisher(publishers: [
685                 sshPublisherDesc(configName: 'Nightlies',
686                   transfers: [
687                     sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
688                       sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
689                     )
690                   ]
691                 )
692               ])
693               // remove the big test logs zip file, store the nightlies url in test_logs.txt
694               sh '''#!/bin/bash -e
695                 if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
696                   echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
697                   rm -rf "${OUTPUT_DIR}/test_logs.zip"
698                   echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
699                 else
700                   echo "No test_logs.zip, skipping"
701                 fi
703               // Has to be relative to WORKSPACE.
704               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
705               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
706               publishHTML target: [
707                 allowMissing         : true,
708                 keepAll              : true,
709                 alwaysLinkToLastBuild: true,
710                 // Has to be relative to WORKSPACE.
711                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
712                 reportFiles          : 'console-report.html',
713                 reportName           : 'JDK11 Nightly Build Report (Hadoop3)'
714               ]
715             }
716           }
717         }
718         // This is meant to mimic what a release manager will do to create RCs.
719         // See http://hbase.apache.org/book.html#maven.release
720         // TODO (HBASE-23870): replace this with invocation of the release tool
721         stage ('packaging and integration') {
722           tools {
723             maven 'maven_latest'
724             // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
725             jdk "jdk_1.8_latest"
726           }
727           environment {
728             BASEDIR = "${env.WORKSPACE}/component"
729             BRANCH = "${env.BRANCH_NAME}"
730           }
731           steps {
732             sh '''#!/bin/bash -e
733               echo "Setting up directories"
734               rm -rf "output-srctarball" && mkdir "output-srctarball"
735               rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
736               rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
737               rm -rf "hbase-install" && mkdir "hbase-install"
738               rm -rf "hbase-client" && mkdir "hbase-client"
739               rm -rf "hadoop-2" && mkdir "hadoop-2"
740               rm -rf "hadoop-3" && mkdir "hadoop-3"
741               rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
742               rm -rf ".m2-for-src" && mkdir ".m2-for-src"
743               echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
744               echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
746             sh '''#!/usr/bin/env bash
747               set -e
748               rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
749               "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
750               echo "got the following saved stats in 'output-srctarball/machine'"
751               ls -lh "output-srctarball/machine"
753             sh """#!/bin/bash -e
754               echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
755               if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
756                   --intermediate-file-dir output-srctarball \
757                   --unpack-temp-dir unpacked_src_tarball \
758                   --maven-m2-initial .m2-for-repo \
759                   --maven-m2-src-build .m2-for-src \
760                   --clean-source-checkout \
761                   "${env.BASEDIR}" ; then
762                 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
763               else
764                 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
765                 exit 1
766               fi
768             echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
769             sh '''#!/bin/bash -e
770               if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
771                 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
772                 exit 1
773               fi
774               install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
775               tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
776               client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
777               tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
779             unstash 'hadoop-2'
780             sh '''#!/bin/bash -xe
781               if [[ "${BRANCH}" = branch-2* ]] || [[ "${BRANCH}" = branch-1* ]]; then
782                 echo "Attempting to use run an instance on top of Hadoop 2."
783                 artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
784                 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
785                 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
786                     --single-process \
787                     --working-dir output-integration/hadoop-2 \
788                     --hbase-client-install "hbase-client" \
789                     "hbase-install" \
790                     "hadoop-2/bin/hadoop" \
791                     hadoop-2/share/hadoop/yarn/timelineservice \
792                     hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
793                     hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
794                     hadoop-2/bin/mapred \
795                     >output-integration/hadoop-2.log 2>&1 ; then
796                   echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
797                   exit 2
798                 fi
799               else
800                 echo "Skipping to run against Hadoop 2 for branch ${BRANCH}"
801               fi
803             unstash 'hadoop-3'
804             sh '''#!/bin/bash -e
805               if [[ "${BRANCH}" = branch-1* ]]; then
806                 echo "Skipping to run against Hadoop 3 for branch ${BRANCH}"
807               else
808                 echo "Attempting to use run an instance on top of Hadoop 3."
809                 artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
810                 tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
811                 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
812                     --single-process \
813                     --working-dir output-integration/hadoop-3 \
814                     --hbase-client-install hbase-client \
815                     hbase-install \
816                     hadoop-3/bin/hadoop \
817                     hadoop-3/share/hadoop/yarn/timelineservice \
818                     hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
819                     hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
820                     hadoop-3/bin/mapred \
821                     >output-integration/hadoop-3.log 2>&1 ; then
822                   echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
823                   exit 2
824                 fi
825                 echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
826                 if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
827                     --single-process \
828                     --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
829                     --working-dir output-integration/hadoop-3-shaded \
830                     --hbase-client-install hbase-client \
831                     hbase-install \
832                     hadoop-3/bin/hadoop \
833                     hadoop-3/share/hadoop/yarn/timelineservice \
834                     hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
835                     hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
836                     hadoop-3/bin/mapred \
837                     >output-integration/hadoop-3-shaded.log 2>&1 ; then
838                   echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
839                   exit 2
840                 fi
841                 echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
842               fi
846           }
847           post {
848             always {
849               stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
850               archiveArtifacts artifacts: 'output-srctarball/*'
851               archiveArtifacts artifacts: 'output-srctarball/**/*'
852               archiveArtifacts artifacts: 'output-integration/*'
853               archiveArtifacts artifacts: 'output-integration/**/*'
854             }
855           }
856         }
857       }
858     }
859   }
860   post {
861     always {
862       script {
863          try {
864            unstash 'general-result'
865            unstash 'jdk7-result'
866            unstash 'jdk8-hadoop2-result'
867            unstash 'jdk8-hadoop3-result'
868            unstash 'jdk11-hadoop3-result'
869            unstash 'srctarball-result'
870            sh "printenv"
871            def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
872                           "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
873                           "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
874                           "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
875                           "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
876                           'output-srctarball/commentfile',
877                           'output-integration/commentfile']
878            echo env.BRANCH_NAME
879            echo env.BUILD_URL
880            echo currentBuild.result
881            echo currentBuild.durationString
882            def comment = "Results for branch ${env.BRANCH_NAME}\n"
883            comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
884            if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
885               comment += "(/) *{color:green}+1 overall{color}*\n"
886            } else {
887               comment += "(x) *{color:red}-1 overall{color}*\n"
888               // Ideally get the committer our of the change and @ mention them in the per-jira comment
889            }
890            comment += "----\ndetails (if available):\n\n"
891            echo ""
892            echo "[DEBUG] trying to aggregate step-wise results"
893            comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
894            echo "[INFO] Comment:"
895            echo comment
896            echo ""
897            echo "[DEBUG] checking to see if feature branch"
898            def jiras = getJirasToComment(env.BRANCH_NAME, [])
899            if (jiras.isEmpty()) {
900              echo "[DEBUG] non-feature branch, checking change messages for jira keys."
901              echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
902              jiras = getJirasToCommentFromChangesets(currentBuild)
903            }
904            jiras.each { currentIssue ->
905              jiraComment issueKey: currentIssue, body: comment
906            }
907         } catch (Exception exception) {
908           echo "Got exception: ${exception}"
909           echo "    ${exception.getStackTrace()}"
910         }
911       }
912     }
913   }
915 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
@NonCPS
// Scans every commit message in this build's changesets and returns the list of
// HBASE-NNN jira keys found, without duplicates. Delegates key extraction to
// getJirasToComment, threading the accumulator through each commit message.
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
  def seenJiras = []
  thisBuild.changeSets.each { cs ->
    cs.getItems().each { change ->
      CharSequence msg = change.msg
      // Log the change details to aid debugging of which commits were scanned.
      echo "change: ${change}"
      echo "     ${msg}"
      echo "     ${change.commitId}"
      echo "     ${change.author}"
      echo ""
      seenJiras = getJirasToComment(msg, seenJiras)
    }
  }
  return seenJiras
}
932 @NonCPS
933 List<String> getJirasToComment(CharSequence source, List<String> seen) {
934   source.eachMatch("HBASE-[0-9]+") { currentIssue ->
935     echo "[DEBUG] found jira key: ${currentIssue}"
936     if (currentIssue in seen) {
937       echo "[DEBUG] already commented on ${currentIssue}."
938     } else {
939       echo "[INFO] commenting on ${currentIssue}."
940       seen << currentIssue
941     }
942   }
943   return seen