HBASE-20937 Update the support matrix in our ref guide about recent hadoop releases
[hbase.git] / dev-support / Jenkinsfile
blob59d3227f58c0d6a49f063d23241f4f7251a610fd
1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements.  See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership.  The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License.  You may obtain a copy of the License at
8 //
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied.  See the License for the
15 // specific language governing permissions and limitations
16 // under the License.
17 pipeline {
18   agent {
19     node {
20       label 'ubuntu'
21     }
22   }
23   triggers {
24     cron('@daily')
25   }
26   options {
27     buildDiscarder(logRotator(numToKeepStr: '30'))
28     timeout (time: 9, unit: 'HOURS')
29     timestamps()
30     skipDefaultCheckout()
31   }
  environment {
    // Pinned Apache Yetus version fetched by the 'yetus install' stage; the
    // USE_YETUS_PRERELEASE parameter switches the build to yetus HEAD instead.
    YETUS_RELEASE = '0.7.0'
    // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
    OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
    OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
    OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
    // NOTE(review): one line appears to have been elided from this copy right
    // above PROJECT — verify against the upstream dev-support/Jenkinsfile.
    PROJECT = 'hbase'
    // Where the yetus personality script is downloaded from, and where it is
    // placed locally (see the 'yetus install' stage).
    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
    PERSONALITY_FILE = 'tools/personality.sh'
    // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
    AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
    WHITESPACE_IGNORE_LIST = '.*/generated/.*'
    // output from surefire; sadly the archive function in yetus only works on file names.
    ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
    // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
    TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
    // Flaky urls for different branches. Replace '-' and '.' in branch name by '_' because those
    // characters are not allowed in bash variable name.
    // Not excluding flakies from the nightly build for now.
    // EXCLUDE_TESTS_URL_master = 'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
    // EXCLUDE_TESTS_URL_branch_2 = 'https://builds.apache.org/job/HBase-Find-Flaky-Tests-branch2.0/lastSuccessfulBuild/artifact/excludes/'
  }
  parameters {
    // Manual-use escape hatch: build against yetus HEAD instead of YETUS_RELEASE.
    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
    // Extra debug output from the yetus runs.
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
62   stages {
    stage ('scm-checkout') {
      steps {
            // Check the project out into 'component' so later stages can reference
            // its helper scripts as ${WORKSPACE}/component/dev-support/...
            dir('component') {
              checkout scm
            }
      }
    }
70     stage ('thirdparty installs') {
71       parallel {
        stage ('yetus install') {
          steps {
            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
            dir('downloads-yetus') {
              // can't just do a simple echo or the directory won't be created. :(
              sh '''#!/usr/bin/env bash
                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
            }
            // Fetch and unpack Apache Yetus: either the pinned YETUS_RELEASE (cached
            // across builds, downloaded via the caching helper script which is given
            // the Apache KEYS url) or, when USE_YETUS_PRERELEASE is set, a fresh
            // tarball of apache/yetus HEAD from github.
            sh  '''#!/usr/bin/env bash
              set -e
              echo "Ensure we have a copy of Apache Yetus."
              if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
                YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
                echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
                if [ ! -d "${YETUS_DIR}" ]; then
                  "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                      --working-dir "${WORKSPACE}/downloads-yetus" \
                      --keys 'https://www.apache.org/dist/yetus/KEYS' \
                      "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
                      "yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz"
                  mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
                else
                  echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
                fi
              else
                YETUS_DIR="${WORKSPACE}/yetus-git"
                rm -rf "${YETUS_DIR}"
                echo "downloading from github"
                curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
              fi
              if [ ! -d "${YETUS_DIR}" ]; then
                echo "unpacking yetus into '${YETUS_DIR}'"
                mkdir -p "${YETUS_DIR}"
                gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
              fi
            '''
            // Set up the file we need at PERSONALITY_FILE location
            dir ("tools") {
              sh """#!/usr/bin/env bash
                set -e
                echo "Downloading Project personality."
                curl -L  -o personality.sh "${env.PROJECT_PERSONALITY}"
              """
            }
            // Make yetus and the personality available to the check stages, which
            // run on different nodes and unstash this by name.
            stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
          }
        }
120         stage ('hadoop 2 cache') {
121           environment {
122             HADOOP2_VERSION="2.7.1"
123           }
124           steps {
125             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
126             dir('downloads-hadoop-2') {
127               sh '''#!/usr/bin/env bash
128                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
130             }
131             sh '''#!/usr/bin/env bash
132               set -e
133               echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
134               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
135                   --working-dir "${WORKSPACE}/downloads-hadoop-2" \
136                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
137                   "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
138                   "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
139             '''
140             stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
141           }
142         }
143         stage ('hadoop 3 cache') {
144           environment {
145             HADOOP3_VERSION="3.0.0"
146           }
147           steps {
148             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
149             dir('downloads-hadoop-3') {
150               sh '''#!/usr/bin/env bash
151                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
153             }
154             sh '''#!/usr/bin/env bash
155               set -e
156               echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
157               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
158                   --working-dir "${WORKSPACE}/downloads-hadoop-3" \
159                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
160                   "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
161                   "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
162             '''
163             stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
164           }
165         }
166       }
167     }
    stage ('init health results') {
      steps {
        // stash with given name for all tests we might run, so that we can unstash all of them even if
        // we skip some due to e.g. branch-specific JDK or Hadoop support
        // (allowEmpty plus a non-matching include yields a valid empty stash).
        stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
        stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
        stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
        stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
        stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
      }
    }
179     stage ('health checks') {
180       parallel {
181         stage ('yetus general check') {
182           agent {
183             node {
184               label 'Hadoop'
185             }
186           }
187           environment {
188             BASEDIR = "${env.WORKSPACE}/component"
189             // TODO does hadoopcheck need to be jdk specific?
190             // Should be things that work with multijdk
191             TESTS = 'all,-unit,-findbugs'
192             // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
193             // doing multijdk there.
194             MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
195             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
196             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
197           }
198           steps {
199             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
200             sh '''#!/usr/bin/env bash
201               set -e
202               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
203               echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
204               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
206             unstash 'yetus'
207             // since we have a new node definition we need to re-do the scm checkout
208             dir('component') {
209               checkout scm
210             }
211             sh '''#!/usr/bin/env bash
212               set -e
213               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
214               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
215               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
216               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
218             // TODO roll this into the hbase_nightly_yetus script
219             sh '''#!/usr/bin/env bash
220               set -e
221               declare -i status=0
222               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
223                 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
224               else
225                 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
226                 status=1
227               fi
228               echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
229               exit "${status}"
230             '''
231           }
232           post {
233             always {
234               stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
235               // Has to be relative to WORKSPACE.
236               archive "${env.OUTPUT_DIR_RELATIVE}/*"
237               archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
238               publishHTML target: [
239                 allowMissing: true,
240                 keepAll: true,
241                 alwaysLinkToLastBuild: true,
242                 // Has to be relative to WORKSPACE
243                 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
244                 reportFiles: 'console-report.html',
245                 reportName: 'General Nightly Build Report'
246               ]
247             }
248           }
249         }
250         stage ('yetus jdk7 checks') {
251           agent {
252             node {
253               label 'Hadoop'
254             }
255           }
256           when {
257             branch 'branch-1*'
258           }
259           environment {
260             BASEDIR = "${env.WORKSPACE}/component"
261             TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
262             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
263             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
264             // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
265           }
266           steps {
267             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
268             sh '''#!/usr/bin/env bash
269               set -e
270               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
271               echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
272               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
274             unstash 'yetus'
275             dir('component') {
276               checkout scm
277             }
278             sh '''#!/usr/bin/env bash
279               set -e
280               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
281               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
282               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
283               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
285             sh '''#!/usr/bin/env bash
286               set -e
287               declare -i status=0
288               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
289                 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
290               else
291                 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
292                 status=1
293               fi
294               echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
295               exit "${status}"
296             '''
297           }
298           post {
299             always {
300               stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
301               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
302               // zip surefire reports.
303               sh '''#!/bin/bash -e
304                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
305                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
306                   if [[ 0 -ne ${count} ]]; then
307                     echo "zipping ${count} archived files"
308                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
309                   else
310                     echo "No archived files, skipping compressing."
311                   fi
312                 else
313                   echo "No archiver directory, skipping compressing."
314                 fi
316               // Has to be relative to WORKSPACE.
317               archive "${env.OUTPUT_DIR_RELATIVE}/*"
318               archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
319               publishHTML target: [
320                 allowMissing         : true,
321                 keepAll              : true,
322                 alwaysLinkToLastBuild: true,
323                 // Has to be relative to WORKSPACE.
324                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
325                 reportFiles          : 'console-report.html',
326                 reportName           : 'JDK7 Nightly Build Report'
327               ]
328             }
329           }
330         }
331         stage ('yetus jdk8 hadoop2 checks') {
332           agent {
333             node {
334               label 'Hadoop'
335             }
336           }
337           environment {
338             BASEDIR = "${env.WORKSPACE}/component"
339             TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
340             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
341             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
342             // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
343             // and is needed on branches that do both jdk7 and jdk8
344             SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
345           }
346           steps {
347             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
348             sh '''#!/usr/bin/env bash
349               set -e
350               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
351               echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
352               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
354             unstash 'yetus'
355             dir('component') {
356               checkout scm
357             }
358             sh '''#!/usr/bin/env bash
359               set -e
360               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
361               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
362               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
363               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
365             sh '''#!/usr/bin/env bash
366               set -e
367               declare -i status=0
368               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
369                 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
370               else
371                 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
372                 status=1
373               fi
374               echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
375               exit "${status}"
376             '''
377           }
378           post {
379             always {
380               stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
381               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
382               // zip surefire reports.
383               sh '''#!/bin/bash -e
384                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
385                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
386                   if [[ 0 -ne ${count} ]]; then
387                     echo "zipping ${count} archived files"
388                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
389                   else
390                     echo "No archived files, skipping compressing."
391                   fi
392                 else
393                   echo "No archiver directory, skipping compressing."
394                 fi
396               // Has to be relative to WORKSPACE.
397               archive "${env.OUTPUT_DIR_RELATIVE}/*"
398               archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
399               publishHTML target: [
400                 allowMissing         : true,
401                 keepAll              : true,
402                 alwaysLinkToLastBuild: true,
403                 // Has to be relative to WORKSPACE.
404                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
405                 reportFiles          : 'console-report.html',
406                 reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
407               ]
408             }
409           }
410         }
411         stage ('yetus jdk8 hadoop3 checks') {
412           agent {
413             node {
414               label 'Hadoop'
415             }
416           }
417           when {
418             not {
419               branch 'branch-1*'
420             }
421           }
422           environment {
423             BASEDIR = "${env.WORKSPACE}/component"
424             TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
425             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
426             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
427             // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
428             // and is needed on branches that do both jdk7 and jdk8
429             SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
430             // Activates hadoop 3.0 profile in maven runs.
431             HADOOP_PROFILE = '3.0'
432           }
433           steps {
434             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
435             sh '''#!/usr/bin/env bash
436               set -e
437               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
438               echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
439               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
441             unstash 'yetus'
442             dir('component') {
443               checkout scm
444             }
445             sh '''#!/usr/bin/env bash
446               set -e
447               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
448               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
449               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
450               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
452             sh '''#!/usr/bin/env bash
453               set -e
454               declare -i status=0
455               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
456                 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
457               else
458                 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
459                 status=1
460               fi
461               echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
462               exit "${status}"
463             '''
464           }
465           post {
466             always {
467               stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
468               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
469               // zip surefire reports.
470               sh '''#!/bin/bash -e
471                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
472                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
473                   if [[ 0 -ne ${count} ]]; then
474                     echo "zipping ${count} archived files"
475                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
476                   else
477                     echo "No archived files, skipping compressing."
478                   fi
479                 else
480                   echo "No archiver directory, skipping compressing."
481                 fi
483               // Has to be relative to WORKSPACE.
484               archive "${env.OUTPUT_DIR_RELATIVE}/*"
485               archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
486               publishHTML target: [
487                 allowMissing         : true,
488                 keepAll              : true,
489                 alwaysLinkToLastBuild: true,
490                 // Has to be relative to WORKSPACE.
491                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
492                 reportFiles          : 'console-report.html',
493                 reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
494               ]
495             }
496           }
497         }
498         // This is meant to mimic what a release manager will do to create RCs.
499         // See http://hbase.apache.org/book.html#maven.release
500         stage ('packaging and integration') {
501           tools {
502             maven 'Maven (latest)'
503             // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
504             jdk "JDK 1.8 (latest)"
505           }
506           environment {
507             BASEDIR = "${env.WORKSPACE}/component"
508           }
509           steps {
510             sh '''#!/bin/bash -e
511               echo "Setting up directories"
512               rm -rf "output-srctarball" && mkdir "output-srctarball"
513               rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
514               rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
515               rm -rf "hbase-install" && mkdir "hbase-install"
516               rm -rf "hbase-client" && mkdir "hbase-client"
517               rm -rf "hadoop-2" && mkdir "hadoop-2"
518               rm -rf "hadoop-3" && mkdir "hadoop-3"
519               rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
520               rm -rf ".m2-for-src" && mkdir ".m2-for-src"
521               echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
522               echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
524             sh '''#!/usr/bin/env bash
525               set -e
526               rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
527               "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
528               echo "got the following saved stats in 'output-srctarball/machine'"
529               ls -lh "output-srctarball/machine"
531             sh """#!/bin/bash -e
532               echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
533               if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
534                   --intermediate-file-dir output-srctarball \
535                   --unpack-temp-dir unpacked_src_tarball \
536                   --maven-m2-initial .m2-for-repo \
537                   --maven-m2-src-build .m2-for-src \
538                   --clean-source-checkout \
539                   "${env.BASEDIR}" ; then
540                 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
541               else
542                 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
543                 exit 1
544               fi
546             echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
547             sh '''#!/bin/bash -e
548               if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
549                 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
550                 exit 1
551               fi
552               install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
553               tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
554               client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
555               tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
557             unstash 'hadoop-2'
558             echo "Attempting to use run an instance on top of Hadoop 2."
559             sh '''#!/bin/bash -xe
560               artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
561               tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
562               if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
563                   --single-process \
564                   --working-dir output-integration/hadoop-2 \
565                   --hbase-client-install "hbase-client" \
566                   "hbase-install" \
567                   "hadoop-2/bin/hadoop" \
568                   hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
569                   hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
570                   >output-integration/hadoop-2.log 2>&1 ; then
571                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
572                 exit 2
573               fi
575             unstash 'hadoop-3'
576             echo "Attempting to use run an instance on top of Hadoop 3."
577             sh '''#!/bin/bash -e
578               artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
579               tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
580               if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
581                   --single-process \
582                   --working-dir output-integration/hadoop-3 \
583                   --hbase-client-install hbase-client \
584                   hbase-install \
585                   hadoop-3/bin/hadoop \
586                   hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
587                   hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
588                   >output-integration/hadoop-3.log 2>&1 ; then
589                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
590                 exit 2
591               fi
592               echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
593               if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
594                   --single-process \
595                   --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
596                   --working-dir output-integration/hadoop-3-shaded \
597                   --hbase-client-install hbase-client \
598                   hbase-install \
599                   hadoop-3/bin/hadoop \
600                   hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
601                   hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
602                   >output-integration/hadoop-3-shaded.log 2>&1 ; then
603                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
604                 exit 2
605               fi
606               echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
610           }
          post {
            always {
              // Stash only the commentfiles: the pipeline-level post step
              // unstashes them to assemble the aggregated Jira comment.
              stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
              // Archive both the top-level files and everything nested beneath
              // them so the full logs are browsable from the build page.
              archive 'output-srctarball/*'
              archive 'output-srctarball/**/*'
              archive 'output-integration/*'
              archive 'output-integration/**/*'
            }
          }
620         }
621       }
622     }
623   }
  post {
    always {
      script {
         // Aggregate the per-stage commentfiles into one Jira-markup comment
         // and post it to every issue found in the branch name or changesets.
         try {
           unstash 'general-result'
           unstash 'jdk7-result'
           unstash 'hadoop2-result'
           unstash 'hadoop3-result'
           unstash 'srctarball-result'
           sh "printenv"
           // One commentfile per stage; files that a failed/skipped stage never
           // wrote are tolerated by the fileExists check below.
           def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
                          'output-srctarball/commentfile',
                          'output-integration/commentfile']
           echo env.BRANCH_NAME
           echo env.BUILD_URL
           echo currentBuild.result
           echo currentBuild.durationString
           def comment = "Results for branch ${env.BRANCH_NAME}\n"
           comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
           // currentBuild.result is still null at this point when the build has
           // not been marked failed/unstable, so null counts as success.
           if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
              comment += "(/) *{color:green}+1 overall{color}*\n"
           } else {
              comment += "(x) *{color:red}-1 overall{color}*\n"
              // Ideally get the committer out of the change and @ mention them in the per-jira comment
           }
           comment += "----\ndetails (if available):\n\n"
           echo ""
           echo "[DEBUG] trying to aggregate step-wise results"
           comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
           echo "[INFO] Comment:"
           echo comment
           echo ""
           echo "[DEBUG] checking to see if feature branch"
           // Feature branches carry the jira key in the branch name itself;
           // otherwise fall back to scanning the commit messages.
           def jiras = getJirasToComment(env.BRANCH_NAME, [])
           if (jiras.isEmpty()) {
             echo "[DEBUG] non-feature branch, checking change messages for jira keys."
             echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
             jiras = getJirasToCommentFromChangesets(currentBuild)
           }
           jiras.each { currentIssue ->
             jiraComment issueKey: currentIssue, body: comment
           }
        } catch (Exception exception) {
          // Never fail the build because Jira commenting failed; just log it.
          echo "Got exception: ${exception}"
          echo "    ${exception.getStackTrace()}"
        }
      }
    }
  }
import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
// Collect the distinct HBASE-NNN jira keys mentioned in the commit messages of
// all changesets in this build. @NonCPS so this runs as plain (non-CPS) Groovy
// while iterating the changeset objects.
@NonCPS
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
  def seenJiras = []
  thisBuild.changeSets.each { cs ->
    cs.getItems().each { change ->
      CharSequence msg = change.msg
      echo "change: ${change}"
      echo "     ${msg}"
      echo "     ${change.commitId}"
      echo "     ${change.author}"
      echo ""
      // Accumulate into the same list so keys repeated across commits are
      // only reported once.
      seenJiras = getJirasToComment(msg, seenJiras)
    }
  }
  return seenJiras
@NonCPS
// Append every "HBASE-<digits>" key found in source to seen, skipping keys
// already present. Mutates and returns the seen list so callers can chain
// calls across multiple commit messages.
List<String> getJirasToComment(CharSequence source, List<String> seen) {
  source.eachMatch("HBASE-[0-9]+") { currentIssue ->
    echo "[DEBUG] found jira key: ${currentIssue}"
    if (currentIssue in seen) {
      echo "[DEBUG] already commented on ${currentIssue}."
    } else {
      echo "[INFO] commenting on ${currentIssue}."
      seen << currentIssue
    }
  }
  return seen