// HBASE-21443 [hbase-connectors] Purge hbase-* modules from core now they've been moved...
// Source: hbase.git / dev-support / Jenkinsfile
// (blob b333afbd7f2859c9681e433d96d5cc1142147d8e)
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
pipeline {
  // Default agent for lightweight stages; heavier stages declare their own agents below.
  agent {
    node {
      label 'ubuntu'
    }
  }
  // Nightly job: run once a day.
  triggers {
    cron('@daily')
  }
  options {
    // Keep records/artifacts for the last 30 builds only.
    buildDiscarder(logRotator(numToKeepStr: '30'))
    // Hard stop so a wedged build can't hold an executor indefinitely.
    timeout (time: 9, unit: 'HOURS')
    timestamps()
    // Stages perform their own checkout into ./component (see 'scm-checkout').
    skipDefaultCheckout()
  }
32   environment {
33     YETUS_RELEASE = '0.7.0'
34     // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
35     OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
36     OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
37     OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
38     OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
40     PROJECT = 'hbase'
41     PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
42     PERSONALITY_FILE = 'tools/personality.sh'
43     // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
44     AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
45     WHITESPACE_IGNORE_LIST = '.*/generated/.*'
46     // output from surefire; sadly the archive function in yetus only works on file names.
47     ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
48     // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
49     TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
50     EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
51   }
52   parameters {
53     booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
55     Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
56     booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
57   }
58   stages {
59     stage ('scm-checkout') {
60       steps {
61             dir('component') {
62               checkout scm
63             }
64       }
65     }
66     stage ('thirdparty installs') {
67       parallel {
68         stage ('yetus install') {
69           steps {
70             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
71             dir('downloads-yetus') {
72               // can't just do a simple echo or the directory won't be created. :(
73               sh '''#!/usr/bin/env bash
74                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
75 '''
76             }
77             sh  '''#!/usr/bin/env bash
78               set -e
79               echo "Ensure we have a copy of Apache Yetus."
80               if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
81                 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
82                 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
83                 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
84                   rm -rf "${YETUS_DIR}"
85                   "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
86                       --working-dir "${WORKSPACE}/downloads-yetus" \
87                       --keys 'https://www.apache.org/dist/yetus/KEYS' \
88                       "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
89                       "yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz"
90                   mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
91                 else
92                   echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
93                 fi
94               else
95                 YETUS_DIR="${WORKSPACE}/yetus-git"
96                 rm -rf "${YETUS_DIR}"
97                 echo "downloading from github"
98                 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
99               fi
100               if [ ! -d "${YETUS_DIR}" ]; then
101                 echo "unpacking yetus into '${YETUS_DIR}'"
102                 mkdir -p "${YETUS_DIR}"
103                 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
104               fi
105             '''
106             // Set up the file we need at PERSONALITY_FILE location
107             dir ("tools") {
108               sh """#!/usr/bin/env bash
109                 set -e
110                 echo "Downloading Project personality."
111                 curl -L  -o personality.sh "${env.PROJECT_PERSONALITY}"
112               """
113             }
114             stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
115           }
116         }
117         stage ('hadoop 2 cache') {
118           environment {
119             HADOOP2_VERSION="2.7.1"
120           }
121           steps {
122             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
123             dir('downloads-hadoop-2') {
124               sh '''#!/usr/bin/env bash
125                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
127             }
128             sh '''#!/usr/bin/env bash
129               set -e
130               echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
131               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
132                   --working-dir "${WORKSPACE}/downloads-hadoop-2" \
133                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
134                   "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
135                   "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
136             '''
137             stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
138           }
139         }
140         stage ('hadoop 3 cache') {
141           environment {
142             HADOOP3_VERSION="3.0.0"
143           }
144           steps {
145             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
146             dir('downloads-hadoop-3') {
147               sh '''#!/usr/bin/env bash
148                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
150             }
151             sh '''#!/usr/bin/env bash
152               set -e
153               echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
154               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
155                   --working-dir "${WORKSPACE}/downloads-hadoop-3" \
156                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
157                   "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
158                   "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
159             '''
160             stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
161           }
162         }
163       }
164     }
165     stage ('init health results') {
166       steps {
167         // stash with given name for all tests we might run, so that we can unstash all of them even if
168         // we skip some due to e.g. branch-specific JDK or Hadoop support
169         stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
170         stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
171         stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
172         stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
173         stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
174       }
175     }
176     stage ('health checks') {
177       parallel {
178         stage ('yetus general check') {
179           agent {
180             node {
181               label 'Hadoop'
182             }
183           }
184           environment {
185             BASEDIR = "${env.WORKSPACE}/component"
186             // TODO does hadoopcheck need to be jdk specific?
187             // Should be things that work with multijdk
188             TESTS = 'all,-unit,-findbugs'
189             // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
190             // doing multijdk there.
191             MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
192             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
193             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
194           }
195           steps {
196             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
197             sh '''#!/usr/bin/env bash
198               set -e
199               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
200               echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
201               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
203             unstash 'yetus'
204             // since we have a new node definition we need to re-do the scm checkout
205             dir('component') {
206               checkout scm
207             }
208             sh '''#!/usr/bin/env bash
209               set -e
210               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
211               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
212               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
213               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
215             // TODO roll this into the hbase_nightly_yetus script
216             sh '''#!/usr/bin/env bash
217               set -e
218               declare -i status=0
219               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
220                 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
221               else
222                 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
223                 status=1
224               fi
225               echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
226               exit "${status}"
227             '''
228           }
229           post {
230             always {
231               stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
232               // Has to be relative to WORKSPACE.
233               archive "${env.OUTPUT_DIR_RELATIVE}/*"
234               archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
235               publishHTML target: [
236                 allowMissing: true,
237                 keepAll: true,
238                 alwaysLinkToLastBuild: true,
239                 // Has to be relative to WORKSPACE
240                 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
241                 reportFiles: 'console-report.html',
242                 reportName: 'General Nightly Build Report'
243               ]
244             }
245           }
246         }
247         stage ('yetus jdk7 checks') {
248           agent {
249             node {
250               label 'Hadoop'
251             }
252           }
253           when {
254             branch 'branch-1*'
255           }
256           environment {
257             BASEDIR = "${env.WORKSPACE}/component"
258             TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
259             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
260             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
261             // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
262           }
263           steps {
264             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
265             sh '''#!/usr/bin/env bash
266               set -e
267               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
268               echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
269               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
271             unstash 'yetus'
272             dir('component') {
273               checkout scm
274             }
275             sh '''#!/usr/bin/env bash
276               set -e
277               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
278               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
279               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
280               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
282             sh '''#!/usr/bin/env bash
283               set -e
284               declare -i status=0
285               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
286                 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
287               else
288                 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
289                 status=1
290               fi
291               echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
292               exit "${status}"
293             '''
294           }
295           post {
296             always {
297               stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
298               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
299               // zip surefire reports.
300               sh '''#!/bin/bash -e
301                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
302                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
303                   if [[ 0 -ne ${count} ]]; then
304                     echo "zipping ${count} archived files"
305                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
306                   else
307                     echo "No archived files, skipping compressing."
308                   fi
309                 else
310                   echo "No archiver directory, skipping compressing."
311                 fi
313               // Has to be relative to WORKSPACE.
314               archive "${env.OUTPUT_DIR_RELATIVE}/*"
315               archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
316               publishHTML target: [
317                 allowMissing         : true,
318                 keepAll              : true,
319                 alwaysLinkToLastBuild: true,
320                 // Has to be relative to WORKSPACE.
321                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
322                 reportFiles          : 'console-report.html',
323                 reportName           : 'JDK7 Nightly Build Report'
324               ]
325             }
326           }
327         }
328         stage ('yetus jdk8 hadoop2 checks') {
329           agent {
330             node {
331               label 'Hadoop'
332             }
333           }
334           environment {
335             BASEDIR = "${env.WORKSPACE}/component"
336             TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
337             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
338             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
339             // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
340             // and is needed on branches that do both jdk7 and jdk8
341             SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
342           }
343           steps {
344             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
345             sh '''#!/usr/bin/env bash
346               set -e
347               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
348               echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
349               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
351             unstash 'yetus'
352             dir('component') {
353               checkout scm
354             }
355             sh '''#!/usr/bin/env bash
356               set -e
357               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
358               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
359               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
360               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
362             sh '''#!/usr/bin/env bash
363               set -e
364               declare -i status=0
365               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
366                 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
367               else
368                 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
369                 status=1
370               fi
371               echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
372               exit "${status}"
373             '''
374           }
375           post {
376             always {
377               stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
378               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
379               // zip surefire reports.
380               sh '''#!/bin/bash -e
381                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
382                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
383                   if [[ 0 -ne ${count} ]]; then
384                     echo "zipping ${count} archived files"
385                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
386                   else
387                     echo "No archived files, skipping compressing."
388                   fi
389                 else
390                   echo "No archiver directory, skipping compressing."
391                 fi
393               // Has to be relative to WORKSPACE.
394               archive "${env.OUTPUT_DIR_RELATIVE}/*"
395               archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
396               publishHTML target: [
397                 allowMissing         : true,
398                 keepAll              : true,
399                 alwaysLinkToLastBuild: true,
400                 // Has to be relative to WORKSPACE.
401                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
402                 reportFiles          : 'console-report.html',
403                 reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
404               ]
405             }
406           }
407         }
408         stage ('yetus jdk8 hadoop3 checks') {
409           agent {
410             node {
411               label 'Hadoop'
412             }
413           }
414           when {
415             not {
416               branch 'branch-1*'
417             }
418           }
419           environment {
420             BASEDIR = "${env.WORKSPACE}/component"
421             TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
422             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
423             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
424             // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
425             // and is needed on branches that do both jdk7 and jdk8
426             SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
427             // Activates hadoop 3.0 profile in maven runs.
428             HADOOP_PROFILE = '3.0'
429           }
430           steps {
431             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
432             sh '''#!/usr/bin/env bash
433               set -e
434               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
435               echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
436               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
438             unstash 'yetus'
439             dir('component') {
440               checkout scm
441             }
442             sh '''#!/usr/bin/env bash
443               set -e
444               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
445               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
446               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
447               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
449             sh '''#!/usr/bin/env bash
450               set -e
451               declare -i status=0
452               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
453                 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
454               else
455                 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
456                 status=1
457               fi
458               echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
459               exit "${status}"
460             '''
461           }
462           post {
463             always {
464               stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
465               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
466               // zip surefire reports.
467               sh '''#!/bin/bash -e
468                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
469                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
470                   if [[ 0 -ne ${count} ]]; then
471                     echo "zipping ${count} archived files"
472                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
473                   else
474                     echo "No archived files, skipping compressing."
475                   fi
476                 else
477                   echo "No archiver directory, skipping compressing."
478                 fi
480               // Has to be relative to WORKSPACE.
481               archive "${env.OUTPUT_DIR_RELATIVE}/*"
482               archive "${env.OUTPUT_DIR_RELATIVE}/**/*"
483               publishHTML target: [
484                 allowMissing         : true,
485                 keepAll              : true,
486                 alwaysLinkToLastBuild: true,
487                 // Has to be relative to WORKSPACE.
488                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
489                 reportFiles          : 'console-report.html',
490                 reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
491               ]
492             }
493           }
494         }
495         // This is meant to mimic what a release manager will do to create RCs.
496         // See http://hbase.apache.org/book.html#maven.release
497         stage ('packaging and integration') {
498           tools {
499             maven 'Maven (latest)'
500             // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
501             jdk "JDK 1.8 (latest)"
502           }
503           environment {
504             BASEDIR = "${env.WORKSPACE}/component"
505           }
506           steps {
507             sh '''#!/bin/bash -e
508               echo "Setting up directories"
509               rm -rf "output-srctarball" && mkdir "output-srctarball"
510               rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
511               rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
512               rm -rf "hbase-install" && mkdir "hbase-install"
513               rm -rf "hbase-client" && mkdir "hbase-client"
514               rm -rf "hadoop-2" && mkdir "hadoop-2"
515               rm -rf "hadoop-3" && mkdir "hadoop-3"
516               rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
517               rm -rf ".m2-for-src" && mkdir ".m2-for-src"
518               echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
519               echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
521             sh '''#!/usr/bin/env bash
522               set -e
523               rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
524               "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
525               echo "got the following saved stats in 'output-srctarball/machine'"
526               ls -lh "output-srctarball/machine"
528             sh """#!/bin/bash -e
529               echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
530               if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
531                   --intermediate-file-dir output-srctarball \
532                   --unpack-temp-dir unpacked_src_tarball \
533                   --maven-m2-initial .m2-for-repo \
534                   --maven-m2-src-build .m2-for-src \
535                   --clean-source-checkout \
536                   "${env.BASEDIR}" ; then
537                 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
538               else
539                 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
540                 exit 1
541               fi
543             echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
544             sh '''#!/bin/bash -e
545               if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
546                 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
547                 exit 1
548               fi
549               install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
550               tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
551               client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
552               tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
554             unstash 'hadoop-2'
555             echo "Attempting to use run an instance on top of Hadoop 2."
556             sh '''#!/bin/bash -xe
557               artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
558               tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
559               if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
560                   --single-process \
561                   --working-dir output-integration/hadoop-2 \
562                   --hbase-client-install "hbase-client" \
563                   "hbase-install" \
564                   "hadoop-2/bin/hadoop" \
565                   hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
566                   hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
567                   >output-integration/hadoop-2.log 2>&1 ; then
568                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
569                 exit 2
570               fi
            // Hadoop 2 run passed; fetch the Hadoop 3 binary tarball staged by the
            // earlier packaging stage and repeat the client smoke test against it.
572             unstash 'hadoop-3'
573             echo "Attempting to run an instance on top of Hadoop 3."
            // First pass: unshaded Hadoop 3 client. Second pass (below): Hadoop's
            // shaded client artifacts on the example client's classpath.
574             sh '''#!/bin/bash -e
575               artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
576               tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
577               if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
578                   --single-process \
579                   --working-dir output-integration/hadoop-3 \
580                   --hbase-client-install hbase-client \
581                   hbase-install \
582                   hadoop-3/bin/hadoop \
583                   hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
584                   hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
585                   >output-integration/hadoop-3.log 2>&1 ; then
586                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
587                 exit 2
588               fi
589               echo "Attempting to run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
590               if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
591                   --single-process \
592                   --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
593                   --working-dir output-integration/hadoop-3-shaded \
594                   --hbase-client-install hbase-client \
595                   hbase-install \
596                   hadoop-3/bin/hadoop \
597                   hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
598                   hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
599                   >output-integration/hadoop-3-shaded.log 2>&1 ; then
600                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
601                 exit 2
602               fi
603               echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
607           }
          // Always preserve this stage's outputs, pass or fail: the pipeline-level
          // post block unstashes 'srctarball-result' to aggregate the per-stage
          // comment files, and the archived logs back up the links those comments
          // embed (e.g. output-integration/hadoop-3.log referenced above).
608           post {
609             always {
610               stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
611               archive 'output-srctarball/*'
612               archive 'output-srctarball/**/*'
613               archive 'output-integration/*'
614               archive 'output-integration/**/*'
615             }
616           }
617         }
618       }
619     }
620   }
  // Pipeline-level wrap-up: collect each stage's commentfile, build one summary
  // comment with the overall +1/-1 verdict, and post it to every HBASE-NNN jira
  // found on the branch name or in this build's changesets. Wrapped in try/catch
  // so a reporting failure never changes the build result.
621   post {
622     always {
623       script {
624          try {
           // Each unstash pulls in a commentfile written by its stage; if a stage
           // died before stashing, the unstash throws and we land in the catch.
625            unstash 'general-result'
626            unstash 'jdk7-result'
627            unstash 'hadoop2-result'
628            unstash 'hadoop3-result'
629            unstash 'srctarball-result'
630            sh "printenv"
           // Ordered list of per-stage comment fragments to aggregate below.
631            def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
632                           "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
633                           "${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
634                           "${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
635                           'output-srctarball/commentfile',
636                           'output-integration/commentfile']
637            echo env.BRANCH_NAME
638            echo env.BUILD_URL
639            echo currentBuild.result
640            echo currentBuild.durationString
           // Jira wiki markup: (/) green +1 on success, (x) red -1 otherwise.
           // currentBuild.result is null while the build is still SUCCESS-so-far.
641            def comment = "Results for branch ${env.BRANCH_NAME}\n"
642            comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
643            if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
644               comment += "(/) *{color:green}+1 overall{color}*\n"
645            } else {
646               comment += "(x) *{color:red}-1 overall{color}*\n"
647               // Ideally get the committer out of the change and @ mention them in the per-jira comment
648            }
649            comment += "----\ndetails (if available):\n\n"
650            echo ""
651            echo "[DEBUG] trying to aggregate step-wise results"
           // Missing commentfiles (stage skipped/failed early) contribute an empty string.
652            comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
653            echo "[INFO] Comment:"
654            echo comment
655            echo ""
656            echo "[DEBUG] checking to see if feature branch"
           // A feature branch named after a jira (e.g. HBASE-12345) gets commented
           // directly; otherwise fall back to jira keys mined from commit messages.
657            def jiras = getJirasToComment(env.BRANCH_NAME, [])
658            if (jiras.isEmpty()) {
659              echo "[DEBUG] non-feature branch, checking change messages for jira keys."
660              echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
661              jiras = getJirasToCommentFromChangesets(currentBuild)
662            }
663            jiras.each { currentIssue ->
664              jiraComment issueKey: currentIssue, body: comment
665            }
        // Best-effort reporting: log and swallow any failure so the build's
        // recorded result reflects the stages, not this post step.
666         } catch (Exception exception) {
667           echo "Got exception: ${exception}"
668           echo "    ${exception.getStackTrace()}"
669         }
670       }
671     }
672   }
674 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
// Scans every commit message in this build's changesets for HBASE-NNN jira keys.
// Logs each change (message, commit id, author) and threads the accumulator list
// through getJirasToComment so duplicates across commits are only kept once.
// Returns the list of distinct jira keys seen across all changesets.
// NOTE(review): marked @NonCPS presumably because SCM change objects are not
// serializable for Jenkins CPS — confirm before removing the annotation.
675 @NonCPS
676 List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
677   def seenJiras = []
678   thisBuild.changeSets.each { cs ->
679     cs.getItems().each { change ->
680       CharSequence msg = change.msg
681       echo "change: ${change}"
682       echo "     ${msg}"
683       echo "     ${change.commitId}"
684       echo "     ${change.author}"
685       echo ""
      // Matching and dedup live in getJirasToComment; it returns the updated list.
686       seenJiras = getJirasToComment(msg, seenJiras)
687     }
688   }
689   return seenJiras
// Finds every HBASE-NNN key in 'source' (a branch name or commit message) and
// appends each one not already present to 'seen', logging matches either way.
// Returns the same 'seen' list (mutated in place) so callers can chain calls.
// NOTE(review): @NonCPS presumably because eachMatch takes a closure that the
// CPS transform cannot handle — confirm before removing the annotation.
691 @NonCPS
692 List<String> getJirasToComment(CharSequence source, List<String> seen) {
693   source.eachMatch("HBASE-[0-9]+") { currentIssue ->
694     echo "[DEBUG] found jira key: ${currentIssue}"
695     if (currentIssue in seen) {
696       echo "[DEBUG] already commented on ${currentIssue}."
697     } else {
698       echo "[INFO] commenting on ${currentIssue}."
699       seen << currentIssue
700     }
701   }
702   return seen