// HBASE-23755. [OpenTracing] Declare HTrace is unusable in the user doc (#1196)
// [hbase.git] / dev-support / Jenkinsfile
// blob 2eb09475eee0351992ce05f7752714c46611db42
1 // Licensed to the Apache Software Foundation (ASF) under one
2 // or more contributor license agreements.  See the NOTICE file
3 // distributed with this work for additional information
4 // regarding copyright ownership.  The ASF licenses this file
5 // to you under the Apache License, Version 2.0 (the
6 // "License"); you may not use this file except in compliance
7 // with the License.  You may obtain a copy of the License at
8 //
9 //   http://www.apache.org/licenses/LICENSE-2.0
11 // Unless required by applicable law or agreed to in writing,
12 // software distributed under the License is distributed on an
13 // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 // KIND, either express or implied.  See the License for the
15 // specific language governing permissions and limitations
16 // under the License.
17 pipeline {
  agent {
    node {
      // Run on any executor carrying the 'Hadoop' label.
      label 'Hadoop'
    }
  }
  triggers {
    // Nightly job: trigger once per day.
    cron('@daily')
  }
  options {
    // Keep logs/artifacts for only the most recent 15 builds.
    buildDiscarder(logRotator(numToKeepStr: '15'))
    // Hard cap so a hung stage cannot pin executors indefinitely.
    timeout (time: 9, unit: 'HOURS')
    timestamps()
    // Stages that need sources do their own `checkout scm` into 'component',
    // so skip the implicit workspace checkout.
    skipDefaultCheckout()
    disableConcurrentBuilds()
  }
  environment {
    // Pinned Apache Yetus release used for the test-patch runs below.
    YETUS_RELEASE = '0.11.1'
    // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
    OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
    OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
    OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
    PROJECT = 'hbase'
    // Personality script is always fetched from master, regardless of branch under test.
    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
    PERSONALITY_FILE = 'tools/personality.sh'
    // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
    AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
    WHITESPACE_IGNORE_LIST = '.*/generated/.*'
    // output from surefire; sadly the archive function in yetus only works on file names.
    ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
    // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
    TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
    // Flaky-test exclusion list published per-branch by the HBase-Find-Flaky-Tests job.
    EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
  }
  parameters {
    // NOTE(review): the checked-in copy appears to have lost a line between the two
    // sentences of this description (likely a blank line) — confirm against upstream.
    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
59   stages {
60     stage ('scm-checkout') {
61       steps {
62             dir('component') {
63               checkout scm
64             }
65       }
66     }
67     stage ('thirdparty installs') {
68       parallel {
69         stage ('yetus install') {
70           steps {
71             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
72             dir('downloads-yetus') {
73               // can't just do a simple echo or the directory won't be created. :(
74               sh '''#!/usr/bin/env bash
75                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
76 '''
77             }
78             sh  '''#!/usr/bin/env bash
79               set -e
80               echo "Ensure we have a copy of Apache Yetus."
81               if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
82                 YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
83                 echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
84                 if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
85                   rm -rf "${YETUS_DIR}"
86                   "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
87                       --working-dir "${WORKSPACE}/downloads-yetus" \
88                       --keys 'https://www.apache.org/dist/yetus/KEYS' \
89                       "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
90                       "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
91                   mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
92                 else
93                   echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
94                 fi
95               else
96                 YETUS_DIR="${WORKSPACE}/yetus-git"
97                 rm -rf "${YETUS_DIR}"
98                 echo "downloading from github"
99                 curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
100               fi
101               if [ ! -d "${YETUS_DIR}" ]; then
102                 echo "unpacking yetus into '${YETUS_DIR}'"
103                 mkdir -p "${YETUS_DIR}"
104                 gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
105               fi
106             '''
107             // Set up the file we need at PERSONALITY_FILE location
108             dir ("tools") {
109               sh """#!/usr/bin/env bash
110                 set -e
111                 echo "Downloading Project personality."
112                 curl -L  -o personality.sh "${env.PROJECT_PERSONALITY}"
113               """
114             }
115             stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
116           }
117         }
118         stage ('hadoop 2 cache') {
119           environment {
120             HADOOP2_VERSION="2.8.5"
121           }
122           steps {
123             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
124             dir('downloads-hadoop-2') {
125               sh '''#!/usr/bin/env bash
126                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
128             }
129             sh '''#!/usr/bin/env bash
130               set -e
131               echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
132               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
133                   --working-dir "${WORKSPACE}/downloads-hadoop-2" \
134                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
135                   "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
136                   "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
137               for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
138                 echo "Delete stale hadoop 2 cache ${stale}"
139                 rm -rf $stale
140               done
141             '''
142             stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
143           }
144         }
145         stage ('hadoop 3 cache') {
146           environment {
147             HADOOP3_VERSION="3.1.1"
148           }
149           steps {
150             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
151             dir('downloads-hadoop-3') {
152               sh '''#!/usr/bin/env bash
153                 echo "Make sure we have a directory for downloading dependencies: $(pwd)"
155             }
156             sh '''#!/usr/bin/env bash
157               set -e
158               echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
159               "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
160                   --working-dir "${WORKSPACE}/downloads-hadoop-3" \
161                   --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
162                   "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
163                   "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
164               for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
165                 echo "Delete stale hadoop 3 cache ${stale}"
166                 rm -rf $stale
167               done
168             '''
169             stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
170           }
171         }
172       }
173     }
    stage ('init health results') {
      steps {
        // stash with given name for all tests we might run, so that we can unstash all of them even if
        // we skip some due to e.g. branch-specific JDK or Hadoop support
        // (the "doesn't-match" include pattern is presumably chosen to match no files,
        // so each stash starts out empty — allowEmpty makes that legal)
        stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
        stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
        stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
        stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
        stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
      }
    }
185     stage ('health checks') {
186       parallel {
187         stage ('yetus general check') {
188           agent {
189             node {
190               label 'Hadoop'
191             }
192           }
193           environment {
194             BASEDIR = "${env.WORKSPACE}/component"
195             // TODO does hadoopcheck need to be jdk specific?
196             // Should be things that work with multijdk
197             TESTS = 'all,-unit,-findbugs'
198             // on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
199             // doing multijdk there.
200             MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
201             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
202             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
203           }
204           steps {
205             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
206             sh '''#!/usr/bin/env bash
207               set -e
208               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
209               echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
210               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
212             unstash 'yetus'
213             // since we have a new node definition we need to re-do the scm checkout
214             dir('component') {
215               checkout scm
216             }
217             sh '''#!/usr/bin/env bash
218               set -e
219               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
220               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
221               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
222               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
224             // TODO roll this into the hbase_nightly_yetus script
225             sh '''#!/usr/bin/env bash
226               set -e
227               declare -i status=0
228               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
229                 echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
230               else
231                 echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
232                 status=1
233               fi
234               echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
235               exit "${status}"
236             '''
237           }
238           post {
239             always {
240               stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
241               // Has to be relative to WORKSPACE.
242               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
243               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
244               publishHTML target: [
245                 allowMissing: true,
246                 keepAll: true,
247                 alwaysLinkToLastBuild: true,
248                 // Has to be relative to WORKSPACE
249                 reportDir: "${env.OUTPUT_DIR_RELATIVE}",
250                 reportFiles: 'console-report.html',
251                 reportName: 'General Nightly Build Report'
252               ]
253             }
254           }
255         }
256         stage ('yetus jdk7 checks') {
257           agent {
258             node {
259               label 'Hadoop'
260             }
261           }
262           when {
263             branch 'branch-1*'
264           }
265           environment {
266             BASEDIR = "${env.WORKSPACE}/component"
267             TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
268             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
269             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
270             // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
271           }
272           steps {
273             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
274             sh '''#!/usr/bin/env bash
275               set -e
276               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
277               echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
278               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
280             unstash 'yetus'
281             dir('component') {
282               checkout scm
283             }
284             sh '''#!/usr/bin/env bash
285               set -e
286               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
287               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
288               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
289               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
291             sh '''#!/usr/bin/env bash
292               set -e
293               declare -i status=0
294               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
295                 echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
296               else
297                 echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
298                 status=1
299               fi
300               echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
301               exit "${status}"
302             '''
303           }
304           post {
305             always {
306               stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
307               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
308               // zip surefire reports.
309               sh '''#!/bin/bash -e
310                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
311                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
312                   if [[ 0 -ne ${count} ]]; then
313                     echo "zipping ${count} archived files"
314                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
315                   else
316                     echo "No archived files, skipping compressing."
317                   fi
318                 else
319                   echo "No archiver directory, skipping compressing."
320                 fi
322               // Has to be relative to WORKSPACE.
323               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
324               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
325               publishHTML target: [
326                 allowMissing         : true,
327                 keepAll              : true,
328                 alwaysLinkToLastBuild: true,
329                 // Has to be relative to WORKSPACE.
330                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
331                 reportFiles          : 'console-report.html',
332                 reportName           : 'JDK7 Nightly Build Report'
333               ]
334             }
335           }
336         }
337         stage ('yetus jdk8 hadoop2 checks') {
338           agent {
339             node {
340               label 'Hadoop'
341             }
342           }
343           environment {
344             BASEDIR = "${env.WORKSPACE}/component"
345             TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
346             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
347             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
348             // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
349             // and is needed on branches that do both jdk7 and jdk8
350             SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
351           }
352           steps {
353             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
354             sh '''#!/usr/bin/env bash
355               set -e
356               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
357               echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
358               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
360             unstash 'yetus'
361             dir('component') {
362               checkout scm
363             }
364             sh '''#!/usr/bin/env bash
365               set -e
366               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
367               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
368               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
369               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
371             sh '''#!/usr/bin/env bash
372               set -e
373               declare -i status=0
374               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
375                 echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
376               else
377                 echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
378                 status=1
379               fi
380               echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
381               exit "${status}"
382             '''
383           }
384           post {
385             always {
386               stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
387               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
388               // zip surefire reports.
389               sh '''#!/bin/bash -e
390                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
391                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
392                   if [[ 0 -ne ${count} ]]; then
393                     echo "zipping ${count} archived files"
394                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
395                   else
396                     echo "No archived files, skipping compressing."
397                   fi
398                 else
399                   echo "No archiver directory, skipping compressing."
400                 fi
402               // Has to be relative to WORKSPACE.
403               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
404               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
405               publishHTML target: [
406                 allowMissing         : true,
407                 keepAll              : true,
408                 alwaysLinkToLastBuild: true,
409                 // Has to be relative to WORKSPACE.
410                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
411                 reportFiles          : 'console-report.html',
412                 reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
413               ]
414             }
415           }
416         }
417         stage ('yetus jdk8 hadoop3 checks') {
418           agent {
419             node {
420               label 'Hadoop'
421             }
422           }
423           when {
424             not {
425               branch 'branch-1*'
426             }
427           }
428           environment {
429             BASEDIR = "${env.WORKSPACE}/component"
430             TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
431             OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
432             OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
433             // This isn't strictly needed on branches that only support jdk8, but doesn't hurt
434             // and is needed on branches that do both jdk7 and jdk8
435             SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
436             // Activates hadoop 3.0 profile in maven runs.
437             HADOOP_PROFILE = '3.0'
438           }
439           steps {
440             // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
441             sh '''#!/usr/bin/env bash
442               set -e
443               rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
444               echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
445               echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
447             unstash 'yetus'
448             dir('component') {
449               checkout scm
450             }
451             sh '''#!/usr/bin/env bash
452               set -e
453               rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
454               "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
455               echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
456               ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
458             sh '''#!/usr/bin/env bash
459               set -e
460               declare -i status=0
461               if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
462                 echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
463               else
464                 echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
465                 status=1
466               fi
467               echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
468               exit "${status}"
469             '''
470           }
471           post {
472             always {
473               stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
474               junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
475               // zip surefire reports.
476               sh '''#!/bin/bash -e
477                 if [ -d "${OUTPUT_DIR}/archiver" ]; then
478                   count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
479                   if [[ 0 -ne ${count} ]]; then
480                     echo "zipping ${count} archived files"
481                     zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
482                   else
483                     echo "No archived files, skipping compressing."
484                   fi
485                 else
486                   echo "No archiver directory, skipping compressing."
487                 fi
489               // Has to be relative to WORKSPACE.
490               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
491               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
492               publishHTML target: [
493                 allowMissing         : true,
494                 keepAll              : true,
495                 alwaysLinkToLastBuild: true,
496                 // Has to be relative to WORKSPACE.
497                 reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
498                 reportFiles          : 'console-report.html',
499                 reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
500               ]
501             }
502           }
503         }
504         // This is meant to mimic what a release manager will do to create RCs.
505         // See http://hbase.apache.org/book.html#maven.release
506         stage ('packaging and integration') {
507           tools {
508             maven 'Maven (latest)'
509             // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
510             jdk "JDK 1.8 (latest)"
511           }
512           environment {
513             BASEDIR = "${env.WORKSPACE}/component"
514           }
515           steps {
516             sh '''#!/bin/bash -e
517               echo "Setting up directories"
518               rm -rf "output-srctarball" && mkdir "output-srctarball"
519               rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
520               rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
521               rm -rf "hbase-install" && mkdir "hbase-install"
522               rm -rf "hbase-client" && mkdir "hbase-client"
523               rm -rf "hadoop-2" && mkdir "hadoop-2"
524               rm -rf "hadoop-3" && mkdir "hadoop-3"
525               rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
526               rm -rf ".m2-for-src" && mkdir ".m2-for-src"
527               echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
528               echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
530             sh '''#!/usr/bin/env bash
531               set -e
532               rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
533               "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
534               echo "got the following saved stats in 'output-srctarball/machine'"
535               ls -lh "output-srctarball/machine"
537             sh """#!/bin/bash -e
538               echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
539               if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
540                   --intermediate-file-dir output-srctarball \
541                   --unpack-temp-dir unpacked_src_tarball \
542                   --maven-m2-initial .m2-for-repo \
543                   --maven-m2-src-build .m2-for-src \
544                   --clean-source-checkout \
545                   "${env.BASEDIR}" ; then
546                 echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
547               else
548                 echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
549                 exit 1
550               fi
552             echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
553             sh '''#!/bin/bash -e
554               if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
555                 echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
556                 exit 1
557               fi
558               install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
559               tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
560               client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
561               tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
563             unstash 'hadoop-2'
564             echo "Attempting to use run an instance on top of Hadoop 2."
565             sh '''#!/bin/bash -xe
566               artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
567               tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
568               if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
569                   --single-process \
570                   --working-dir output-integration/hadoop-2 \
571                   --hbase-client-install "hbase-client" \
572                   "hbase-install" \
573                   "hadoop-2/bin/hadoop" \
574                   hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
575                   hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
576                   hadoop-2/bin/mapred \
577                   >output-integration/hadoop-2.log 2>&1 ; then
578                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
579                 exit 2
580               fi
582             unstash 'hadoop-3'
583             echo "Attempting to use run an instance on top of Hadoop 3."
584             sh '''#!/bin/bash -e
585               artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
586               tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
587               if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
588                   --single-process \
589                   --working-dir output-integration/hadoop-3 \
590                   --hbase-client-install hbase-client \
591                   hbase-install \
592                   hadoop-3/bin/hadoop \
593                   hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
594                   hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
595                   hadoop-3/bin/mapred \
596                   >output-integration/hadoop-3.log 2>&1 ; then
597                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
598                 exit 2
599               fi
600               echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
601               if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
602                   --single-process \
603                   --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
604                   --working-dir output-integration/hadoop-3-shaded \
605                   --hbase-client-install hbase-client \
606                   hbase-install \
607                   hadoop-3/bin/hadoop \
608                   hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
609                   hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
610                   hadoop-3/bin/mapred \
611                   >output-integration/hadoop-3-shaded.log 2>&1 ; then
612                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
613                 exit 2
614               fi
615               echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
619           }
620           post {
621             always {
622               stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
623               archiveArtifacts artifacts: 'output-srctarball/*'
624               archiveArtifacts artifacts: 'output-srctarball/**/*'
625               archiveArtifacts artifacts: 'output-integration/*'
626               archiveArtifacts artifacts: 'output-integration/**/*'
627             }
628           }
629         }
630       }
631     }
632   }
  // Pipeline-level post action: aggregates each stage's "commentfile" fragment
  // and posts the combined result as a comment on the relevant JIRA issue(s).
  post {
    always {
      script {
         try {
           // Pull down the per-stage result stashes produced earlier in the pipeline.
           unstash 'general-result'
           unstash 'jdk7-result'
           unstash 'hadoop2-result'
           unstash 'hadoop3-result'
           unstash 'srctarball-result'
           // Dump the environment for debugging.
           sh "printenv"
           // Paths to the per-stage comment fragments; stages that never ran
           // leave no file, which is tolerated by the fileExists check below.
           def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
                          'output-srctarball/commentfile',
                          'output-integration/commentfile']
           echo env.BRANCH_NAME
           echo env.BUILD_URL
           echo currentBuild.result
           echo currentBuild.durationString
           // Build the JIRA comment header with an overall +1/-1 verdict in
           // JIRA wiki markup.
           def comment = "Results for branch ${env.BRANCH_NAME}\n"
           comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
           // currentBuild.result is null while a still-running build has not
           // been marked failed, so treat null the same as SUCCESS.
           if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
              comment += "(/) *{color:green}+1 overall{color}*\n"
           } else {
              comment += "(x) *{color:red}-1 overall{color}*\n"
              // Ideally get the committer out of the change and @ mention them in the per-jira comment
           }
           comment += "----\ndetails (if available):\n\n"
           echo ""
           echo "[DEBUG] trying to aggregate step-wise results"
           // Append each stage's fragment; substitute "" for missing files.
           comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
           echo "[INFO] Comment:"
           echo comment
           echo ""
           echo "[DEBUG] checking to see if feature branch"
           // A feature branch named after a JIRA key gets commented directly;
           // otherwise fall back to scanning commit messages for keys.
           def jiras = getJirasToComment(env.BRANCH_NAME, [])
           if (jiras.isEmpty()) {
             echo "[DEBUG] non-feature branch, checking change messages for jira keys."
             echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
             jiras = getJirasToCommentFromChangesets(currentBuild)
           }
           jiras.each { currentIssue ->
             jiraComment issueKey: currentIssue, body: comment
           }
        } catch (Exception exception) {
          // Never fail the build because result reporting failed; just log it.
          echo "Got exception: ${exception}"
          echo "    ${exception.getStackTrace()}"
        }
      }
    }
  }
686 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
// Scan every changeset of the given build for JIRA keys mentioned in commit
// messages and return the accumulated list of issue keys to comment on.
// Marked @NonCPS because it iterates the non-serializable SCM change objects
// exposed by RunWrapper.changeSets.
// NOTE(review): invoking the echo pipeline step from a @NonCPS method is
// discouraged by the Pipeline docs — confirm these debug echos behave as
// expected on this Jenkins version.
@NonCPS
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
  def seenJiras = []
  thisBuild.changeSets.each { cs ->
    cs.getItems().each { change ->
      CharSequence msg = change.msg
      echo "change: ${change}"
      echo "     ${msg}"
      echo "     ${change.commitId}"
      echo "     ${change.author}"
      echo ""
      // Accumulate keys across commits; seenJiras carries the de-dup state.
      seenJiras = getJirasToComment(msg, seenJiras)
    }
  }
  return seenJiras
// Append every "HBASE-NNNN" issue key found in source to the seen list,
// skipping keys already present, and return the (mutated) list.
// Marked @NonCPS so it can be called from other @NonCPS code above.
@NonCPS
List<String> getJirasToComment(CharSequence source, List<String> seen) {
  // Any substring matching an HBASE JIRA key is a candidate issue.
  source.eachMatch("HBASE-[0-9]+") { currentIssue ->
    echo "[DEBUG] found jira key: ${currentIssue}"
    if (currentIssue in seen) {
      // Already queued for a comment; do not add it twice.
      echo "[DEBUG] already commented on ${currentIssue}."
    } else {
      echo "[INFO] commenting on ${currentIssue}."
      seen << currentIssue
    }
  }
  return seen