Posted to commits@hbase.apache.org by ps...@apache.org on 2022/01/26 15:04:51 UTC

[hbase] branch branch-2.3 updated: HBASE-26710 Remove jenkins files from branch-2.3 (#4067)

This is an automated email from the ASF dual-hosted git repository.

psomogyi pushed a commit to branch branch-2.3
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2.3 by this push:
     new d9819ad  HBASE-26710 Remove jenkins files from branch-2.3 (#4067)
d9819ad is described below

commit d9819ad417e9486844af7db2a3d981628f7f3272
Author: Peter Somogyi <ps...@apache.org>
AuthorDate: Wed Jan 26 16:03:55 2022 +0100

    HBASE-26710 Remove jenkins files from branch-2.3 (#4067)
    
    Signed-off-by: Duo Zhang <zh...@apache.org>
---
 dev-support/Jenkinsfile                            | 933 ---------------------
 dev-support/Jenkinsfile_GitHub                     | 443 ----------
 .../flaky-tests/flaky-reporting.Jenkinsfile        |  67 --
 .../flaky-tests/run-flaky-tests.Jenkinsfile        |  93 --
 4 files changed, 1536 deletions(-)

diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
deleted file mode 100644
index 983938d..0000000
--- a/dev-support/Jenkinsfile
+++ /dev/null
@@ -1,933 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-pipeline {
-  agent {
-    node {
-      label 'hbase'
-    }
-  }
-  triggers {
-    pollSCM('@daily')
-  }
-  options {
-    buildDiscarder(logRotator(numToKeepStr: '15'))
-    timeout (time: 16, unit: 'HOURS')
-    timestamps()
-    skipDefaultCheckout()
-    disableConcurrentBuilds()
-  }
-  environment {
-    YETUS_RELEASE = '0.12.0'
-    // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
-    OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
-    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
-    OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
-    OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
-    OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
-
-    PROJECT = 'hbase'
-    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
-    PERSONALITY_FILE = 'tools/personality.sh'
-    // This section of the docs tells folks not to use the javadoc tag. Older branches have our old version of the check for said tag.
-    AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
-    WHITESPACE_IGNORE_LIST = '.*/generated/.*'
-    // output from surefire; sadly the archive function in yetus only works on file names.
-    ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
-    // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
-    TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
-    EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
-      // TODO does hadoopcheck need to be jdk specific?
-    SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
-    DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
-    ASF_NIGHTLIES = 'https://nightlies.apache.org'
-  }
-  parameters {
-    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
-
-    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
-    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
-  }
-  stages {
-    stage ('scm-checkout') {
-      steps {
-            dir('component') {
-              checkout scm
-            }
-      }
-    }
-    stage ('thirdparty installs') {
-      parallel {
-        stage ('yetus install') {
-          steps {
-            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
-            dir('downloads-yetus') {
-              // can't just do a simple echo or the directory won't be created. :(
-              sh '''#!/usr/bin/env bash
-                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
-'''
-            }
-            sh  '''#!/usr/bin/env bash
-              set -e
-              echo "Ensure we have a copy of Apache Yetus."
-              if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
-                YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
-                echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
-                if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
-                  rm -rf "${YETUS_DIR}"
-                  "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
-                      --working-dir "${WORKSPACE}/downloads-yetus" \
-                      --keys 'https://www.apache.org/dist/yetus/KEYS' \
-                      --verify-tar-gz \
-                      "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
-                      "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
-                  mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
-                else
-                  echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
-                fi
-              else
-                YETUS_DIR="${WORKSPACE}/yetus-git"
-                rm -rf "${YETUS_DIR}"
-                echo "downloading from github"
-                curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
-              fi
-              if [ ! -d "${YETUS_DIR}" ]; then
-                echo "unpacking yetus into '${YETUS_DIR}'"
-                mkdir -p "${YETUS_DIR}"
-                gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
-              fi
-            '''
-            // Set up the file we need at PERSONALITY_FILE location
-            dir ("tools") {
-              sh """#!/usr/bin/env bash
-                set -e
-                echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
-                curl -L  -o personality.sh "${env.PROJECT_PERSONALITY}"
-              """
-            }
-            stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
-          }
-        }
-        stage ('hadoop 2 cache') {
-          environment {
-            HADOOP2_VERSION="2.10.0"
-          }
-          steps {
-            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
-            dir('downloads-hadoop-2') {
-              sh '''#!/usr/bin/env bash
-                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
-'''
-            }
-            sh '''#!/usr/bin/env bash
-              set -e
-              echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
-              "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
-                  --working-dir "${WORKSPACE}/downloads-hadoop-2" \
-                  --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
-                  --verify-tar-gz \
-                  "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
-                  "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
-              for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
-                echo "Delete stale hadoop 2 cache ${stale}"
-                rm -rf $stale
-              done
-            '''
-            stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
-          }
-        }
-        stage ('hadoop 3 cache') {
-          environment {
-            HADOOP3_VERSION="3.1.1"
-          }
-          steps {
-            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
-            dir('downloads-hadoop-3') {
-              sh '''#!/usr/bin/env bash
-                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
-'''
-            }
-            sh '''#!/usr/bin/env bash
-              set -e
-              echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
-              "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
-                  --working-dir "${WORKSPACE}/downloads-hadoop-3" \
-                  --keys 'http://www.apache.org/dist/hadoop/common/KEYS' \
-                  --verify-tar-gz \
-                  "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
-                  "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
-              for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
-                echo "Delete stale hadoop 3 cache ${stale}"
-                rm -rf $stale
-              done
-            '''
-            stash name: 'hadoop-3', includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
-          }
-        }
-      }
-    }
-    stage ('init health results') {
-      steps {
-        // stash with given name for all tests we might run, so that we can unstash all of them even if
-        // we skip some due to e.g. branch-specific JDK or Hadoop support
-        stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
-        stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
-        stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
-        stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
-        stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
-        stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
-      }
-    }
-    stage ('health checks') {
-      parallel {
-        stage ('yetus general check') {
-          agent {
-            node {
-              label 'hbase'
-            }
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = "${env.SHALLOW_CHECKS}"
-            SET_JAVA_HOME = '/usr/lib/jvm/java-8'
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
-          }
-          steps {
-            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
-              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
-            '''
-            unstash 'yetus'
-            // since we have a new node definition we need to re-do the scm checkout
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
-              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
-            '''
-            // TODO roll this into the hbase_nightly_yetus script
-            script {
-              def ret = sh(
-                returnStatus: true,
-                script: '''#!/usr/bin/env bash
-                  set -e
-                  declare -i status=0
-                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                    echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
-                  else
-                    echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
-                    status=1
-                  fi
-                  echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
-                  exit "${status}"
-                '''
-              )
-              if (ret != 0) {
-                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                // test output. See HBASE-26339 for more details.
-                currentBuild.result = 'UNSTABLE'
-              }
-            }
-          }
-          post {
-            always {
-              stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              // Has to be relative to WORKSPACE.
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing: true,
-                keepAll: true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE
-                reportDir: "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles: 'console-report.html',
-                reportName: 'General Nightly Build Report'
-              ]
-            }
-          }
-        }
-        stage ('yetus jdk7 checks') {
-          agent {
-            node {
-              label 'hbase'
-            }
-          }
-          when {
-            branch 'branch-1*'
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = "${env.DEEP_CHECKS}"
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
-            SET_JAVA_HOME = "/usr/lib/jvm/java-7"
-          }
-          steps {
-            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
-              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
-            '''
-            unstash 'yetus'
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
-              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
-            '''
-            script {
-              def ret = sh(
-                returnStatus: true,
-                script: '''#!/usr/bin/env bash
-                  set -e
-                  declare -i status=0
-                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                    echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                  else
-                    echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                    status=1
-                  fi
-                  echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
-                  exit "${status}"
-                '''
-              )
-              if (ret != 0) {
-                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                // test output. See HBASE-26339 for more details.
-                currentBuild.result = 'UNSTABLE'
-              }
-            }
-          }
-          post {
-            always {
-              stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
-              // zip surefire reports.
-              sh '''#!/bin/bash -e
-                if [ -d "${OUTPUT_DIR}/archiver" ]; then
-                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
-                  if [[ 0 -ne ${count} ]]; then
-                    echo "zipping ${count} archived files"
-                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
-                  else
-                    echo "No archived files, skipping compressing."
-                  fi
-                else
-                  echo "No archiver directory, skipping compressing."
-                fi
-'''
-              sshPublisher(publishers: [
-                sshPublisherDesc(configName: 'Nightlies',
-                  transfers: [
-                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
-                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
-                    )
-                  ]
-                )
-              ])
-              // remove the big test logs zip file, store the nightlies url in test_logs.txt
-              sh '''#!/bin/bash -e
-                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
-                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
-                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
-                  echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
-                else
-                  echo "No test_logs.zip, skipping"
-                fi
-'''
-              // Has to be relative to WORKSPACE.
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing         : true,
-                keepAll              : true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE.
-                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles          : 'console-report.html',
-                reportName           : 'JDK7 Nightly Build Report'
-              ]
-            }
-          }
-        }
-        stage ('yetus jdk8 hadoop2 checks') {
-          agent {
-            node {
-              label 'hbase'
-            }
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = "${env.DEEP_CHECKS}"
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
-            SET_JAVA_HOME = '/usr/lib/jvm/java-8'
-          }
-          steps {
-            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
-              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
-            '''
-            unstash 'yetus'
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
-              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
-            '''
-            script {
-              def ret = sh(
-                returnStatus: true,
-                script: '''#!/usr/bin/env bash
-                  set -e
-                  declare -i status=0
-                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                    echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                  else
-                    echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                    status=1
-                  fi
-                  echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
-                  exit "${status}"
-                '''
-              )
-              if (ret != 0) {
-                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                // test output. See HBASE-26339 for more details.
-                currentBuild.result = 'UNSTABLE'
-              }
-            }
-          }
-          post {
-            always {
-              stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
-              // zip surefire reports.
-              sh '''#!/bin/bash -e
-                if [ -d "${OUTPUT_DIR}/archiver" ]; then
-                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
-                  if [[ 0 -ne ${count} ]]; then
-                    echo "zipping ${count} archived files"
-                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
-                  else
-                    echo "No archived files, skipping compressing."
-                  fi
-                else
-                  echo "No archiver directory, skipping compressing."
-                fi
-'''
-              sshPublisher(publishers: [
-                sshPublisherDesc(configName: 'Nightlies',
-                  transfers: [
-                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
-                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
-                    )
-                  ]
-                )
-              ])
-              // remove the big test logs zip file, store the nightlies url in test_logs.txt
-              sh '''#!/bin/bash -e
-                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
-                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
-                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
-                  echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
-                else
-                  echo "No test_logs.zip, skipping"
-                fi
-'''
-              // Has to be relative to WORKSPACE.
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing         : true,
-                keepAll              : true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE.
-                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles          : 'console-report.html',
-                reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
-              ]
-            }
-          }
-        }
-        stage ('yetus jdk8 hadoop3 checks') {
-          agent {
-            node {
-              label 'hbase'
-            }
-          }
-          when {
-            not {
-              branch 'branch-1*'
-            }
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = "${env.DEEP_CHECKS}"
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
-            SET_JAVA_HOME = '/usr/lib/jvm/java-8'
-            // Activates hadoop 3.0 profile in maven runs.
-            HADOOP_PROFILE = '3.0'
-          }
-          steps {
-            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
-              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
-            '''
-            unstash 'yetus'
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
-              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
-            '''
-            script {
-              def ret = sh(
-                returnStatus: true,
-                script: '''#!/usr/bin/env bash
-                  set -e
-                  declare -i status=0
-                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                    echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                  else
-                    echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                    status=1
-                  fi
-                  echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
-                  exit "${status}"
-                '''
-              )
-              if (ret != 0) {
-                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                // test output. See HBASE-26339 for more details.
-                currentBuild.result = 'UNSTABLE'
-              }
-            }
-          }
-          post {
-            always {
-              stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
-              // zip surefire reports.
-              sh '''#!/bin/bash -e
-                if [ -d "${OUTPUT_DIR}/archiver" ]; then
-                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
-                  if [[ 0 -ne ${count} ]]; then
-                    echo "zipping ${count} archived files"
-                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
-                  else
-                    echo "No archived files, skipping compressing."
-                  fi
-                else
-                  echo "No archiver directory, skipping compressing."
-                fi
-'''
-              sshPublisher(publishers: [
-                sshPublisherDesc(configName: 'Nightlies',
-                  transfers: [
-                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
-                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
-                    )
-                  ]
-                )
-              ])
-              // remove the big test logs zip file, store the nightlies url in test_logs.txt
-              sh '''#!/bin/bash -e
-                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
-                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
-                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
-                  echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
-                else
-                  echo "No test_logs.zip, skipping"
-                fi
-'''
-              // Has to be relative to WORKSPACE.
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing         : true,
-                keepAll              : true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE.
-                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles          : 'console-report.html',
-                reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
-              ]
-            }
-          }
-        }
-        stage ('yetus jdk11 hadoop3 checks') {
-          agent {
-            node {
-              label 'hbase'
-            }
-          }
-          when {
-            not {
-              branch 'branch-1*'
-            }
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = "${env.DEEP_CHECKS}"
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
-            SET_JAVA_HOME = "/usr/lib/jvm/java-11"
-            // Activates hadoop 3.0 profile in maven runs.
-            HADOOP_PROFILE = '3.0'
-            // ErrorProne is broken on JDK11, see HBASE-23894
-            SKIP_ERROR_PRONE = 'true'
-          }
-          steps {
-            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
-              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
-            '''
-            unstash 'yetus'
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
-              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
-            '''
-            script {
-              def ret = sh(
-                returnStatus: true,
-                script: '''#!/usr/bin/env bash
-                  set -e
-                  declare -i status=0
-                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                    echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                  else
-                    echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                    status=1
-                  fi
-                  echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
-                  exit "${status}"
-                '''
-              )
-              if (ret != 0) {
-                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                // test output. See HBASE-26339 for more details.
-                currentBuild.result = 'UNSTABLE'
-              }
-            }
-          }
-          post {
-            always {
-              stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
-              // zip surefire reports.
-              sh '''#!/bin/bash -e
-                if [ -d "${OUTPUT_DIR}/archiver" ]; then
-                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
-                  if [[ 0 -ne ${count} ]]; then
-                    echo "zipping ${count} archived files"
-                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
-                  else
-                    echo "No archived files, skipping compressing."
-                  fi
-                else
-                  echo "No archiver directory, skipping compressing."
-                fi
-'''
-              sshPublisher(publishers: [
-                sshPublisherDesc(configName: 'Nightlies',
-                  transfers: [
-                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
-                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
-                    )
-                  ]
-                )
-              ])
-              // remove the big test logs zip file, store the nightlies url in test_logs.txt
-              sh '''#!/bin/bash -e
-                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
-                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
-                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
-                  echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
-                else
-                  echo "No test_logs.zip, skipping"
-                fi
-'''
-              // Has to be relative to WORKSPACE.
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing         : true,
-                keepAll              : true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE.
-                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles          : 'console-report.html',
-                reportName           : 'JDK11 Nightly Build Report (Hadoop3)'
-              ]
-            }
-          }
-        }
-        // This is meant to mimic what a release manager will do to create RCs.
-        // See http://hbase.apache.org/book.html#maven.release
-        // TODO (HBASE-23870): replace this with invocation of the release tool
-        stage ('packaging and integration') {
-          tools {
-            maven 'maven_latest'
-            // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
-            jdk "jdk_1.8_latest"
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            BRANCH = "${env.BRANCH_NAME}"
-          }
-          steps {
-            sh '''#!/bin/bash -e
-              echo "Setting up directories"
-              rm -rf "output-srctarball" && mkdir "output-srctarball"
-              rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
-              rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
-              rm -rf "hbase-install" && mkdir "hbase-install"
-              rm -rf "hbase-client" && mkdir "hbase-client"
-              rm -rf "hadoop-2" && mkdir "hadoop-2"
-              rm -rf "hadoop-3" && mkdir "hadoop-3"
-              rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
-              rm -rf ".m2-for-src" && mkdir ".m2-for-src"
-              echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
-              echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
-'''
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
-              echo "got the following saved stats in 'output-srctarball/machine'"
-              ls -lh "output-srctarball/machine"
-'''
-            sh """#!/bin/bash -e
-              echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
-              if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
-                  --intermediate-file-dir output-srctarball \
-                  --unpack-temp-dir unpacked_src_tarball \
-                  --maven-m2-initial .m2-for-repo \
-                  --maven-m2-src-build .m2-for-src \
-                  --clean-source-checkout \
-                  "${env.BASEDIR}" ; then
-                echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
-              else
-                echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
-                exit 1
-              fi
-"""
-            echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
-            sh '''#!/bin/bash -e
-              if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | wc -l) ]; then
-                echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
-                exit 1
-              fi
-              install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | head -n 1)
-              tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
-              client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | sort | tail -n 1)
-              tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
-'''
-            unstash 'hadoop-2'
-            sh '''#!/bin/bash -xe
-              if [[ "${BRANCH}" = branch-2* ]] || [[ "${BRANCH}" = branch-1* ]]; then
-                echo "Attempting to run an instance on top of Hadoop 2."
-                artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
-                tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
-                if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
-                    --single-process \
-                    --working-dir output-integration/hadoop-2 \
-                    --hbase-client-install "hbase-client" \
-                    "hbase-install" \
-                    "hadoop-2/bin/hadoop" \
-                    hadoop-2/share/hadoop/yarn/timelineservice \
-                    hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
-                    hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
-                    hadoop-2/bin/mapred \
-                    >output-integration/hadoop-2.log 2>&1 ; then
-                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
-                  exit 2
-                fi
-              else
-                echo "Skipping run against Hadoop 2 for branch ${BRANCH}"
-              fi
-'''
-            unstash 'hadoop-3'
-            sh '''#!/bin/bash -e
-              if [[ "${BRANCH}" = branch-1* ]]; then
-                echo "Skipping run against Hadoop 3 for branch ${BRANCH}"
-              else
-                echo "Attempting to run an instance on top of Hadoop 3."
-                artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
-                tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
-                if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
-                    --single-process \
-                    --working-dir output-integration/hadoop-3 \
-                    --hbase-client-install hbase-client \
-                    hbase-install \
-                    hadoop-3/bin/hadoop \
-                    hadoop-3/share/hadoop/yarn/timelineservice \
-                    hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
-                    hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
-                    hadoop-3/bin/mapred \
-                    >output-integration/hadoop-3.log 2>&1 ; then
-                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
-                  exit 2
-                fi
-                echo "Attempting to run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
-                if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
-                    --single-process \
-                    --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
-                    --working-dir output-integration/hadoop-3-shaded \
-                    --hbase-client-install hbase-client \
-                    hbase-install \
-                    hadoop-3/bin/hadoop \
-                    hadoop-3/share/hadoop/yarn/timelineservice \
-                    hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
-                    hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
-                    hadoop-3/bin/mapred \
-                    >output-integration/hadoop-3-shaded.log 2>&1 ; then
-                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
-                  exit 2
-                fi
-                echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
-              fi
-'''
-
-
-          }
-          post {
-            always {
-              stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
-              archiveArtifacts artifacts: 'output-srctarball/*'
-              archiveArtifacts artifacts: 'output-srctarball/**/*'
-              archiveArtifacts artifacts: 'output-integration/*'
-              archiveArtifacts artifacts: 'output-integration/**/*'
-            }
-          }
-        }
-      }
-    }
-  }
-  post {
-    always {
-      script {
-         try {
-           unstash 'general-result'
-           unstash 'jdk7-result'
-           unstash 'jdk8-hadoop2-result'
-           unstash 'jdk8-hadoop3-result'
-           unstash 'jdk11-hadoop3-result'
-           unstash 'srctarball-result'
-           sh "printenv"
-           def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
-                          "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
-                          "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
-                          "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
-                          "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
-                          'output-srctarball/commentfile',
-                          'output-integration/commentfile']
-           echo env.BRANCH_NAME
-           echo env.BUILD_URL
-           echo currentBuild.result
-           echo currentBuild.durationString
-           def comment = "Results for branch ${env.BRANCH_NAME}\n"
-           comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
-           if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
-              comment += "(/) *{color:green}+1 overall{color}*\n"
-           } else {
-              comment += "(x) *{color:red}-1 overall{color}*\n"
-              // Ideally get the committer out of the change and @-mention them in the per-jira comment
-           }
-           comment += "----\ndetails (if available):\n\n"
-           echo ""
-           echo "[DEBUG] trying to aggregate step-wise results"
-           comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
-           echo "[INFO] Comment:"
-           echo comment
-           echo ""
-           echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
-           getJirasToComment(currentBuild).each { currentIssue ->
-             jiraComment issueKey: currentIssue, body: comment
-           }
-        } catch (Exception exception) {
-          echo "Got exception: ${exception}"
-          echo "    ${exception.getStackTrace()}"
-        }
-      }
-    }
-  }
-}
-import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
-@NonCPS
-List<String> getJirasToComment(RunWrapper thisBuild) {
-  def seenJiras = []
-  thisBuild.changeSets.each { cs ->
-    cs.getItems().each { change ->
-      CharSequence msg = change.msg
-      echo "change: ${change}"
-      echo "     ${msg}"
-      echo "     ${change.commitId}"
-      echo "     ${change.author}"
-      echo ""
-      msg.eachMatch("HBASE-[0-9]+") { currentIssue ->
-        echo "[DEBUG] found jira key: ${currentIssue}"
-        if (currentIssue in seenJiras) {
-          echo "[DEBUG] already commented on ${currentIssue}."
-        } else {
-          echo "[INFO] commenting on ${currentIssue}."
-          seenJiras << currentIssue
-        }
-      }
-    }
-  }
-  return seenJiras
-}
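
Each yetus stage in the deleted Jenkinsfile above repeats one pattern: run the
check with sh(returnStatus: true), write a Jira-markup verdict to a per-stage
commentfile, and downgrade a failure to UNSTABLE so the post/always publishing
steps still run (see HBASE-26339). A minimal, self-contained sketch of that
pattern, with a hypothetical run-some-check.sh standing in for
hbase_nightly_yetus.sh:

    pipeline {
      agent any
      stages {
        stage('example check') {
          steps {
            script {
              // Capture the exit code instead of letting a non-zero status abort the stage.
              def ret = sh(returnStatus: true, script: '''#!/usr/bin/env bash
                set -e
                # run-some-check.sh is a hypothetical stand-in for the real check script
                if ./run-some-check.sh; then
                  echo '(/) {color:green}+1 example check{color}' > commentfile
                else
                  echo '(x) {color:red}-1 example check{color}' > commentfile
                  exit 1
                fi
              ''')
              if (ret != 0) {
                // UNSTABLE rather than FAILURE, so the post section below still archives output.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
        }
      }
      post {
        always {
          // Publish the verdict whether the check passed or not.
          archiveArtifacts artifacts: 'commentfile', allowEmptyArchive: true
        }
      }
    }

In the real file, the pipeline-level post/always block then unstashes every
stage's commentfile and aggregates them into a single Jira comment via
getJirasToComment.
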
diff --git a/dev-support/Jenkinsfile_GitHub b/dev-support/Jenkinsfile_GitHub
deleted file mode 100644
index 6a952fa..0000000
--- a/dev-support/Jenkinsfile_GitHub
+++ /dev/null
@@ -1,443 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-pipeline {
-
-    agent {
-        label 'Hadoop'
-    }
-
-    options {
-        // N.B. this is per-branch, which means per PR
-        disableConcurrentBuilds()
-        buildDiscarder(logRotator(numToKeepStr: '15'))
-        timeout (time: 10, unit: 'HOURS')
-        timestamps()
-        skipDefaultCheckout()
-    }
-
-    environment {
-        SRC_REL = 'src'
-        PATCH_REL = 'output'
-        YETUS_REL = 'yetus'
-        DOCKERFILE_REL = "${SRC_REL}/dev-support/docker/Dockerfile"
-        YETUS_DRIVER_REL = "${SRC_REL}/dev-support/jenkins_precommit_github_yetus.sh"
-        // Branch or tag name.  Yetus release tags are 'rel/X.Y.Z'
-        YETUS_VERSION = 'rel/0.12.0'
-        GENERAL_CHECK_PLUGINS = 'all,-javadoc,-jira,-shadedjars,-unit'
-        JDK_SPECIFIC_PLUGINS = 'compile,github,htmlout,javac,javadoc,maven,mvninstall,shadedjars,unit'
-        // output from surefire; sadly the archive function in yetus only works on file names.
-        ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
-        // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
-        TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
-        EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${CHANGE_TARGET}/lastSuccessfulBuild/artifact/output/excludes"
-
-        // a global view of paths. parallel stages can land on the same host concurrently, so each
-        // stage works in its own subdirectory. there is an "output" under each of these
-        // directories, which we retrieve after the build is complete.
-        WORKDIR_REL_GENERAL_CHECK = 'yetus-general-check'
-        WORKDIR_REL_JDK8_HADOOP2_CHECK = 'yetus-jdk8-hadoop2-check'
-        WORKDIR_REL_JDK11_HADOOP3_CHECK = 'yetus-jdk11-hadoop3-check'
-        ASF_NIGHTLIES = 'https://nightlies.apache.org'
-    }
-
-    parameters {
-        booleanParam(name: 'DEBUG',
-               defaultValue: false,
-               description: 'Print extra outputs for debugging the jenkins job and yetus')
-    }
-
-    stages {
-        stage ('precommit checks') {
-            parallel {
-                stage ('yetus general check') {
-                    agent {
-                        node {
-                            label 'Hadoop'
-                        }
-                    }
-                    environment {
-                        // customized per parallel stage
-                        PLUGINS = "${GENERAL_CHECK_PLUGINS}"
-                        SET_JAVA_HOME = '/usr/lib/jvm/java-8'
-                        WORKDIR_REL = "${WORKDIR_REL_GENERAL_CHECK}"
-                        // identical for all parallel stages
-                        WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
-                        YETUSDIR = "${WORKDIR}/${YETUS_REL}"
-                        SOURCEDIR = "${WORKDIR}/${SRC_REL}"
-                        PATCHDIR = "${WORKDIR}/${PATCH_REL}"
-                        BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
-                        DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
-                        YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
-                    }
-                    steps {
-                        dir("${SOURCEDIR}") {
-                            checkout scm
-                        }
-                        dir("${YETUSDIR}") {
-                            checkout([
-                              $class           : 'GitSCM',
-                              branches         : [[name: "${YETUS_VERSION}"]],
-                              userRemoteConfigs: [[url: 'https://github.com/apache/yetus.git']]]
-                            )
-                        }
-                        dir("${WORKDIR}") {
-                            withCredentials([
-                                usernamePassword(
-                                  credentialsId: 'apache-hbase-at-github.com',
-                                  passwordVariable: 'GITHUB_PASSWORD',
-                                  usernameVariable: 'GITHUB_USER'
-                                )]) {
-                                script {
-                                  def ret = sh(
-                                    label: 'test-patch',
-                                    returnStatus: true,
-                                    script: '''#!/bin/bash -e
-                                      hostname -a ; pwd ; ls -la
-                                      printenv 2>&1 | sort
-                                      echo "[INFO] Launching Yetus via ${YETUS_DRIVER}"
-                                      "${YETUS_DRIVER}"
-                                    '''
-                                  )
-                                  if (ret != 0) {
-                                    // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                                    // test output. See HBASE-26339 for more details.
-                                    currentBuild.result = 'UNSTABLE'
-                                  }
-                                }
-                            }
-                        }
-                    }
-                    post {
-                        always {
-                            // Has to be relative to WORKSPACE.
-                            archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit"
-                            archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/**/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit/**/*"
-                            publishHTML target: [
-                              allowMissing: true,
-                              keepAll: true,
-                              alwaysLinkToLastBuild: true,
-                              // Has to be relative to WORKSPACE
-                              reportDir: "${WORKDIR_REL}/${PATCH_REL}",
-                              reportFiles: 'report.html',
-                              reportName: 'PR General Check Report'
-                            ]
-                        }
-                        // Jenkins pipeline jobs fill slaves on PRs without this :(
-                        cleanup() {
-                            script {
-                                sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
-                                    # See YETUS-764
-                                    if [ -f "${PATCHDIR}/pidfile.txt" ]; then
-                                      echo "test-patch process appears to still be running: killing"
-                                      kill `cat "${PATCHDIR}/pidfile.txt"` || true
-                                      sleep 10
-                                    fi
-                                    if [ -f "${PATCHDIR}/cidfile.txt" ]; then
-                                      echo "test-patch container appears to still be running: killing"
-                                      docker kill `cat "${PATCHDIR}/cidfile.txt"` || true
-                                    fi
-                                    # See HADOOP-13951
-                                    chmod -R u+rxw "${WORKSPACE}"
-                                '''
-                                dir ("${WORKDIR}") {
-                                    deleteDir()
-                                }
-                            }
-                        }
-                    }
-                }
-                stage ('yetus jdk8 Hadoop2 checks') {
-                    agent {
-                        node {
-                            label 'Hadoop'
-                        }
-                    }
-                    environment {
-                        // customized per parallel stage
-                        PLUGINS = "${JDK_SPECIFIC_PLUGINS}"
-                        SET_JAVA_HOME = '/usr/lib/jvm/java-8'
-                        WORKDIR_REL = "${WORKDIR_REL_JDK8_HADOOP2_CHECK}"
-                        // identical for all parallel stages
-                        WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
-                        YETUSDIR = "${WORKDIR}/${YETUS_REL}"
-                        SOURCEDIR = "${WORKDIR}/${SRC_REL}"
-                        PATCHDIR = "${WORKDIR}/${PATCH_REL}"
-                        BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
-                        DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
-                        YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
-                        SKIP_ERRORPRONE = true
-                    }
-                    steps {
-                        dir("${SOURCEDIR}") {
-                            checkout scm
-                        }
-                        dir("${YETUSDIR}") {
-                            checkout([
-                              $class           : 'GitSCM',
-                              branches         : [[name: "${YETUS_VERSION}"]],
-                              userRemoteConfigs: [[url: 'https://github.com/apache/yetus.git']]]
-                            )
-                        }
-                        dir("${WORKDIR}") {
-                            withCredentials([
-                              usernamePassword(
-                                credentialsId: 'apache-hbase-at-github.com',
-                                passwordVariable: 'GITHUB_PASSWORD',
-                                usernameVariable: 'GITHUB_USER'
-                              )]) {
-                                script {
-                                  def ret = sh(
-                                    label: 'test-patch',
-                                    returnStatus: true,
-                                    script: '''#!/bin/bash -e
-                                      hostname -a ; pwd ; ls -la
-                                      printenv 2>&1 | sort
-                                      echo "[INFO] Launching Yetus via ${YETUS_DRIVER}"
-                                      "${YETUS_DRIVER}"
-                                    '''
-                                  )
-                                  if (ret != 0) {
-                                    // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                                    // test output. See HBASE-26339 for more details.
-                                    currentBuild.result = 'UNSTABLE'
-                                  }
-                                }
-                            }
-                        }
-                    }
-                    post {
-                        always {
-                            junit testResults: "${WORKDIR_REL}/${SRC_REL}/**/target/**/TEST-*.xml", allowEmptyResults: true
-                            sh label: 'zip surefire reports', script: '''#!/bin/bash -e
-                                if [ -d "${PATCHDIR}/archiver" ]; then
-                                  count=$(find "${PATCHDIR}/archiver" -type f | wc -l)
-                                  if [[ 0 -ne ${count} ]]; then
-                                    echo "zipping ${count} archived files"
-                                    zip -q -m -r "${PATCHDIR}/test_logs.zip" "${PATCHDIR}/archiver"
-                                  else
-                                    echo "No archived files, skipping compression."
-                                  fi
-                                else
-                                  echo "No archiver directory, skipping compression."
-                                fi
-                            '''
-                            sshPublisher(publishers: [
-                              sshPublisherDesc(configName: 'Nightlies',
-                                transfers: [
-                                  sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
-                                    sourceFiles: "${env.WORKDIR_REL}/${env.PATCH_REL}/test_logs.zip"
-                                  )
-                                ]
-                              )
-                            ])
-                            // remove the big test logs zip file, store the nightlies URL in test_logs.txt
-                            sh '''#!/bin/bash -e
-                            if [ -f "${PATCHDIR}/test_logs.zip" ]; then
-                              echo "Removing ${PATCHDIR}/test_logs.zip to save space"
-                              rm -rf "${PATCHDIR}/test_logs.zip"
-                              echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${WORKDIR_REL}/${PATCH_REL}/test_logs.zip" > "${PATCHDIR}/test_logs.txt"
-                            else
-                              echo "No test_logs.zip, skipping"
-                            fi
-                            '''
-                            // Has to be relative to WORKSPACE.
-                            archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit"
-                            archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/**/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit/**/*"
-                            publishHTML target: [
-                              allowMissing: true,
-                              keepAll: true,
-                              alwaysLinkToLastBuild: true,
-                              // Has to be relative to WORKSPACE
-                              reportDir: "${WORKDIR_REL}/${PATCH_REL}",
-                              reportFiles: 'report.html',
-                              reportName: 'PR JDK8 Hadoop2 Check Report'
-                            ]
-                        }
-                        // Jenkins pipeline jobs fill up the agents' disks on PRs without this :(
-                        cleanup() {
-                            script {
-                                sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
-                                    # See YETUS-764
-                                    if [ -f "${PATCHDIR}/pidfile.txt" ]; then
-                                      echo "test-patch process appears to still be running: killing"
-                                      kill `cat "${PATCHDIR}/pidfile.txt"` || true
-                                      sleep 10
-                                    fi
-                                    if [ -f "${PATCHDIR}/cidfile.txt" ]; then
-                                      echo "test-patch container appears to still be running: killing"
-                                      docker kill `cat "${PATCHDIR}/cidfile.txt"` || true
-                                    fi
-                                    # See HADOOP-13951
-                                    chmod -R u+rxw "${WORKSPACE}"
-                                '''
-                                dir ("${WORKDIR}") {
-                                    deleteDir()
-                                }
-                            }
-                        }
-                    }
-                }
-                stage ('yetus jdk11 hadoop3 checks') {
-                    agent {
-                        node {
-                            label 'Hadoop'
-                        }
-                    }
-                    environment {
-                        // customized per parallel stage
-                        PLUGINS = "${JDK_SPECIFIC_PLUGINS}"
-                        SET_JAVA_HOME = '/usr/lib/jvm/java-11'
-                        HADOOP_PROFILE = '3.0'
-                        WORKDIR_REL = "${WORKDIR_REL_JDK11_HADOOP3_CHECK}"
-                        // identical for all parallel stages
-                        WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
-                        YETUSDIR = "${WORKDIR}/${YETUS_REL}"
-                        SOURCEDIR = "${WORKDIR}/${SRC_REL}"
-                        PATCHDIR = "${WORKDIR}/${PATCH_REL}"
-                        BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
-                        DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
-                        YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
-                        SKIP_ERRORPRONE = true
-                    }
-                    steps {
-                        dir("${SOURCEDIR}") {
-                            checkout scm
-                        }
-                        dir("${YETUSDIR}") {
-                            checkout([
-                              $class           : 'GitSCM',
-                              branches         : [[name: "${YETUS_VERSION}"]],
-                              userRemoteConfigs: [[url: 'https://github.com/apache/yetus.git']]]
-                            )
-                        }
-                        dir("${WORKDIR}") {
-                            withCredentials([
-                              usernamePassword(
-                                credentialsId: 'apache-hbase-at-github.com',
-                                passwordVariable: 'GITHUB_PASSWORD',
-                                usernameVariable: 'GITHUB_USER'
-                              )]) {
-                                script {
-                                  def ret = sh(
-                                    label: 'test-patch',
-                                    returnStatus: true,
-                                    script: '''#!/bin/bash -e
-                                      hostname -a ; pwd ; ls -la
-                                      printenv 2>&1 | sort
-                                      echo "[INFO] Launching Yetus via ${YETUS_DRIVER}"
-                                      "${YETUS_DRIVER}"
-                                    '''
-                                  )
-                                  if (ret != 0) {
-                                    // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                                    // test output. See HBASE-26339 for more details.
-                                    currentBuild.result = 'UNSTABLE'
-                                  }
-                                }
-                            }
-                        }
-                    }
-                    post {
-                        always {
-                            junit testResults: "${WORKDIR_REL}/${SRC_REL}/**/target/**/TEST-*.xml", allowEmptyResults: true
-                            sh label: 'zip surefire reports', script: '''#!/bin/bash -e
-                                if [ -d "${PATCHDIR}/archiver" ]; then
-                                  count=$(find "${PATCHDIR}/archiver" -type f | wc -l)
-                                  if [[ 0 -ne ${count} ]]; then
-                                    echo "zipping ${count} archived files"
-                                    zip -q -m -r "${PATCHDIR}/test_logs.zip" "${PATCHDIR}/archiver"
-                                  else
-                                    echo "No archived files, skipping compression."
-                                  fi
-                                else
-                                  echo "No archiver directory, skipping compression."
-                                fi
-                            '''
-                            sshPublisher(publishers: [
-                              sshPublisherDesc(configName: 'Nightlies',
-                                transfers: [
-                                  sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
-                                    sourceFiles: "${env.WORKDIR_REL}/${env.PATCH_REL}/test_logs.zip"
-                                  )
-                                ]
-                              )
-                            ])
-                            // remove the big test logs zip file, store the nightlies URL in test_logs.txt
-                            sh '''#!/bin/bash -e
-                            if [ -f "${PATCHDIR}/test_logs.zip" ]; then
-                              echo "Removing ${PATCHDIR}/test_logs.zip to save space"
-                              rm -rf "${PATCHDIR}/test_logs.zip"
-                              echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${WORKDIR_REL}/${PATCH_REL}/test_logs.zip" > "${PATCHDIR}/test_logs.txt"
-                            else
-                              echo "No test_logs.zip, skipping"
-                            fi
-                            '''
-                            // Has to be relative to WORKSPACE.
-                            archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit"
-                            archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/**/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit/**/*"
-                            publishHTML target: [
-                              allowMissing: true,
-                              keepAll: true,
-                              alwaysLinkToLastBuild: true,
-                              // Has to be relative to WORKSPACE
-                              reportDir: "${WORKDIR_REL}/${PATCH_REL}",
-                              reportFiles: 'report.html',
-                              reportName: 'PR JDK11 Hadoop3 Check Report'
-                            ]
-                        }
-                        // Jenkins pipeline jobs fill up the agents' disks on PRs without this :(
-                        cleanup() {
-                            script {
-                                sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
-                                    # See YETUS-764
-                                    if [ -f "${PATCHDIR}/pidfile.txt" ]; then
-                                      echo "test-patch process appears to still be running: killing"
-                                      kill `cat "${PATCHDIR}/pidfile.txt"` || true
-                                      sleep 10
-                                    fi
-                                    if [ -f "${PATCHDIR}/cidfile.txt" ]; then
-                                      echo "test-patch container appears to still be running: killing"
-                                      docker kill `cat "${PATCHDIR}/cidfile.txt"` || true
-                                    fi
-                                    # See HADOOP-13951
-                                    chmod -R u+rxw "${WORKSPACE}"
-                                '''
-                                dir ("${WORKDIR}") {
-                                    deleteDir()
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    post {
-        // Jenkins pipeline jobs fill up the agents' disks on PRs without this :(
-        cleanup() {
-            script {
-                sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
-                    # See HADOOP-13951
-                    chmod -R u+rxw "${WORKSPACE}"
-                    '''
-                deleteDir()
-            }
-        }
-    }
-}
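
For context on the idiom repeated in every check stage above: each stage runs the Yetus driver through sh(returnStatus: true, ...) and downgrades a non-zero exit to UNSTABLE instead of letting the step fail, so the later publishing of test output is not skipped (per the in-file comments and HBASE-26339). A minimal, self-contained sketch of that idiom; the stage name and ./run-checks.sh are illustrative placeholders, not anything from the removed files:

    pipeline {
      agent any
      stages {
        stage('example check') {
          steps {
            script {
              // returnStatus: true hands back the exit code instead of failing the step
              def ret = sh(returnStatus: true, script: './run-checks.sh')
              if (ret != 0) {
                // UNSTABLE (not FAILURE) so the publishing below still has output to ship
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
        }
      }
      post {
        always {
          // runs even for an UNSTABLE build, archiving whatever the check produced
          archiveArtifacts artifacts: 'output/**', allowEmptyArchive: true
        }
      }
    }

The same shape applies regardless of what the driver script does; only the exit-code handling matters here.
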
diff --git a/dev-support/flaky-tests/flaky-reporting.Jenkinsfile b/dev-support/flaky-tests/flaky-reporting.Jenkinsfile
deleted file mode 100644
index 25e3fde..0000000
--- a/dev-support/flaky-tests/flaky-reporting.Jenkinsfile
+++ /dev/null
@@ -1,67 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-pipeline {
-  agent {
-    node {
-      label 'hbase'
-    }
-  }
-  triggers {
-    cron('@daily')
-  }
-  options {
-    buildDiscarder(logRotator(numToKeepStr: '50'))
-    timeout (time: 15, unit: 'MINUTES')
-    timestamps()
-  }
-  parameters {
-    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
-  }
-  stages {
-    stage ('build flaky report') {
-      steps {
-        sh '''#!/usr/bin/env bash
-          set -e
-          if [ "${DEBUG}" = "true" ]; then
-            set -x
-          fi
-          declare -a flaky_args
-          flaky_args=("${flaky_args[@]}" --urls "${JENKINS_URL}/job/HBase/job/HBase%20Nightly/job/${BRANCH_NAME}" --is-yetus True --max-builds 10)
-          flaky_args=("${flaky_args[@]}" --urls "${JENKINS_URL}/job/HBase/job/HBase-Flaky-Tests/job/${BRANCH_NAME}" --is-yetus False --max-builds 30)
-          docker build -t hbase-dev-support dev-support
-          docker run --ulimit nproc=12500 -v "${WORKSPACE}":/hbase -u `id -u`:`id -g` --workdir=/hbase hbase-dev-support \
-            python dev-support/flaky-tests/report-flakies.py --mvn -v -o output "${flaky_args[@]}"
-'''
-      }
-    }
-  }
-  post {
-    always {
-      // Has to be relative to WORKSPACE.
-      archiveArtifacts artifacts: "output/*"
-      publishHTML target: [
-        allowMissing: true,
-        keepAll: true,
-        alwaysLinkToLastBuild: true,
-        // Has to be relative to WORKSPACE
-        reportDir: "output",
-        reportFiles: 'dashboard.html',
-        reportName: 'Flaky Test Report'
-      ]
-    }
-  }
-}
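
The reporting pipeline above pairs each --urls source with its own --is-yetus and --max-builds flags, then hands the whole set to report-flakies.py inside the hbase-dev-support image. A distilled sketch of the same step against a single source job, trimmed to the essentials; the branch in the URL is a placeholder:

    pipeline {
      agent { node { label 'hbase' } }
      triggers { cron('@daily') }
      stages {
        stage('build flaky report') {
          steps {
            sh '''#!/usr/bin/env bash
              set -e
              # build the helper image from dev-support, then run the report inside it
              docker build -t hbase-dev-support dev-support
              docker run -v "${WORKSPACE}":/hbase -u "$(id -u):$(id -g)" --workdir=/hbase \
                hbase-dev-support python dev-support/flaky-tests/report-flakies.py \
                --mvn -v -o output \
                --urls "${JENKINS_URL}/job/HBase/job/HBase-Flaky-Tests/job/master" --is-yetus False --max-builds 30
            '''
          }
        }
      }
      post {
        always {
          // dashboard.html and the includes list land in output/
          archiveArtifacts artifacts: 'output/*'
        }
      }
    }
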
diff --git a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile b/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
deleted file mode 100644
index 594000e..0000000
--- a/dev-support/flaky-tests/run-flaky-tests.Jenkinsfile
+++ /dev/null
@@ -1,93 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-pipeline {
-  agent {
-    node {
-      label 'hbase'
-    }
-  }
-  triggers {
-    cron('H H/4 * * *') // Every four hours. See https://jenkins.io/doc/book/pipeline/syntax/#cron-syntax
-  }
-  options {
-    // this should roughly match the window we tell the flaky dashboard to look back over
-    buildDiscarder(logRotator(numToKeepStr: '30'))
-    timeout (time: 2, unit: 'HOURS')
-    timestamps()
-  }
-  environment {
-    ASF_NIGHTLIES = 'https://nightlies.apache.org'
-  }
-  parameters {
-    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
-  }
-  tools {
-    // this should match what the yetus nightly job for the branch will use
-    maven 'maven_latest'
-    jdk "jdk_1.8_latest"
-  }
-  stages {
-    stage ('run flaky tests') {
-      steps {
-        sh '''#!/usr/bin/env bash
-          set -e
-          declare -a curl_args=(--fail)
-          declare -a mvn_args=(--batch-mode -fn -Dbuild.id="${BUILD_ID}" -Dmaven.repo.local="${WORKSPACE}/local-repository")
-          if [ "${DEBUG}" = "true" ]; then
-            curl_args=("${curl_args[@]}" -v)
-            mvn_args=("${mvn_args[@]}" -X)
-            set -x
-          fi
-          curl "${curl_args[@]}" -o includes.txt "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/includes"
-          if [ -s includes.txt ]; then
-            rm -rf local-repository/org/apache/hbase
-            mvn clean "${mvn_args[@]}"
-            rm -rf "target/machine" && mkdir -p "target/machine"
-            if [ -x dev-support/gather_machine_environment.sh ]; then
-              "./dev-support/gather_machine_environment.sh" "target/machine"
-              echo "got the following saved stats in 'target/machine'"
-              ls -lh "target/machine"
-            else
-              echo "Skipped gathering machine environment because the script to do so wasn't executable."
-            fi
-            mvn -T0.25C package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=0.25C -Dsurefire.secondPartForkCount=0.25C
-          else
-            echo "set of flaky tests is currently empty."
-          fi
-'''
-      }
-    }
-  }
-  post {
-    always {
-      junit testResults: "**/surefire-reports/*.xml", allowEmptyResults: true
-      sshPublisher(publishers: [
-        sshPublisherDesc(configName: 'Nightlies',
-          transfers: [
-            sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
-              sourceFiles: "**/surefire-reports/*,**/test-data/*"
-            )
-          ]
-        )
-      ])
-      sh '''#!/bin/bash -e
-        echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}" > "test_logs.txt"
-      '''
-      archiveArtifacts artifacts: 'includes.txt,test_logs.txt,target/machine/*'
-    }
-  }
-}
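
Taken together, the two flaky-tests pipelines deleted here form a producer/consumer pair: flaky-reporting.Jenkinsfile archives an output/includes list of flaky test names (exposed through the HBase-Find-Flaky-Tests job, judging by the URL the consumer fetches), and run-flaky-tests.Jenkinsfile pulls that list and feeds it to surefire. A stripped-down sketch of just that hand-off; the real job's module selection, local-repository handling, and fork-count tuning are omitted, and agent any stands in for the real node labels:

    pipeline {
      agent any
      stages {
        stage('rerun flaky tests') {
          steps {
            sh '''#!/usr/bin/env bash
              set -e
              # pull the include list published by the reporting job's last good run
              curl --fail -o includes.txt \
                "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/includes"
              if [ -s includes.txt ]; then
                # -fn (--fail-never): red tests must not fail the mvn invocation itself
                mvn --batch-mode -fn package -Dtest="$(cat includes.txt)"
              else
                echo "set of flaky tests is currently empty."
              fi
            '''
          }
        }
      }
      post {
        always {
          // the junit step, not the mvn exit code, decides the build result
          junit testResults: '**/surefire-reports/*.xml', allowEmptyResults: true
        }
      }
    }
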