// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
pipeline {
  // Default executor for the pipeline; note that the heavyweight check stages
  // below declare their own agent (same label) and re-checkout on a fresh node.
  agent {
    node {
      label 'hbase'
    }
  }
  triggers {
    // Branch-dependent polling schedule; getCronParams is presumably provided
    // by a Jenkins shared library — TODO confirm, it is not defined in this file.
    pollSCM(getCronParams(env.BRANCH_NAME))
  }
  options {
    // Keep only the last 20 builds to bound disk usage on the controller.
    buildDiscarder(logRotator(numToKeepStr: '20'))
    // Hard cap for the entire run, including all parallel health checks.
    timeout (time: 16, unit: 'HOURS')
    timestamps()
    // Checkout is performed explicitly per stage (see 'scm-checkout'),
    // so suppress the implicit checkout on stage agents.
    skipDefaultCheckout()
    disableConcurrentBuilds()
  }
  environment {
    // Apache Yetus release used by the 'yetus install' stage (unless the
    // USE_YETUS_PRERELEASE parameter is set).
    YETUS_RELEASE = '0.15.0'
    // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
    OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
    OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
    OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
    OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
    OUTPUT_DIR_RELATIVE_JDK17_HADOOP3 = 'output-jdk17-hadoop3'
    OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS = 'output-jdk17-hadoop3-backwards'

    PROJECT = 'hbase'
    // Yetus personality script fetched at build time into PERSONALITY_FILE.
    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
    PERSONALITY_FILE = 'tools/personality.sh'
    // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
    AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc'
    BLANKS_EOL_IGNORE_FILE = 'dev-support/blanks-eol-ignore.txt'
    BLANKS_TABS_IGNORE_FILE = 'dev-support/blanks-tabs-ignore.txt'
    // output from surefire; sadly the archive function in yetus only works on file names.
    ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
    // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
    TESTS_FILTER = 'checkstyle,javac,javadoc,pylint,shellcheck,shelldocs,blanks,perlcritic,ruby-lint,rubocop'
    // Flaky-test exclusion list published by the per-branch flaky-finder job.
    EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
    // TODO does hadoopcheck need to be jdk specific?
    SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
    DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
    ASF_NIGHTLIES = 'https://nightlies.apache.org'
    ASF_NIGHTLIES_BASE_ORI = "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}"
    // JOB_NAME may contain spaces; percent-encode them so the value is a usable URL.
    ASF_NIGHTLIES_BASE = "${ASF_NIGHTLIES_BASE_ORI.replaceAll(' ', '%20')}"
    // These are dependent on the branch
    // We are not running the tests for 3.4.0, due to time constraints.
    HADOOP3_VERSIONS = "3.2.4,3.3.5,3.3.6,3.4.1"
    HADOOP3_DEFAULT_VERSION = "3.4.1"
  }
  parameters {
    // Manual-run switches; both default to off for scheduled/poll-triggered builds.
    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.

    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
  stages {
    stage ('scm-checkout') {
      steps {
        // Check out the project source into 'component' so that every later
        // stage can reference it at a stable path under the workspace.
        dir('component') {
          checkout scm
        }
      }
    }
    stage ('thirdparty installs') {
      parallel {
        stage ('yetus install') {
          steps {
            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
            dir('downloads-yetus') {
              // can't just do a simple echo or the directory won't be created. :(
              sh '''#!/usr/bin/env bash
                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
            }
            // Install Apache Yetus: either the pinned YETUS_RELEASE (verified
            // and cached via the helper script) or, when USE_YETUS_PRERELEASE
            // is set, a fresh tarball of apache/yetus HEAD from GitHub.
            sh  '''#!/usr/bin/env bash
              set -e
              echo "Ensure we have a copy of Apache Yetus."
              if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
                YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
                echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
                if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
                  rm -rf "${YETUS_DIR}"
                  "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                      --working-dir "${WORKSPACE}/downloads-yetus" \
                      --keys 'https://downloads.apache.org/yetus/KEYS' \
                      --verify-tar-gz \
                      "${WORKSPACE}/yetus-${YETUS_RELEASE}-bin.tar.gz" \
                      "yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
                  mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
                else
                  echo "Reusing cached install of Apache Yetus version ${YETUS_RELEASE}."
                fi
              else
                YETUS_DIR="${WORKSPACE}/yetus-git"
                rm -rf "${YETUS_DIR}"
                echo "downloading from github"
                curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
              fi
              if [ ! -d "${YETUS_DIR}" ]; then
                echo "unpacking yetus into '${YETUS_DIR}'"
                mkdir -p "${YETUS_DIR}"
                gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
              fi
            '''
            // Set up the file we need at PERSONALITY_FILE location
            dir ("tools") {
              sh """#!/usr/bin/env bash
                set -e
                echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
                curl -L  -o personality.sh "${env.PROJECT_PERSONALITY}"
              """
            }
            // Stash the unpacked Yetus install plus the personality so the
            // parallel check stages (running on other nodes) can unstash it.
            stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
          }
        }
        stage ('hadoop 2 cache') {
          environment {
            // Hadoop 2 line used by the jdk8/hadoop2 deep checks.
            HADOOP2_VERSION="2.10.2"
          }
          steps {
            // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
            dir('downloads-hadoop-2') {
              sh '''#!/usr/bin/env bash
                echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
            }
            // Download (with signature verification) and cache the Hadoop 2
            // tarball, then delete caches of any other hadoop-2 versions.
            sh '''#!/usr/bin/env bash
              set -e
              echo "Ensure we have a copy of Hadoop ${HADOOP2_VERSION}"
              "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                  --working-dir "${WORKSPACE}/downloads-hadoop-2" \
                  --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
                  --verify-tar-gz \
                  "${WORKSPACE}/hadoop-${HADOOP2_VERSION}-bin.tar.gz" \
                  "hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
              for stale in $(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | grep -v ${HADOOP2_VERSION}); do
                echo "Delete stale hadoop 2 cache ${stale}"
                rm -rf $stale
              done
            '''
            // Make the tarball available to stages running on other nodes.
            stash name: 'hadoop-2', includes: "hadoop-${HADOOP2_VERSION}-bin.tar.gz"
          }
        }
        stage ('hadoop 3 cache') {
          steps {
            script {
              // Parse the comma-separated branch-dependent version list once.
              hadoop3_versions = env.HADOOP3_VERSIONS.split(",");
              // Build an alternation regex such as "(3.2.4|3.3.5|3.3.6|3.4.1)".
              // NOTE: this previously used square brackets, but "[a|b|c]" is a
              // character class matching single characters, not alternation of
              // whole version strings; parentheses give the intended grouping.
              env.HADOOP3_VERSIONS_REGEX = "(" + hadoop3_versions.join("|") + ")";
              for (hadoop3_version in hadoop3_versions) {
                env.HADOOP3_VERSION = hadoop3_version;
                // env property access is case-sensitive on these agents; the
                // previous lowercase 'env.hadoop3_version' always logged null.
                echo "env.HADOOP3_VERSION: " + env.HADOOP3_VERSION;
                stage ('Hadoop 3 cache inner stage') {
                  // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
                  dir("downloads-hadoop-${HADOOP3_VERSION}") {
                    sh '''#!/usr/bin/env bash
                      echo "Make sure we have a directory for downloading dependencies: $(pwd)"
'''
                  } //dir
                  // Download (with signature verification) and cache this
                  // Hadoop 3 tarball, then delete caches of other 3.x versions.
                  sh '''#!/usr/bin/env bash
                    set -e
                    echo "Ensure we have a copy of Hadoop ${HADOOP3_VERSION}"
                    "${WORKSPACE}/component/dev-support/jenkins-scripts/cache-apache-project-artifact.sh" \
                        --working-dir "${WORKSPACE}/downloads-hadoop-${HADOOP3_VERSION}" \
                        --keys 'https://downloads.apache.org/hadoop/common/KEYS' \
                        --verify-tar-gz \
                        "${WORKSPACE}/hadoop-${HADOOP3_VERSION}-bin.tar.gz" \
                        "hadoop/common/hadoop-${HADOOP3_VERSION}/hadoop-${HADOOP3_VERSION}.tar.gz"
                    for stale in $(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | grep -v ${HADOOP3_VERSION}); do
                      echo "Delete stale hadoop 3 cache ${stale}"
                      rm -rf "${stale}"
                    done
                  '''
                  stash name: "hadoop-${HADOOP3_VERSION}", includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz"
                  script {
                    if (env.HADOOP3_VERSION == env.HADOOP3_DEFAULT_VERSION) {
                      // FIXME: we never unstash this, because we run the packaging tests with the version-specific stashes
                      stash(name: "hadoop-3", includes: "hadoop-${HADOOP3_VERSION}-bin.tar.gz")
                    } //if
                  } //script
                } //stage ('Hadoop 3 cache inner stage')
              } //for
            } //script
          } //steps
        } //stage ('hadoop 3 cache') {
      } //parallel
    } //stage ('thirdparty installs')
    stage ('init health results') {
      steps {
        // stash with given name for all tests we might run, so that we can unstash all of them even if
        // we skip some due to e.g. branch-specific JDK or Hadoop support
        stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
        stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
        stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
        stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
        stash name: 'jdk17-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}/doesn't-match"
        script {
          // Derive the version list from the environment rather than relying on
          // the undeclared 'hadoop3_versions' binding variable set inside the
          // 'hadoop 3 cache' stage; this keeps the stage self-contained and
          // avoids the environment-vs-Groovy-variable confusion.
          for (hadoop3_version in env.HADOOP3_VERSIONS.split(",")) {
            stash(name: "jdk17-hadoop3-backwards-result-${hadoop3_version}", allowEmpty: true, includes: "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${hadoop3_version}/doesn't-match")
          }
        }
        stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
      }
    }
    stage ('health checks') {
      parallel {
        stage ('yetus general check') {
          agent {
            node {
              label 'hbase'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            // Shallow (non-unit) checks only; see SHALLOW_CHECKS above.
            TESTS = "${env.SHALLOW_CHECKS}"
            // Branch-dependent JDK selection; helper presumably comes from a
            // shared library — TODO confirm, it is not defined in this file.
            SET_JAVA_HOME = getJavaHomeForYetusGeneralCheck(env.BRANCH_NAME)
            JAVA8_HOME = "/usr/lib/jvm/java-8"
            // Activates hadoop 3.0 profile in maven runs.
            HADOOP_PROFILE = '3.0'
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
            ASF_NIGHTLIES_GENERAL_CHECK_BASE="${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}"
          }
          steps {
            // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            // since we have a new node definition we need to re-do the scm checkout
            dir('component') {
              checkout scm
            }
            // Record machine stats for this node alongside the check output.
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            // TODO roll this into the hbase_nightly_yetus script
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see general report|${BUILD_URL}General_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              // Replace the pessimistic placeholder stash with the real result.
              stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              // Push the (large) yetus site reports to nightlies.apache.org.
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/*-site/*,${env.OUTPUT_DIR_RELATIVE}/*-site/**/*"
                    )
                  ]
                )
              ])
              // After publishing, replace the local site dirs with small
              // redirect pages pointing at the nightlies copies, to save space.
              sh '''#!/bin/bash -e
              if [ -d "${OUTPUT_DIR}/branch-site" ]; then
                echo "Remove ${OUTPUT_DIR}/branch-site for saving space"
                rm -rf "${OUTPUT_DIR}/branch-site"
                python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/branch-site" > "${OUTPUT_DIR}/branch-site.html"
              else
                echo "No branch-site, skipping"
              fi
              if [ -d "${OUTPUT_DIR}/patch-site" ]; then
                echo "Remove ${OUTPUT_DIR}/patch-site for saving space"
                rm -rf "${OUTPUT_DIR}/patch-site"
                python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/patch-site" > "${OUTPUT_DIR}/patch-site.html"
              else
                echo "No patch-site, skipping"
              fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing: true,
                keepAll: true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE
                reportDir: "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles: 'console-report.html',
                reportName: 'General Nightly Build Report'
              ]
            }
          }
        }
        stage ('yetus jdk8 hadoop2 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
          // Hadoop 2 support only exists on the 2.x branches.
          when {
            branch '*branch-2*'
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            // Full compile + unit-test battery; see DEEP_CHECKS above.
            TESTS = "${env.DEEP_CHECKS}"
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
            SET_JAVA_HOME = '/usr/lib/jvm/java-8'
            SKIP_ERRORPRONE = true
          }
          steps {
            // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            // new node, so re-do the scm checkout
            dir('component') {
              checkout scm
            }
            // Record machine stats for this node alongside the check output.
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop2_29/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              // Replace the pessimistic placeholder stash with the real result.
              stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
'''
              // Push the zipped test logs to nightlies.apache.org.
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
'''
              // Has to be relative to WORKSPACE.
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
              ]
            }
          }
        }
        stage ('yetus jdk8 hadoop3 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
          // JDK8 builds only exist on the 2.x branches.
          when {
            branch '*branch-2*'
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            // Full compile + unit-test battery; see DEEP_CHECKS above.
            TESTS = "${env.DEEP_CHECKS}"
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
            SET_JAVA_HOME = '/usr/lib/jvm/java-8'
            // Activates hadoop 3.0 profile in maven runs.
            HADOOP_PROFILE = '3.0'
            SKIP_ERRORPRONE = true
          }
          steps {
            // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            // new node, so re-do the scm checkout
            dir('component') {
              checkout scm
            }
            // Record machine stats for this node alongside the check output.
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}JDK8_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              // Replace the pessimistic placeholder stash with the real result.
              stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
'''
              // Push the zipped test logs to nightlies.apache.org.
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
'''
              // Has to be relative to WORKSPACE.
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
              ]
            }
          }
        }
        stage ('yetus jdk11 hadoop3 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
          // NOTE(review): this restricts jdk11 checks to 2.x branches, matching
          // the jdk8 stages — confirm master coverage is handled elsewhere.
          when {
            branch '*branch-2*'
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            // Full compile + unit-test battery; see DEEP_CHECKS above.
            TESTS = "${env.DEEP_CHECKS}"
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
            SET_JAVA_HOME = "/usr/lib/jvm/java-11"
            // Activates hadoop 3.0 profile in maven runs.
            HADOOP_PROFILE = '3.0'
            SKIP_ERRORPRONE = true
          }
          steps {
            // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            // new node, so re-do the scm checkout
            dir('component') {
              checkout scm
            }
            // Record machine stats for this node alongside the check output.
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk11 report|${BUILD_URL}JDK11_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              // Replace the pessimistic placeholder stash with the real result.
              stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              // Push the zipped test logs to nightlies.apache.org.
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK11 Nightly Build Report (Hadoop3)'
              ]
            }
          }
        }

        // Runs the yetus deep-checks suite (DEEP_CHECKS, i.e. unit tests etc.) with
        // JDK 17 against the Hadoop 3 profile on a dedicated node. The per-stage
        // commentfile is stashed so the pipeline-level post block can fold the
        // result into the aggregated JIRA comment.
        stage ('yetus jdk17 hadoop3 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = "${env.DEEP_CHECKS}"
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}"
            SET_JAVA_HOME = "/usr/lib/jvm/java-17"
            // Activates hadoop 3.0 profile in maven runs.
            HADOOP_PROFILE = '3.0'
            SKIP_ERRORPRONE = true
          }
          steps {
            // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk17 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            // Snapshot this build machine's environment next to the test output, so
            // machine-specific failures can be diagnosed after the fact.
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            // Capture the yetus exit status ourselves (returnStatus: true) instead of
            // letting a non-zero exit fail the stage outright; see the UNSTABLE note below.
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk17 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk17 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk17 report|${BUILD_URL}JDK17_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              stash name: 'jdk17-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              // Ship the (potentially large) zipped test logs to the nightlies host
              // rather than keeping them as Jenkins artifacts.
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK17 Nightly Build Report (Hadoop3)'
              ]
            }
          }
        }
        // If/when we transition to transient runners, we could run every Hadoop check as a matrix job
        // Re-runs a reduced test profile (runDevTests) with JDK 17 against every
        // additional Hadoop 3 release line listed in hadoop3_versions (declared
        // elsewhere in this Jenkinsfile). The default version is skipped here since
        // it already got the full suite in the stage above.
        stage ('yetus jdk17 hadoop3 backwards compatibility checks') {
          agent {
            node {
              label 'hbase'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = "${env.DEEP_CHECKS}"
            SET_JAVA_HOME = "/usr/lib/jvm/java-17"
            // Activates hadoop 3.0 profile in maven runs.
            HADOOP_PROFILE = '3.0'
            // HADOOP_THREE_VERSION is set in script for loop
            TEST_PROFILE = 'runDevTests'
            SKIP_ERRORPRONE = true
          }
          steps {
            script {
              for (hadoop3_version in hadoop3_versions) {
                if (hadoop3_version == env.HADOOP3_DEFAULT_VERSION) {
                  // We are running the full test suite, no need to run the dev tests too
                  continue
                }
                //HADOOP_THREE_VERSION is the environment variable name expected by the nightly shell script
                env.HADOOP_THREE_VERSION = hadoop3_version;
                // One output dir per tested Hadoop version so results don't collide.
                env.OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${env.HADOOP_THREE_VERSION}"
                env.OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${env.HADOOP_THREE_VERSION}"
                // try/finally rather than a declarative post block: inner scripted
                // stages have no post{} support, and publishing must happen even on
                // failure or timeout.
                try {
                  stage ('yetus jdk17 hadoop3 backwards compatibility checks inner stage') {
                    // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
                    sh '''#!/usr/bin/env bash
                     set -e
                     rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
                     rm -f "${OUTPUT_DIR}/commentfile"
                    '''
                    unstash 'yetus'
                    dir('component') {
                      checkout scm
                    }
                    // Snapshot machine environment for post-hoc debugging of this run.
                    sh '''#!/usr/bin/env bash
                      set -e
                      rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
                      "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
                      echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
                      ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
                    '''
                    script {
                      def ret = sh(
                        returnStatus: true,
                        script: '''#!/usr/bin/env bash
                          set -e
                          declare -i status=0
                          if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                            echo "(/) {color:green}+1 jdk17 hadoop ${HADOOP_THREE_VERSION} backward compatibility checks{color}" > "${OUTPUT_DIR}/commentfile"
                          else
                            echo "(x) {color:red}-1 jdk17 hadoop ${HADOOP_THREE_VERSION} backward compatibility checks{color}" > "${OUTPUT_DIR}/commentfile"
                            status=1
                          fi
                          echo "-- For more information [see jdk17 report|${BUILD_URL}JDK17_20Nightly_20Build_20Report_20_28Hadoop3_29/]" >> "${OUTPUT_DIR}/commentfile"
                          exit "${status}"
                        '''
                      )
                      if (ret != 0) {
                        // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                        // test output. See HBASE-26339 for more details.
                        currentBuild.result = 'UNSTABLE'
                      }
                    } //script
                  } //stage ('yetus jdk17 hadoop3 backwards compatibility checks inner stage') {
                } //try
                finally {
                  // Stash name is versioned to match the unstash loop in the
                  // pipeline-level post block.
                  stash name: "jdk17-hadoop3-backwards-result-${HADOOP_THREE_VERSION}", includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
                  junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
                  // zip surefire reports.
                  sh '''#!/bin/bash -e
                    if [ ! -f "${OUTPUT_DIR}/commentfile" ]; then
                      echo "(x) {color:red}-1 jdk17 hadoop ${HADOOP_THREE_VERSION} backward compatibility checks{color}" >"${OUTPUT_DIR}/commentfile"
                      echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
                    fi
                    if [ -d "${OUTPUT_DIR}/archiver" ]; then
                      count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                      if [[ 0 -ne ${count} ]]; then
                        echo "zipping ${count} archived files"
                        zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                      else
                        echo "No archived files, skipping compressing."
                      fi
                    else
                      echo "No archiver directory, skipping compressing."
                    fi
                  '''
                  // Ship zipped test logs to the nightlies host instead of archiving them.
                  sshPublisher(publishers: [
                    sshPublisherDesc(configName: 'Nightlies',
                      transfers: [
                        sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                          sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                        )
                      ]
                    )
                  ])
                  // remove the big test logs zip file, store the nightlies url in test_logs.html
                  sh '''#!/bin/bash -e
                    if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                      echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                      rm -rf "${OUTPUT_DIR}/test_logs.zip"
                      python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                    else
                      echo "No test_logs.zip, skipping"
                    fi
                '''
                  // Has to be relative to WORKSPACE.
                  archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
                  archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
                  publishHTML target: [
                    allowMissing         : true,
                    keepAll              : true,
                    alwaysLinkToLastBuild: true,
                    // Has to be relative to WORKSPACE.
                    reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                    reportFiles          : 'console-report.html',
                    reportName           : "JDK17 Nightly Build Report (Hadoop ${HADOOP_THREE_VERSION} backwards compatibility)"
                  ]
                } //finally
              } // for
            } //script
          } //steps
        } //stage ('yetus jdk17 hadoop3 backwards compatibility checks')

        // This is meant to mimic what a release manager will do to create RCs.
        // See http://hbase.apache.org/book.html#maven.release
        // TODO (HBASE-23870): replace this with invocation of the release tool
        stage ('packaging and integration') {
          agent {
            node {
              label 'hbase'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            BRANCH = "${env.BRANCH_NAME}"
          }
          steps {
            dir('component') {
              checkout scm
            }
            // Fresh working directories for everything this stage produces; stale
            // files from a prior run on the same node would pollute the results.
            sh '''#!/bin/bash -e
              echo "Setting up directories"
              rm -rf "output-srctarball" && mkdir "output-srctarball"
              rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
              rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
              rm -rf "hbase-install" && mkdir "hbase-install"
              rm -rf "hbase-client" && mkdir "hbase-client"
              rm -rf "hbase-hadoop3-install"
              rm -rf "hbase-hadoop3-client"
              rm -rf "hadoop-2" && mkdir "hadoop-2"
              rm -rf "hadoop-3" && mkdir "hadoop-3"
              rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
              rm -rf ".m2-for-src" && mkdir ".m2-for-src"
              # remove old hadoop tarballs in workspace
              rm -rf hadoop-2*.tar.gz
              rm -rf hadoop-3*.tar.gz
              rm -f "output-integration/commentfile"
            '''
            // Snapshot machine environment for post-hoc debugging of this run.
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
              echo "got the following saved stats in 'output-srctarball/machine'"
              ls -lh "output-srctarball/machine"
            '''
            // Build the source tarball, then binaries from it, inside the project's
            // docker image — the same steps an RM follows.
            sh '''#!/bin/bash -e
              echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
              docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
              docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
                  -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" --workdir=/hbase hbase-integration-test \
                  "component/dev-support/hbase_nightly_source-artifact.sh" \
                  --intermediate-file-dir output-srctarball \
                  --unpack-temp-dir unpacked_src_tarball \
                  --maven-m2-initial .m2-for-repo \
                  --maven-m2-src-build .m2-for-src \
                  --clean-source-checkout \
                  component
              if [ $? -eq 0 ]; then
                echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
              else
                echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
                exit 1
              fi
            '''
            echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
            sh '''#!/bin/bash -e
              if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v hadoop3 | wc -l) ]; then
                echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
                exit 1
              fi
              install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v client-bin | grep -v hadoop3)
              tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
              client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-client-bin.tar.gz | grep -v hadoop3)
              tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
              if [ 2 -eq $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | wc -l) ]; then
                echo "hadoop3 artifacts available, unpacking the hbase hadoop3 bin tarball into 'hbase-hadoop3-install' and the client hadoop3 tarball into 'hbase-hadoop3-client'"
                mkdir hbase-hadoop3-install
                mkdir hbase-hadoop3-client
                hadoop3_install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | grep -v client-bin)
                tar --strip-component=1 -xzf "${hadoop3_install_artifact}" -C "hbase-hadoop3-install"
                hadoop3_client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-client-bin.tar.gz)
                tar --strip-component=1 -xzf "${hadoop3_client_artifact}" -C "hbase-hadoop3-client"
              fi
            '''
            unstash 'hadoop-2'
            // Hadoop 2 pseudo-distributed client test, only for branch-2-derived branches.
            sh '''#!/bin/bash -xe
              if [[ "${BRANCH}" == *"branch-2"* ]]; then
                echo "Attempting to use run an instance on top of Hadoop 2."
                artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
                tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
                docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
                docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
                    -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-8" --workdir=/hbase hbase-integration-test \
                    component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
                    --single-process \
                    --working-dir output-integration/hadoop-2 \
                    --hbase-client-install "hbase-client" \
                    hbase-install \
                    hadoop-2/bin/hadoop \
                    hadoop-2/share/hadoop/yarn/timelineservice \
                    hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                    hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                    hadoop-2/bin/mapred \
                    >output-integration/hadoop-2.log 2>&1
                if [ $? -ne 0 ]; then
                  echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
                  exit 2
                fi
                echo "(/) {color:green}+1 client integration test for HBase 2 {color}" >output-integration/commentfile
              else
                echo "Skipping to run against Hadoop 2 for branch ${BRANCH}"
              fi
            '''
            // Run the pseudo-distributed client integration test once per Hadoop 3
            // release line in hadoop3_versions (declared elsewhere in this
            // Jenkinsfile), first with the normal classpath, then again with
            // Hadoop's shaded client jars.
            script {
              for (hadoop3_version in hadoop3_versions) {
                env.HADOOP3_VERSION = hadoop3_version;
                // Debug aid: show which Hadoop 3 version this iteration is testing.
                // (Previously concatenated a literal string with the wrong-case
                // env.hadoop3_version, printing "env.HADOOP3_VERSIONnull".)
                echo "env.HADOOP3_VERSION=${env.HADOOP3_VERSION}"
                stage ("packaging and integration Hadoop 3 inner stage ") {
                  unstash "hadoop-" + env.HADOOP3_VERSION
                  sh '''#!/bin/bash -e
                    echo "Attempting to use run an instance on top of Hadoop ${HADOOP3_VERSION}."
                    # Clean up any previous tested Hadoop3 files before unpacking the current one
                    rm -rf hadoop-3/*
                    # Create working dir
                    rm -rf "output-integration/hadoop-${HADOOP3_VERSION}" && mkdir "output-integration/hadoop-${HADOOP3_VERSION}"
                    rm -rf "output-integration/hadoop-${HADOOP3_VERSION}-shaded" && mkdir "output-integration/hadoop-${HADOOP3_VERSION}-shaded"
                    artifact=$(ls -1 "${WORKSPACE}"/hadoop-${HADOOP3_VERSION}-bin.tar.gz | head -n 1)
                    tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
                    # we need to patch some files otherwise minicluster will fail to start, see MAPREDUCE-7471
                    ${BASEDIR}/dev-support/patch-hadoop3.sh hadoop-3
                    hbase_install_dir="hbase-install"
                    hbase_client_dir="hbase-client"
                    if [ -d "hbase-hadoop3-install" ]; then
                      echo "run hadoop3 client integration test against hbase hadoop3 binaries"
                      hbase_install_dir="hbase-hadoop3-install"
                      hbase_client_dir="hbase-hadoop3-client"
                    fi
                    docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
                    docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
                        -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
                        -e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
                        --workdir=/hbase hbase-integration-test \
                        component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
                        --single-process \
                        --working-dir output-integration/hadoop-${HADOOP3_VERSION} \
                        --hbase-client-install ${hbase_client_dir} \
                        ${hbase_install_dir} \
                        hadoop-3/bin/hadoop \
                        hadoop-3/share/hadoop/yarn/timelineservice \
                        hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                        hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                        hadoop-3/bin/mapred \
                        >output-integration/hadoop-${HADOOP3_VERSION}.log 2>&1
                    if [ $? -ne 0 ]; then
                      echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop ${HADOOP3_VERSION}. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-${HADOOP3_VERSION}.log]. (note that this means we didn't check the Hadoop ${HADOOP3_VERSION} shaded client)" >> output-integration/commentfile
                      exit 2
                    fi
                    echo "Attempting to use run an instance on top of Hadoop ${HADOOP3_VERSION}, relying on the Hadoop client artifacts for the example client program."
                    docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
                        -u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
                        -e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
                        --workdir=/hbase hbase-integration-test \
                        component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
                        --single-process \
                        --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
                        --working-dir output-integration/hadoop-${HADOOP3_VERSION}-shaded \
                        --hbase-client-install ${hbase_client_dir} \
                        ${hbase_install_dir} \
                        hadoop-3/bin/hadoop \
                        hadoop-3/share/hadoop/yarn/timelineservice \
                        hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                        hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                        hadoop-3/bin/mapred \
                        >output-integration/hadoop-${HADOOP3_VERSION}-shaded.log 2>&1
                    if [ $? -ne 0 ]; then
                      echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop ${HADOOP3_VERSION} using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-${HADOOP3_VERSION}-shaded.log]." >> output-integration/commentfile
                      exit 2
                    fi
                    echo "(/) {color:green}+1 client integration test for ${HADOOP3_VERSION} {color}" >> output-integration/commentfile
                  '''
                } //stage ("packaging and integration Hadoop 3 inner stage ")
              } //for
            } // script
          } //steps
          post {
            always {
              // If the steps died before writing a commentfile, record a generic
              // failure for both sub-results so the JIRA comment is never silent.
              sh '''#!/bin/bash -e
                if [ ! -f "output-integration/commentfile" ]; then
                  echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
                  echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
                fi
              '''
              stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
              // Ship the source tarball to the nightlies host; it is too big to archive.
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "output-srctarball/hbase-src.tar.gz"
                    )
                  ]
                )
              ])
              // remove the big src tarball, store the nightlies url in hbase-src.html
              sh '''#!/bin/bash -e
                SRC_TAR="${WORKSPACE}/output-srctarball/hbase-src.tar.gz"
                if [ -f "${SRC_TAR}" ]; then
                  echo "Remove ${SRC_TAR} for saving space"
                  rm -rf "${SRC_TAR}"
                  python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/output-srctarball" > "${WORKSPACE}/output-srctarball/hbase-src.html"
                else
                  echo "No hbase-src.tar.gz, skipping"
                fi
              '''
              archiveArtifacts artifacts: 'output-srctarball/*'
              archiveArtifacts artifacts: 'output-srctarball/**/*'
              archiveArtifacts artifacts: 'output-integration/*'
              archiveArtifacts artifacts: 'output-integration/**/*'
            } //always
          } //post
        } //stage packaging
      } // parallel
    } //stage:_health checks
  } //stages
  // Pipeline-level wrap-up: gather every stage's commentfile from its stash,
  // assemble a single result comment, and post it to each relevant JIRA issue.
  // All errors are caught so a reporting failure never fails the build itself.
  post {
    always {
      script {
         try {
           sh "printenv"
           // wipe out all the output directories before unstashing
           sh'''
             echo "Clean up result directories"
             rm -rf ${OUTPUT_DIR_RELATIVE_GENERAL}
             rm -rf ${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}
             rm -rf ${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}
             rm -rf ${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}
             rm -rf ${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}
             rm -rf ${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-*
             rm -rf output-srctarball
             rm -rf output-integration
           '''
           // Pull in the commentfile each stage stashed in its own post/finally step.
           unstash 'general-result'
           unstash 'jdk8-hadoop2-result'
           unstash 'jdk8-hadoop3-result'
           unstash 'jdk11-hadoop3-result'
           unstash 'jdk17-hadoop3-result'
           unstash 'srctarball-result'

           // Paths of every commentfile to aggregate, in the order they'll appear.
           def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
                          "${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}/commentfile"]
           for (hadoop3_version in hadoop3_versions) {
             if (hadoop3_version == env.HADOOP3_DEFAULT_VERSION) {
                // We haven't run these tests: the default version got the full
                // suite instead, so there is no backwards-compat stash for it.
                continue
            }
             unstash("jdk17-hadoop3-backwards-result-${hadoop3_version}")
               results.add("${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${hadoop3_version}/commentfile")
           }
           results.add('output-srctarball/commentfile')
           results.add('output-integration/commentfile')
           echo env.BRANCH_NAME
           echo env.BUILD_URL
           echo currentBuild.result
           echo currentBuild.durationString
           // Build the JIRA comment: overall verdict first, then per-stage details.
           // currentBuild.result is null while the build is still SUCCESS.
           def comment = "Results for branch ${env.BRANCH_NAME}\n"
           comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
           if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
              comment += "(/) *{color:green}+1 overall{color}*\n"
           } else {
              comment += "(x) *{color:red}-1 overall{color}*\n"
              // Ideally get the committer out of the change and @ mention them in the per-jira comment
           }
           comment += "----\ndetails (if available):\n\n"
           echo ""
           echo "[DEBUG] trying to aggregate step-wise results"
           // Missing commentfiles (e.g. a stage that never ran) contribute an empty entry.
           comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
           echo "[INFO] Comment:"
           echo comment
           echo ""
           echo "[DEBUG] checking to see if feature branch"
           // Feature branches carry their JIRA key in the branch name; otherwise
           // scan the changeset commit messages for HBASE-NNNN keys.
           def jiras = getJirasToComment(env.BRANCH_NAME, [])
           if (jiras.isEmpty()) {
             echo "[DEBUG] non-feature branch, checking change messages for jira keys."
             echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
             jiras = getJirasToCommentFromChangesets(currentBuild)
           }
           jiras.each { currentIssue ->
             jiraComment issueKey: currentIssue, body: comment
           }
        } catch (Exception exception) {
          // Reporting must never fail the build; log and move on.
          echo "Got exception: ${exception}"
          echo "    ${exception.getStackTrace()}"
        }
      }
    }
  }
}
import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
@NonCPS
// Scan every commit message in this build's changesets and collect the JIRA
// keys (HBASE-NNNN) mentioned in them, logging each change as we go.
List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
  List<String> jiraKeys = []
  for (changeLogSet in thisBuild.changeSets) {
    for (entry in changeLogSet.getItems()) {
      CharSequence message = entry.msg
      echo "change: ${entry}"
      echo "     ${message}"
      echo "     ${entry.commitId}"
      echo "     ${entry.author}"
      echo ""
      jiraKeys = getJirasToComment(message, jiraKeys)
    }
  }
  return jiraKeys
}
@NonCPS
// Append every HBASE-NNNN key found in `source` to `seen`, skipping keys that
// are already present, and return the (mutated) list.
List<String> getJirasToComment(CharSequence source, List<String> seen) {
  java.util.regex.Matcher matcher = source =~ "HBASE-[0-9]+"
  while (matcher.find()) {
    String currentIssue = matcher.group()
    echo "[DEBUG] found jira key: ${currentIssue}"
    if (seen.contains(currentIssue)) {
      echo "[DEBUG] already commented on ${currentIssue}."
    } else {
      echo "[INFO] commenting on ${currentIssue}."
      seen.add(currentIssue)
    }
  }
  return seen
}
@NonCPS
// Pick the JAVA_HOME for the yetus general check: 2.x branches build with
// Java 11, everything else (3.x / master / feature branches) with Java 17.
String getJavaHomeForYetusGeneralCheck(String branchName) {
  return branchName.contains("branch-2") ? "/usr/lib/jvm/java-11" : "/usr/lib/jvm/java-17"
}
@NonCPS
// Cron spec for SCM polling, staggered by branch so the main branches do not
// all poll on the same day: master/branch-3 on days 1,4,7,...; branch-2 one
// day later; every other branch a further day later.
String getCronParams(String branchName) {
  switch (branchName) {
    case 'master':
    case 'branch-3':
      return 'H H 1-31/3 * *'
    case 'branch-2':
      return 'H H 2-31/3 * *'
    default:
      return 'H H 3-31/3 * *'
  }
}
