Posted to commits@hbase.apache.org by zh...@apache.org on 2021/10/01 12:42:49 UTC

[hbase] 01/01: test nightlies

This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch HBASE-26313
in repository https://gitbox.apache.org/repos/asf/hbase.git

commit cc5410ac16d1ae6af631ff27c422cbe32d726e45
Author: Duo Zhang <zh...@apache.org>
AuthorDate: Fri Oct 1 20:42:09 2021 +0800

    test nightlies
---
 dev-support/Jenkinsfile          | 43 +++++++++++++++++++++++++++++++++++++++-
 dev-support/hbase-personality.sh |  2 +-
 2 files changed, 43 insertions(+), 2 deletions(-)

diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index f3de8ed..6a8ed6f 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -40,7 +40,7 @@ pipeline {
     OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
 
     PROJECT = 'hbase'
-    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/HBASE-26313/dev-support/hbase-personality.sh'
     PERSONALITY_FILE = 'tools/personality.sh'
     // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
     AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
@@ -53,6 +53,7 @@ pipeline {
       // TODO does hadoopcheck need to be jdk specific?
     SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
     DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
+    ASF_NIGHTLIES = 'https://nightlies.apache.org'
   }
   parameters {
     booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
@@ -195,6 +196,9 @@ pipeline {
               label 'hbase'
             }
           }
+          when {
+            branch 'branch-1*'
+          }
           environment {
             BASEDIR = "${env.WORKSPACE}/component"
             TESTS = "${env.SHALLOW_CHECKS}"
@@ -486,6 +490,23 @@ pipeline {
                   echo "No archiver directory, skipping compressing."
                 fi
 '''
+              sshPublisher(publishers: [
+                sshPublisherDesc(configName: 'Nightlies',
+                  transfers: [
+                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
+                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
+                    )
+                  ]
+                )
+              ])
+              // remove the big test logs zip file, store the nightlies url in test_logs.txt
+              sh '''#!/bin/bash -e
+                echo "Removing ${OUTPUT_DIR}/test_logs.zip to save space"
+                rm -rf "${OUTPUT_DIR}/test_logs.zip"
+                # the job name may contain spaces; URL-encode them as %20 so the published link stays valid
+                ENCODED_JOB_NAME=${JOB_NAME// /%20}
+                echo "${ASF_NIGHTLIES}/hbase/${ENCODED_JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
+'''
               // Has to be relative to WORKSPACE.
               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
@@ -573,6 +594,23 @@ pipeline {
                   echo "No archiver directory, skipping compressing."
                 fi
 '''
+              sshPublisher(publishers: [
+                sshPublisherDesc(configName: 'Nightlies',
+                  transfers: [
+                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
+                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
+                    )
+                  ]
+                )
+              ])
+              // remove the big test logs zip file, store the nightlies url in test_logs.txt
+              sh '''#!/bin/bash -e
+                echo "Removing ${OUTPUT_DIR}/test_logs.zip to save space"
+                rm -rf "${OUTPUT_DIR}/test_logs.zip"
+                # the job name may contain spaces; URL-encode them as %20 so the published link stays valid
+                ENCODED_JOB_NAME=${JOB_NAME// /%20}
+                echo "${ASF_NIGHTLIES}/hbase/${ENCODED_JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
+'''
               // Has to be relative to WORKSPACE.
               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
               archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
@@ -601,6 +639,9 @@ pipeline {
             BASEDIR = "${env.WORKSPACE}/component"
             BRANCH = "${env.BRANCH_NAME}"
           }
+          when {
+            branch 'branch-1*'
+          }
           steps {
             sh '''#!/bin/bash -e
               echo "Setting up directories"
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 6eb38e3..a91b206 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -220,7 +220,7 @@ function personality_modules
   if [[ ${testtype} == unit ]]; then
     local tests_arg=""
     get_include_exclude_tests_arg tests_arg
-    extra="${extra} -PrunAllTests ${tests_arg}"
+    extra="${extra} -PrunSmallTests ${tests_arg}"
 
     # Inject the jenkins build-id for our surefire invocations
     # Used by zombie detection stuff, even though we're not including that yet.
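
As a footnote on the hbase-personality.sh change: it only swaps the Maven profile
handed to the unit test phase, so the trial nightly run exercises the small tests
instead of the full suite. Roughly the local equivalent, with the other flags Yetus
supplies omitted here:

    # Before: the full unit test suite
    mvn clean test -PrunAllTests
    # After: only the small tests, which keeps trial nightly runs fast
    mvn clean test -PrunSmallTests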