You are viewing a plain text version of this content. The canonical link is available in the original HTML version of this message.
Posted to commits@hbase.apache.org by zh...@apache.org on 2021/10/07 15:44:57 UTC
[hbase] branch branch-2 updated: HBASE-26313 Publish the test logs
for our nightly jobs to nightlies.apache.org (#3713)
This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-2 by this push:
new 8358f34 HBASE-26313 Publish the test logs for our nightly jobs to nightlies.apache.org (#3713)
8358f34 is described below
commit 8358f3445611ec22cc4cfde78bd63a4554340417
Author: Duo Zhang <zh...@apache.org>
AuthorDate: Thu Oct 7 21:57:21 2021 +0800
HBASE-26313 Publish the test logs for our nightly jobs to nightlies.apache.org (#3713)
Signed-off-by: Peter Somogyi <ps...@apache.org>
---
dev-support/Jenkinsfile | 77 +++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 77 insertions(+)
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 07ffd28..9853add 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -53,6 +53,7 @@ pipeline {
// TODO does hadoopcheck need to be jdk specific?
SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
+ ASF_NIGHTLIES = 'https://nightlies.apache.org'
}
parameters {
booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
@@ -320,6 +321,25 @@ pipeline {
echo "No archiver directory, skipping compressing."
fi
'''
+ sshPublisher(publishers: [
+ sshPublisherDesc(configName: 'Nightlies',
+ transfers: [
+ sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
+ sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
+ )
+ ]
+ )
+ ])
+ // remove the big test logs zip file, store the nightlies url in test_logs.txt
+ sh '''#!/bin/bash -e
+ if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
+ echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
+ rm -rf "${OUTPUT_DIR}/test_logs.zip"
+ echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
+ else
+ echo "No test_logs.zip, skipping"
+ fi
+'''
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
@@ -398,6 +418,25 @@ pipeline {
echo "No archiver directory, skipping compressing."
fi
'''
+ sshPublisher(publishers: [
+ sshPublisherDesc(configName: 'Nightlies',
+ transfers: [
+ sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
+ sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
+ )
+ ]
+ )
+ ])
+ // remove the big test logs zip file, store the nightlies url in test_logs.txt
+ sh '''#!/bin/bash -e
+ if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
+ echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
+ rm -rf "${OUTPUT_DIR}/test_logs.zip"
+ echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
+ else
+ echo "No test_logs.zip, skipping"
+ fi
+'''
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
@@ -483,6 +522,25 @@ pipeline {
echo "No archiver directory, skipping compressing."
fi
'''
+ sshPublisher(publishers: [
+ sshPublisherDesc(configName: 'Nightlies',
+ transfers: [
+ sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
+ sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
+ )
+ ]
+ )
+ ])
+ // remove the big test logs zip file, store the nightlies url in test_logs.txt
+ sh '''#!/bin/bash -e
+ if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
+ echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
+ rm -rf "${OUTPUT_DIR}/test_logs.zip"
+ echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
+ else
+ echo "No test_logs.zip, skipping"
+ fi
+'''
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
@@ -570,6 +628,25 @@ pipeline {
echo "No archiver directory, skipping compressing."
fi
'''
+ sshPublisher(publishers: [
+ sshPublisherDesc(configName: 'Nightlies',
+ transfers: [
+ sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
+ sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
+ )
+ ]
+ )
+ ])
+ // remove the big test logs zip file, store the nightlies url in test_logs.txt
+ sh '''#!/bin/bash -e
+ if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
+ echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
+ rm -rf "${OUTPUT_DIR}/test_logs.zip"
+ echo "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${OUTPUT_DIR_RELATIVE}/test_logs.zip" > "${OUTPUT_DIR}/test_logs.txt"
+ else
+ echo "No test_logs.zip, skipping"
+ fi
+'''
// Has to be relative to WORKSPACE.
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"