Posted to commits@cassandra.apache.org by mc...@apache.org on 2022/05/28 11:59:02 UTC

[cassandra] 01/01: Merge branch 'cassandra-2.2' into cassandra-3.0

This is an automated email from the ASF dual-hosted git repository.

mck pushed a commit to branch cassandra-3.0
in repository https://gitbox.apache.org/repos/asf/cassandra.git

commit 2b533b28c263fce199ae6ab8ed3739d0be7f851f
Merge: fb8bf30c6d 136c8c0f65
Author: Mick Semb Wever <mc...@apache.org>
AuthorDate: Sat May 28 13:52:34 2022 +0200

    Merge branch 'cassandra-2.2' into cassandra-3.0

 .jenkins/Jenkinsfile | 208 +++++++++++++++++++++++++++++++++++++--------------
 1 file changed, 153 insertions(+), 55 deletions(-)
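The bulk of this merge wraps each downstream build trigger in the Jenkinsfile with retry(2) and a simple linear backoff, so a failed trigger is attempted a second time after a pause instead of immediately marking the stage unstable or failed. A minimal sketch of that pattern, lifted from the diff below (the jvm-dtest stage is just one of the places it is applied; this is not a complete Jenkinsfile):

    stage('jvm-dtest') {
      steps {
        script {
          def attempt = 1
          retry(2) {
            // no sleep on the first attempt; wait 120s before the second one
            if (attempt > 1) {
              sleep(60 * attempt)
            }
            attempt = attempt + 1
            // propagate: false keeps this pipeline running; the result is inspected below
            jvm_dtest = build job: "${env.JOB_NAME}-jvm-dtest", propagate: false
          }
          if (jvm_dtest.result != 'SUCCESS') unstable('jvm-dtest failures')
          if (jvm_dtest.result == 'FAILURE') currentBuild.result = 'FAILURE'
        }
      }
    }

The same retry/backoff wrapper appears in the artifacts build and in the other test stages in the hunks below.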

diff --cc .jenkins/Jenkinsfile
index 8df18ec7ce,884c1ffbfe..ca4a0c7f24
--- a/.jenkins/Jenkinsfile
+++ b/.jenkins/Jenkinsfile
@@@ -28,12 -28,24 +28,24 @@@ pipeline 
    stages {
        stage('Init') {
          steps {
--            cleanWs()
 -            script {
 -                currentBuild.result='SUCCESS'
 -            }
++          cleanWs()
++          script {
++              currentBuild.result='SUCCESS'
++          }
          }
        }
        stage('Build') {
          steps {
-             build job: "${env.JOB_NAME}-artifacts"
+           script {
+             def attempt = 1
+             retry(2) {
+               if (attempt > 1) {
+                 sleep(60 * attempt)
+               }
+               attempt = attempt + 1
 -                build job: "${env.JOB_NAME}-artifacts"
++              build job: "${env.JOB_NAME}-artifacts"
+             }
+           }
          }
        }
        stage('Test') {
@@@ -110,30 -150,23 +150,48 @@@
                }
              }
            }
-           stage('burn') {
++          stage('cqlsh') {
 +            steps {
 +              script {
-                 burn = build job: "${env.JOB_NAME}-test-burn", propagate: false
-                 if (burn.result != 'SUCCESS') unstable('burn test failures')
-                 if (burn.result == 'FAILURE') currentBuild.result='FAILURE'
++                def attempt = 1
++                retry(2) {
++                  if (attempt > 1) {
++                    sleep(60 * attempt)
++                  }
++                  attempt = attempt + 1
++                  cqlsh = build job: "${env.JOB_NAME}-cqlsh-tests", propagate: false
++                }
++                if (cqlsh.result != 'SUCCESS') unstable('cqlsh failures')
++                if (cqlsh.result == 'FAILURE') currentBuild.result='FAILURE'
 +              }
 +            }
 +            post {
 +              always {
 +                  warnError('missing test xml files') {
 +                      script {
-                           copyTestResults('test-burn', burn.getNumber())
++                          copyTestResults('cqlsh-tests', cqlsh.getNumber())
 +                      }
 +                  }
 +              }
 +            }
 +          }
-           stage('compression') {
++        }
+       }
 -    }
 -    stage('Distributed Test') {
++      stage('Distributed Test') {
+         parallel {
 -          stage('JVM DTests') {
++          stage('jvm-dtest') {
              steps {
                script {
-                 compression = build job: "${env.JOB_NAME}-test-compression", propagate: false
-                 if (compression.result != 'SUCCESS') unstable('compression failures')
-                 if (compression.result == 'FAILURE') currentBuild.result='FAILURE'
+                 def attempt = 1
+                 retry(2) {
+                   if (attempt > 1) {
+                     sleep(60 * attempt)
+                   }
+                   attempt = attempt + 1
+                   jvm_dtest = build job: "${env.JOB_NAME}-jvm-dtest", propagate: false
+                 }
+                 if (jvm_dtest.result != 'SUCCESS') unstable('jvm-dtest failures')
+                 if (jvm_dtest.result == 'FAILURE') currentBuild.result='FAILURE'
                }
              }
              post {
@@@ -146,28 -179,6 +204,31 @@@
                }
              }
            }
-           stage('cqlsh') {
++          stage('jvm-dtest-upgrade') {
 +            steps {
 +              script {
-                 cqlsh = build job: "${env.JOB_NAME}-cqlsh-tests", propagate: false
-                   if (cqlsh.result != 'SUCCESS') unstable('cqlsh failures')
-                   if (cqlsh.result == 'FAILURE') currentBuild.result='FAILURE'
++                def attempt = 1
++                retry(2) {
++                  if (attempt > 1) {
++                    sleep(60 * attempt)
++                  }
++                  attempt = attempt + 1
++                  jvm_dtest_upgrade = build job: "${env.JOB_NAME}-jvm-dtest-upgrade", propagate: false
 +                }
++                if (jvm_dtest_upgrade.result != 'SUCCESS') unstable('jvm-dtest-upgrade failures')
++                if (jvm_dtest_upgrade.result == 'FAILURE') currentBuild.result='FAILURE'
 +              }
-               post {
-                 always {
-                     warnError('missing test xml files') {
-                         script {
-                             copyTestResults('cqlsh-tests', cqlsh.getNumber())
-                         }
-                     }
-                 }
++            }
++            post {
++              always {
++                  warnError('missing test xml files') {
++                      script {
++                          copyTestResults('jvm-dtest-upgrade', jvm_dtest_upgrade.getNumber())
++                      }
++                  }
 +              }
 +            }
 +          }
-       }
-       stage('Distributed Test') {
-         parallel {
            stage('dtest') {
              steps {
                script {
@@@ -222,12 -254,19 +304,19 @@@
                }
              }
            }
 -          stage('dtest-offheap') {
 +          stage('dtest-large-novnode') {
              steps {
                script {
-                 dtest_large_novnode = build job: "${env.JOB_NAME}-dtest-large-novnode", propagate: false
+                 def attempt = 1
+                 retry(2) {
+                   if (attempt > 1) {
+                     sleep(60 * attempt)
+                   }
+                   attempt = attempt + 1
 -                  dtest_offheap = build job: "${env.JOB_NAME}-dtest-offheap", propagate: false
++                  dtest_large_novnode = build job: "${env.JOB_NAME}-dtest-large-novnode", propagate: false
+                 }
 -                if (dtest_offheap.result != 'SUCCESS') unstable('dtest-offheap failures')
 -                if (dtest_offheap.result == 'FAILURE') currentBuild.result='FAILURE'
 +                if (dtest_large_novnode.result != 'SUCCESS') unstable('dtest-large-novnode failures')
 +                if (dtest_large_novnode.result == 'FAILURE') currentBuild.result='FAILURE'
                }
              }
              post {
@@@ -240,24 -279,6 +329,31 @@@
                }
              }
            }
 +          stage('dtest-upgrade') {
 +            steps {
 +              script {
-                 dtest_upgrade = build job: "${env.JOB_NAME}-dtest-upgrade", propagate: false
++                def attempt = 1
++                retry(2) {
++                  if (attempt > 1) {
++                    sleep(60 * attempt)
++                  }
++                  attempt = attempt + 1
++                  dtest_upgrade = build job: "${env.JOB_NAME}-dtest-upgrade", propagate: false
++                }
 +                if (dtest_upgrade.result != 'SUCCESS') unstable('dtest failures')
 +                if (dtest_upgrade.result == 'FAILURE') currentBuild.result='FAILURE'
 +              }
 +            }
 +            post {
 +              always {
 +                  warnError('missing test xml files') {
 +                      script {
 +                          copyTestResults('dtest-upgrade', dtest_upgrade.getNumber())
 +                      }
 +                  }
 +              }
 +            }
 +          }
          }
      }
      stage('Summary') {
@@@ -289,10 -310,11 +385,12 @@@ ${FAILED_TESTS,maxTests=500,showMessage
  For complete test report and logs see https://nightlies.apache.org/cassandra/${JOB_NAME}/${BUILD_NUMBER}/
  '''
            }
-           sh "echo \"cassandra-builds at: `git -C cassandra-builds log -1 --pretty=format:'%h %an %ad %s'`\" > builds.head"
-           sh "find . -type f -name \\*.head -exec cat {} \\;"
+           sh "echo \"summary) cassandra-builds: `git -C cassandra-builds log -1 --pretty=format:'%H %an %ad %s'`\" > builds.head"
+           sh "./cassandra-builds/jenkins-dsl/print-shas.sh"
            sh "xz TESTS-TestSuites.xml"
+           sh "wget --retry-connrefused --waitretry=1 \"\${BUILD_URL}/timestamps/?time=HH:mm:ss&timeZone=UTC&appendLog\" -qO - > console.log || echo wget failed"
+           sh "xz console.log"
 +          sh "echo \"For test report and logs see https://nightlies.apache.org/cassandra/${JOB_NAME}/${BUILD_NUMBER}/\""
        }
        post {
            always {

